| column | dtype | stats |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 to 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 to 209 |
| max_stars_repo_name | string | length 5 to 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 4 to 209 |
| max_issues_repo_name | string | length 5 to 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 4 to 209 |
| max_forks_repo_name | string | length 5 to 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 4 to 1.02M |
| avg_line_length | float64 | 1.07 to 66.1k |
| max_line_length | int64 | 4 to 266k |
| alphanum_fraction | float64 | 0.01 to 1 |

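Rows with this schema can be loaded and filtered programmatically. Below is a minimal sketch, assuming the records are available locally as a Parquet file; the file name `data.parquet` and the filter thresholds are hypothetical, not part of the dataset itself.

```python
import pandas as pd

# Load one shard of the dataset into a DataFrame (file name is hypothetical).
df = pd.read_parquet("data.parquet")

# Keep Python files that are reasonably small and mostly alphanumeric,
# using columns from the schema above; thresholds are illustrative only.
mask = (
    (df["lang"] == "Python")
    & (df["size"] < 100_000)
    & (df["alphanum_fraction"] > 0.25)
)
sample = df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "content"]]

# Show the first matching file's repo, path, and the first few lines of source.
row = sample.iloc[0]
print(row["max_stars_repo_name"], row["max_stars_repo_path"])
print("\n".join(row["content"].splitlines()[:5]))
```
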
hexsha: 14dbbcca53098750ba46cd125e6701543cbb7914 · size: 35 · ext: py · lang: Python
max_stars_repo: src/Cosmos/Security/__init__.py in cosmos-cms/cosmos-python @ 32f5fab1f503b2ab57176a82b3428e78f9ab591a, licenses ["MIT"], max_stars_count null
max_issues_repo: src/Cosmos/Security/__init__.py in cosmos-cms/cosmos-python @ 32f5fab1f503b2ab57176a82b3428e78f9ab591a, licenses ["MIT"], max_issues_count null
max_forks_repo: src/Cosmos/Security/__init__.py in cosmos-cms/cosmos-python @ 32f5fab1f503b2ab57176a82b3428e78f9ab591a, licenses ["MIT"], max_forks_count null
content:
```python
from .Encryption import Encryption
```
avg_line_length: 17.5 · max_line_length: 34 · alphanum_fraction: 0.857143

hexsha: 5bda47598f84e671e0ca8c98c9e7cbea8cd1cb65 · size: 676 · ext: py · lang: Python
max_stars_repo: env/bin/django-admin.py in Shaurov05/t5-transformer-paraphraser @ 001dc7be903a408965639dce93c24600533d1ed4, licenses ["MIT"], max_stars_count null
max_issues_repo: env/bin/django-admin.py in Shaurov05/t5-transformer-paraphraser @ 001dc7be903a408965639dce93c24600533d1ed4, licenses ["MIT"], max_issues_count 1, events 2020-11-28T18:41:02.000Z to 2020-11-28T18:41:02.000Z
max_forks_repo: env/bin/django-admin.py in Shaurov05/t5-transformer-paraphraser @ 001dc7be903a408965639dce93c24600533d1ed4, licenses ["MIT"], max_forks_count null
content:
```python
#!/home/parvez/paraphraser/env/bin/python
# When the django-admin.py deprecation ends, remove this script.
import warnings
from django.core import management
try:
from django.utils.deprecation import RemovedInDjango40Warning
except ImportError:
raise ImportError(
'django-admin.py was deprecated in Django 3.1 and removed in Django '
'4.0. Please manually remove this script from your virtual environment '
'and use django-admin instead.'
)
if __name__ == "__main__":
warnings.warn(
'django-admin.py is deprecated in favor of django-admin.',
RemovedInDjango40Warning,
)
management.execute_from_command_line()
```
avg_line_length: 30.727273 · max_line_length: 80 · alphanum_fraction: 0.724852

hexsha: de6d83dca7e8eddc6628cfe3a663a991d7b622c1 · size: 2,232 · ext: py · lang: Python
max_stars_repo: backend/wispy_mode_30092/urls.py in crowdbotics-apps/rocket-pocket-30092 @ fc5ebcd5d3100745d62ad915b970132b68d903ce, licenses ["FTL", "AML", "RSA-MD"], max_stars_count null
max_issues_repo: backend/wispy_mode_30092/urls.py in crowdbotics-apps/rocket-pocket-30092 @ fc5ebcd5d3100745d62ad915b970132b68d903ce, licenses ["FTL", "AML", "RSA-MD"], max_issues_count 10, events 2021-09-07T13:22:01.000Z to 2022-01-02T15:32:25.000Z
max_forks_repo: backend/wispy_mode_30092/urls.py in crowdbotics-apps/wispy-mode-30092 @ fc5ebcd5d3100745d62ad915b970132b68d903ce, licenses ["FTL", "AML", "RSA-MD"], max_forks_count null
content:
```python
"""wispy_mode_30092 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Rocket Pocket"
admin.site.site_title = "Rocket Pocket Admin Portal"
admin.site.index_title = "Rocket Pocket Admin"
# swagger
api_info = openapi.Info(
title="Wispy Mode API",
default_version="v1",
description="API documentation for Wispy Mode App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
TemplateView.as_view(template_name='index.html'))]
```
avg_line_length: 35.428571 · max_line_length: 87 · alphanum_fraction: 0.712814

hexsha: 7186459583288ce8dee50cbeeca578962ba7c568 · size: 13,692 · ext: py · lang: Python
max_stars_repo: trikit/tests/test_triangle_ut.py in trikit/trikit @ f77e9333a9d1c886f58aaf271d55b176c8f53262, licenses ["MIT"], max_stars_count 4, events 2021-01-15T15:53:10.000Z to 2021-10-01T17:28:59.000Z
max_issues_repo: trikit/tests/test_triangle_ut.py in trikit/trikit @ f77e9333a9d1c886f58aaf271d55b176c8f53262, licenses ["MIT"], max_issues_count null
max_forks_repo: trikit/tests/test_triangle_ut.py in trikit/trikit @ f77e9333a9d1c886f58aaf271d55b176c8f53262, licenses ["MIT"], max_forks_count 4, events 2021-02-14T01:55:51.000Z to 2021-09-08T18:17:41.000Z
content:
```python
"""
trikit.triangle tests.
"""
import unittest
import pandas as pd
import numpy as np
import trikit
# IncrTriangle unit tests.
class IncrTriangleTestCase(unittest.TestCase):
def setUp(self):
data = trikit.load(dataset="raa")
self.tri = trikit.totri(data=data, tri_type="incremental")
self.latest_ref = pd.DataFrame({
"origin":list(range(1981, 1991, 1)), "maturity":list(range(10, 0, -1)),
"dev":list(range(10, 0, -1)),
"latest":[
172.0, 535.0, 603.0, 984.0, 225.0, 2917.0, 1368.0,
6165.0, 2262.0, 2063.0
],
}, index=list(range(0, 10, 1))
)
self.offset_1 = np.asarray([54., 673., 649., 2658., 3786., 1233., 6926., 5596., 3133.])
self.offset_2 = np.asarray([599., -103., 3479., 2159., 6333., 5257., 3463., 1351.])
self.offset_7 = np.asarray([2638., 4179., 3410.])
def test_nbr_cells(self):
self.assertEqual(
self.tri.nbr_cells, 55
)
def test_triind(self):
triindprod = (self.tri.triind * self.tri).sum().sum()
self.assertTrue(
np.allclose(triindprod, 0)
)
def test_rlvi(self):
ref = pd.DataFrame({
"dev":list(range(10, 0, -1)),
"col_offset":list(range(9, -1, -1)),
}, index=list(range(1981, 1991, 1))
).sort_index()
self.assertTrue(ref.equals(self.tri.rlvi))
def test_clvi(self):
ref = pd.DataFrame({
"origin":list(range(1990, 1980, -1)),
"row_offset":list(range(9, -1, -1)),
}, index=list(range(1, 11, 1))
).sort_index()
self.assertTrue(ref.equals(self.tri.clvi))
def test_devp(self):
ref = pd.Series(
data=self.latest_ref.dev.values.tolist()[::-1],
name="devp"
).sort_index()
self.assertTrue(ref.equals(self.tri.devp))
def test_origins(self):
ref = pd.Series(
data=self.latest_ref.origin.values.tolist(),
name="origin"
).sort_index()
self.assertTrue(ref.equals(self.tri.origins))
def test_maturity(self):
dfref = self.latest_ref[["origin", "maturity"]]
dftri = self.tri.maturity.to_frame().reset_index(drop=False).rename(
{"index":"origin", "maturity":"maturity_tri"}, axis=1)
dfcomp = dfref.merge(dftri, on="origin", how="left")
dfcomp["diff"] = dfcomp["maturity"] - dfcomp["maturity_tri"]
self.assertEqual(dfcomp["diff"].sum(), 0)
def test_latest(self):
dfref = self.latest_ref[["origin", "dev", "latest"]].sort_index()
dftri = self.tri.latest.sort_index()
self.assertEqual((dfref - dftri).sum().sum(), 0)
def test_latest_by_origin(self):
dfref = self.latest_ref[["origin", "latest"]].sort_index()
dftri = self.tri.latest_by_origin.reset_index(drop=False).rename(
{"index":"origin", "latest_by_origin":"latest_tri"}, axis=1)
dfcomp = dfref.merge(dftri, on="origin", how="left")
dfcomp["diff"] = dfcomp["latest"] - dfcomp["latest_tri"]
self.assertEqual(dfcomp["diff"].sum(), 0)
def test_latest_by_devp(self):
dfref = self.latest_ref[["dev", "latest"]].sort_index()
dftri = self.tri.latest_by_devp.reset_index(drop=False).rename(
{"index":"dev", "latest_by_devp":"latest_tri"}, axis=1)
dfcomp = dfref.merge(dftri, on="dev", how="left")
dfcomp["diff"] = dfcomp["latest"] - dfcomp["latest_tri"]
self.assertEqual(dfcomp["diff"].sum(), 0)
def test_to_tbl(self):
self.assertTrue(isinstance(self.tri.to_tbl(), pd.DataFrame))
def test_to_cum(self):
self.assertTrue(isinstance(self.tri.to_cum(), trikit.triangle.CumTriangle))
def test_diagonal(self):
tri_offset_1 = self.tri.diagonal(offset=-1).value.values
tri_offset_2 = self.tri.diagonal(offset=-2).value.values
tri_offset_7 = self.tri.diagonal(offset=-7).value.values
test_1 = np.allclose(tri_offset_1, self.offset_1)
test_2 = np.allclose(tri_offset_2, self.offset_2)
test_7 = np.allclose(tri_offset_7, self.offset_7)
self.assertTrue(test_1 and test_2 and test_7)
# CumTriangle unit tests.
class CumTriangleTestCase(unittest.TestCase):
def setUp(self):
raa = trikit.load(dataset="raa")
self.tri = trikit.totri(raa, tri_type="cumulative")
self.latest_ref = pd.DataFrame({
"origin":list(range(1981, 1991, 1)), "maturity":list(range(10, 0, -1)),
"dev":list(range(10, 0, -1)),
"latest":[18834.0, 16704.0, 23466.0, 27067.0, 26180.0, 15852.0, 12314.0, 13112.0, 5395.0, 2063.0],
}, index=list(range(0, 10, 1))
)
self.a2aref = pd.DataFrame({
1:[1.64984, 40.42453, 2.63695, 2.04332, 8.75916, 4.25975, 7.21724, 5.14212, 1.72199],
2:[1.31902, 1.25928, 1.54282, 1.36443, 1.65562, 1.81567, 2.72289, 1.88743, np.NaN],
3:[1.08233, 1.97665, 1.16348, 1.34885, 1.39991, 1.10537, 1.12498, np.NaN, np.NaN],
4:[1.14689, 1.29214, 1.16071, 1.10152, 1.17078, 1.22551, np.NaN, np.NaN, np.NaN],
5:[1.19514, 1.13184, 1.1857 , 1.11347, 1.00867,np.NaN, np.NaN, np.NaN, np.NaN],
6:[1.11297, 0.9934 , 1.02922, 1.03773, np.NaN, np.NaN,np.NaN, np.NaN, np.NaN],
7:[1.03326, 1.04343, 1.02637,np.NaN, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
8:[1.0029 , 1.03309,np.NaN, np.NaN, np.NaN, np.NaN,np.NaN, np.NaN, np.NaN],
9:[1.00922,np.NaN, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN],
}, index=list(range(1981, 1990))
)
self.tri_sparse = pd.DataFrame({
1 :[np.NaN, 300, 370, 288, 412, 800, 746, 422,],
2 :[np.NaN, 499, 501, 315, 222, np.NaN, 630, np.NaN],
3 :[np.NaN, 277, 418, np.NaN, 255, 525, np.NaN, np.NaN,],
4 :[148 , 168, np.NaN, 195, 223, np.NaN, np.NaN, np.NaN,],
5 :[np.NaN, 107, 125, 101, np.NaN, np.NaN, np.NaN, np.NaN,],
6 :[77 , 67, 90, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN,],
7 :[np.NaN, 51, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN,],
8 :[1 , np.NaN, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN, np.NaN,],
}, index=range(1, 9)
)
def test_a2adim(self):
self.assertEqual(
self.tri.shape[0]-1, self.tri.a2a.shape[0],
"Number of age-to-age factors different than expected."
)
def test_a2a(self):
self.assertTrue(
np.abs((self.a2aref - self.tri.a2a).sum().sum())<.0001,
"Age-to-age Factors not properly computed."
)
def test_latest(self):
dfref = self.latest_ref[["origin", "dev", "latest"]].sort_index()
dftri = self.tri.latest.sort_index()
self.assertEqual((dfref - dftri).sum().sum(), 0)
def test_latest_by_origin(self):
dfref = self.latest_ref[["origin", "latest"]].sort_index()
dftri = self.tri.latest_by_origin.reset_index(drop=False).rename(
{"index":"origin", "latest_by_origin":"latest_tri"}, axis=1)
dfcomp = dfref.merge(dftri, on="origin", how="left")
dfcomp["diff"] = dfcomp["latest"] - dfcomp["latest_tri"]
self.assertTrue(np.allclose(dfcomp["diff"].sum(), 0))
def test_latest_by_devp(self):
dfref = self.latest_ref[["dev", "latest"]].sort_index()
dftri = self.tri.latest_by_devp.reset_index(drop=False).rename(
{"index":"dev", "latest_by_devp":"latest_tri"}, axis=1)
dfcomp = dfref.merge(dftri, on="dev", how="left")
dfcomp["diff"] = dfcomp["latest"] - dfcomp["latest_tri"]
self.assertTrue(np.allclose(dfcomp["diff"].sum(), 0))
def test_to_incr(self):
self.assertTrue(isinstance(self.tri.to_incr(), trikit.triangle.IncrTriangle))
def test_to_tbl(self):
self.assertTrue(isinstance(self.tri.to_tbl(), pd.DataFrame))
def test_a2a_ranks(self):
self.assertTrue(self.tri.ranked_a2a["s_2"].sum()==36)
def test_a2a_assignment(self):
self.assertTrue(self.tri.a2a_assignment.sum().sum()==0)
class ToTriTestCase(unittest.TestCase):
def setUp(self):
origin, dev, value = "origin", "dev", "value"
incrtab = trikit.load(dataset="raa")
# Create cumulative tabular data.
cumtab = incrtab.copy(deep=True).sort_values(by=["origin", "dev"]).reset_index(drop=True)
cumtab["cum"] = cumtab.groupby(["origin"], as_index=False)["value"].cumsum()
cumtab = cumtab.drop("value", axis=1).rename({"cum":"value"}, axis=1)
# Create incremental triangle data.
incrtri = incrtab[[origin, dev, value]]
incrtri = incrtri.groupby([origin, dev], as_index=False).sum()
incrtri = incrtri.sort_values(by=[origin, dev])
incrtri = incrtri.pivot(index=origin, columns=dev).rename_axis(None)
incrtri.columns = incrtri.columns.droplevel(0)
# Create cumulative triangle data.
cumtri = incrtab[[origin, dev, value]]
cumtri = cumtri.groupby([origin, dev], as_index=False).sum()
cumtri = cumtri.sort_values(by=[origin, dev])
cumtri = cumtri.pivot(index=origin, columns=dev).rename_axis(None)
cumtri.columns = cumtri.columns.droplevel(0)
self.incrtab = incrtab
self.cumtab = cumtab
self.incrtri = incrtri
self.cumtri = cumtri
self.incr_latest_ref = pd.DataFrame({
"origin":list(range(1981, 1991, 1)), "maturity":list(range(10, 0, -1)),
"dev":list(range(10, 0, -1)),
"latest":[172.0, 535.0, 603.0, 984.0, 225.0, 2917.0, 1368.0,6165.0, 2262.0, 2063.0],
}, index=list(range(0, 10, 1))
)
self.cum_latest_ref = pd.DataFrame({
"origin":list(range(1981, 1991, 1)), "maturity":list(range(10, 0, -1)),
"dev":list(range(10, 0, -1)),
"latest":[18834.0, 16704.0, 23466.0, 27067.0, 26180.0, 15852.0, 12314.0, 13112.0, 5395.0, 2063.0],
}, index=list(range(0, 10, 1))
)
def test_cumtab_2_incrtri(self):
# Convert cumulative tabular data to incr triangle.
tri = trikit.totri(self.cumtab, tri_type="incr", data_format="cum", data_shape="tabular")
self.assertTrue(
isinstance(tri, trikit.triangle.IncrTriangle),
"Error converting cum tabular data to incr tri."
)
def test_cumtab_2_cumtri(self):
# Convert cumulative tabular data to cum triangle.
tri = trikit.totri(self.cumtab, tri_type="cum", data_format="cum", data_shape="tabular")
self.assertTrue(
isinstance(tri, trikit.triangle.CumTriangle),
"Error converting cum tabular data to cum tri."
)
def test_incrtab_2_incrtri(self):
# Convert incremental tabular data to incr triangle.
tri = trikit.totri(self.incrtab, tri_type="incr", data_format="incr", data_shape="tabular")
self.assertTrue(
isinstance(tri, trikit.triangle.IncrTriangle),
"Error converting incr tabular data to incr tri."
)
def test_incrtab_2_cumtri(self):
# Convert incremental tabular data to cum triangle.
tri = trikit.totri(self.incrtab, tri_type="cum", data_format="incr", data_shape="tabular")
self.assertTrue(
isinstance(tri, trikit.triangle.CumTriangle),
"Error converting incr tabular data to cum tri."
)
def test_incrtri_2_incrtri(self):
# Convert incremental DataFrame tri to incr triangle.
tri = trikit.totri(self.incrtri, tri_type="incr", data_format="incr", data_shape="triangle")
self.assertTrue(
isinstance(tri, trikit.triangle.IncrTriangle),
"Error converting incr tri data to incr tri."
)
def test_incrtri_2_cumtri(self):
# Convert incremental DataFrame tri to cum triangle.
tri = trikit.totri(self.incrtri, tri_type="cum", data_format="incr", data_shape="triangle")
self.assertTrue(
isinstance(tri, trikit.triangle.CumTriangle),
"Error converting incr tri data to cum tri."
)
def test_cumtri_2_incrtri(self):
# Convert cumulative DataFrame tri to incr triangle.
tri = trikit.totri(self.cumtri, tri_type="incr", data_format="cum", data_shape="triangle")
self.assertTrue(
isinstance(tri, trikit.triangle.IncrTriangle),
"Error converting cum tri data to incr tri."
)
def test_cumtri_2_cumtri(self):
# Convert cumulative DataFrame tri to cum triangle.
tri = trikit.totri(self.cumtri, tri_type="cum", data_format="cum", data_shape="triangle")
self.assertTrue(
isinstance(tri, trikit.triangle.CumTriangle),
"Error converting cumtri data to cumtri."
)
def test_alt_colnames(self):
# Create triangle with different origin, dev and value names.
dfrnm = self.incrtab.rename(
{"origin":"ay", "dev":"devp", "value":"loss_amt"}, axis=1
)
tri = trikit.totri(
dfrnm, tri_type="cum", data_format="incr", data_shape="tabular",
origin="ay", dev="devp", value="loss_amt"
)
self.assertTrue(
isinstance(tri, trikit.triangle.CumTriangle),
"Error converting cumtri data to cumtri."
)
if __name__ == "__main__":
unittest.main()
```
avg_line_length: 37.927978 · max_line_length: 110 · alphanum_fraction: 0.588446

hexsha: ba1e84293f8d71a1b0be848d40a9656c24325b7b · size: 712 · ext: py · lang: Python
max_stars_repo: project_cl/tools/stats.py in zhyever/SimIPU @ 5b346e392c161a5e9fdde09b1692656bc7cd3faf, licenses ["Apache-2.0"], max_stars_count 29, events 2021-09-29T13:31:12.000Z to 2022-03-15T13:31:25.000Z
max_issues_repo: project_cl/tools/stats.py in zhyever/SimIPU @ 5b346e392c161a5e9fdde09b1692656bc7cd3faf, licenses ["Apache-2.0"], max_issues_count 3, events 2021-12-13T01:21:12.000Z to 2022-02-24T01:46:14.000Z
max_forks_repo: project_cl/tools/stats.py in zhyever/SimIPU @ 5b346e392c161a5e9fdde09b1692656bc7cd3faf, licenses ["Apache-2.0"], max_forks_count 1, events 2021-12-03T08:39:18.000Z to 2021-12-03T08:39:18.000Z
content:
```python
import mmcv
import pickle
import os
root = '/mnt/share_data/waymo_dataset/kitti_format'
# f3 = open(os.path.join(root,'two_percentage.pkl'),'rb') # 1% -> 1580, full_data->158081
f3 = open(os.path.join(root,'one_percentage_val.pkl'),'rb')
train = pickle.load(f3)
counts = {"Car": 0, "Ped":0, "Cyc":0}
for item in train:
mask = item['annos']['camera_id'] == 0
label = item['annos']['name'][mask]
for label_item in label:
if label_item == "Car":
counts["Car"]+=1
elif label_item == "Pedestrian":
counts["Ped"]+=1
else:
counts["Cyc"]+=1
# print(train[-1]['annos']['camera_id'])
# print(train[-1]['annos']['name'])
print(counts)
```
avg_line_length: 20.941176 · max_line_length: 89 · alphanum_fraction: 0.585674

hexsha: 08dfe29687727f638ecd865ba6d2042260ae82ba · size: 23,521 · ext: py · lang: Python
max_stars_repo: ckan/tests/legacy/lib/test_dictization.py in larrycameron80/ckan @ fbab8f51b36a293206fdd998d71ece27d2565951, licenses ["Apache-2.0"], max_stars_count 1, events 2022-02-14T20:25:34.000Z to 2022-02-14T20:25:34.000Z
max_issues_repo: ckan/tests/legacy/lib/test_dictization.py in larrycameron80/ckan @ fbab8f51b36a293206fdd998d71ece27d2565951, licenses ["Apache-2.0"], max_issues_count 4, events 2020-03-24T17:53:23.000Z to 2021-03-31T19:19:03.000Z
max_forks_repo: ckan/tests/legacy/lib/test_dictization.py in larrycameron80/ckan @ fbab8f51b36a293206fdd998d71ece27d2565951, licenses ["Apache-2.0"], max_forks_count 3, events 2020-01-02T10:32:37.000Z to 2021-12-22T07:20:21.000Z
content:
```python
# encoding: utf-8
from __future__ import print_function
from nose.tools import assert_equal, assert_not_in, assert_in
from pprint import pprint, pformat
from difflib import unified_diff
import ckan.lib.search as search
from ckan.lib.create_test_data import CreateTestData
from ckan import model
from ckan.lib.dictization import (table_dictize,
table_dict_save)
from ckan.lib.dictization.model_dictize import (package_dictize,
resource_dictize,
package_to_api1,
package_to_api2,
user_dictize,
)
from ckan.lib.dictization.model_save import (package_dict_save,
resource_dict_save,
activity_dict_save,
package_api_to_dict,
group_api_to_dict,
package_tag_list_save,
)
import ckan.logic.action.get
class TestBasicDictize:
@classmethod
def setup_class(cls):
# clean the db so we can run these tests on their own
model.repo.rebuild_db()
search.clear_all()
CreateTestData.create()
cls.package_expected = {
u'author': None,
u'author_email': None,
u'creator_user_id': None,
'extras': [
# extras are no longer revisioned so we get the latest version
{'key': u'david', 'state': u'active', 'value': u'new_value'},
{'key': u'genre', 'state': u'active', 'value': u'new_value'},
{'key': u'original media', 'state': u'active',
'value': u'book'}
],
'groups': [{
u'name': u'david',
u'capacity': u'public',
u'image_url': u'',
u'image_display_url': u'',
u'description': u'These are books that David likes.',
u'display_name': u"Dave's books",
u'type': u'group',
u'state': u'active',
u'is_organization': False,
u'title': u"Dave's books",
u"approval_status": u"approved"},
{
u'name': u'roger',
u'capacity': u'public',
u'description': u'Roger likes these books.',
u'image_url': u'',
'image_display_url': u'',
'display_name': u"Roger's books",
u'type': u'group',
u'state': u'active',
u'is_organization': False,
u'title': u"Roger's books",
u"approval_status": u"approved"}],
'isopen': True,
u'license_id': u'other-open',
'license_title': u'Other (Open)',
'organization': None,
u'owner_org': None,
u'maintainer': None,
u'maintainer_email': None,
u'name': u'annakarenina',
u'notes': u'Some test notes\n\n### A 3rd level heading\n\n**Some bolded text.**\n\n*Some italicized text.*\n\nForeign characters:\nu with umlaut \xfc\n66-style quote \u201c\nforeign word: th\xfcmb\n\nNeeds escaping:\nleft arrow <\n\n<http://ckan.net/>\n\n',
'num_resources': 2,
'num_tags': 3,
u'private': False,
'relationships_as_object': [],
'relationships_as_subject': [],
'resources': [{u'alt_url': u'alt123',
u'cache_last_updated': None,
u'cache_url': None,
u'description': u'Full text. Needs escaping: " Umlaut: \xfc',
u'format': u'plain text',
u'hash': u'abc123',
u'last_modified': None,
u'mimetype': None,
u'mimetype_inner': None,
u'name': None,
u'position': 0,
u'resource_type': None,
u'size': None,
u'size_extra': u'123',
u'url_type': None,
u'state': u'active',
u'url': u'http://datahub.io/download/x=1&y=2',},
{u'alt_url': u'alt345',
u'cache_last_updated': None,
u'cache_url': None,
u'description': u'Index of the novel',
u'format': u'JSON',
u'hash': u'def456',
u'last_modified': None,
u'mimetype': None,
u'mimetype_inner': None,
u'name': None,
u'position': 1,
u'resource_type': None,
u'url_type': None,
u'size': None,
u'size_extra': u'345',
u'state': u'active',
u'url': u'http://datahub.io/index.json'}],
u'state': u'active',
'tags': [{u'name': u'Flexible \u30a1',
'display_name': u'Flexible \u30a1',
u'state': u'active'},
{'display_name': u'russian',
u'name': u'russian',
u'state': u'active'},
{'display_name': u'tolstoy',
u'name': u'tolstoy',
u'state': u'active'}],
u'title': u'A Novel By Tolstoy',
u'type': u'dataset',
u'url': u'http://datahub.io',
u'version': u'0.7a',
}
@classmethod
def teardown_class(cls):
model.repo.rebuild_db()
model.Session.remove()
def remove_changable_columns(self, dict, remove_package_id=False):
ids_to_keep = ['license_id', 'creator_user_id']
if not remove_package_id:
ids_to_keep.append('package_id')
for key, value in dict.items():
if key.endswith('id') and key not in ids_to_keep:
dict.pop(key)
if key == 'created':
dict.pop(key)
if 'timestamp' in key:
dict.pop(key)
if key in ['metadata_created','metadata_modified']:
dict.pop(key)
if isinstance(value, list):
for new_dict in value:
self.remove_changable_columns(new_dict,
key in ['resources', 'extras'] or remove_package_id)
return dict
def test_03_package_to_api1(self):
context = {"model": model,
"session": model.Session}
pkg = model.Session.query(model.Package).filter_by(name='annakarenina').first()
pprint(package_to_api1(pkg, context))
pprint(pkg.as_dict())
asdict = pkg.as_dict()
asdict['download_url'] = asdict['resources'][0]['url']
asdict['license_title'] = u'Other (Open)'
asdict['num_tags'] = 3
asdict['num_resources'] = 2
dictize = package_to_api1(pkg, context)
# the is_dict method doesn't care about organizations
del dictize['organization']
assert dictize == asdict
def test_04_package_to_api1_with_relationship(self):
context = {"model": model,
"session": model.Session}
create = CreateTestData
create.create_family_test_data()
pkg = model.Session.query(model.Package).filter_by(name='homer').one()
as_dict = pkg.as_dict()
as_dict['license_title'] = None
as_dict['num_tags'] = 0
as_dict['num_resources'] = 0
dictize = package_to_api1(pkg, context)
as_dict["relationships"].sort(key=lambda x:x.items())
dictize["relationships"].sort(key=lambda x:x.items())
# the is_dict method doesn't care about organizations
del dictize['organization']
as_dict_string = pformat(as_dict)
dictize_string = pformat(dictize)
print(as_dict_string)
print(dictize_string)
assert as_dict == dictize, "\n".join(unified_diff(as_dict_string.split("\n"), dictize_string.split("\n")))
def test_05_package_to_api2(self):
context = {"model": model,
"session": model.Session}
pkg = model.Session.query(model.Package).filter_by(name='annakarenina').first()
as_dict = pkg.as_dict(ref_package_by='id', ref_group_by='id')
dictize = package_to_api2(pkg, context)
as_dict_string = pformat(as_dict)
dictize_string = pformat(dictize)
print(as_dict_string)
print(dictize_string)
assert package_to_api2(pkg, context) == dictize, "\n".join(unified_diff(as_dict_string.split("\n"), dictize_string.split("\n")))
def test_06_package_to_api2_with_relationship(self):
context = {"model": model,
"session": model.Session}
pkg = model.Session.query(model.Package).filter_by(name='homer').one()
as_dict = pkg.as_dict(ref_package_by='id', ref_group_by='id')
as_dict['license_title'] = None
as_dict['num_tags'] = 0
as_dict['num_resources'] = 0
dictize = package_to_api2(pkg, context)
as_dict["relationships"].sort(key=lambda x:x.items())
dictize["relationships"].sort(key=lambda x:x.items())
# the is_dict method doesn't care about organizations
del dictize['organization']
as_dict_string = pformat(as_dict)
dictize_string = pformat(dictize)
print(as_dict_string)
print(dictize_string)
assert as_dict == dictize, "\n".join(unified_diff(as_dict_string.split("\n"), dictize_string.split("\n")))
def test_07_table_simple_save(self):
context = {"model": model,
"session": model.Session}
anna1 = model.Session.query(model.Package).filter_by(name='annakarenina').one()
anna_dictized = self.remove_changable_columns(table_dictize(anna1, context))
anna_dictized["name"] = 'annakarenina2'
table_dict_save(anna_dictized, model.Package, context)
model.Session.commit()
pkg = model.Session.query(model.Package).filter_by(name='annakarenina2').one()
assert self.remove_changable_columns(table_dictize(pkg, context)) == anna_dictized, self.remove_changable_columns(table_dictize(pkg, context))
def test_08_package_save(self):
context = {"model": model,
"user": 'testsysadmin',
"session": model.Session}
anna1 = model.Session.query(model.Package).filter_by(name='annakarenina').one()
anna_dictized = self.remove_changable_columns(package_dictize(anna1, context))
anna_dictized["name"] = u'annakarenina3'
package_dict_save(anna_dictized, context)
model.Session.commit()
# Re-clean anna_dictized
anna_dictized = self.remove_changable_columns(anna_dictized)
pkg = model.Session.query(model.Package).filter_by(name='annakarenina3').one()
package_dictized = self.remove_changable_columns(package_dictize(pkg, context))
anna_original = pformat(anna_dictized)
anna_after_save = pformat(package_dictized)
assert package_dictized == anna_dictized,\
"\n".join(unified_diff(anna_original.split("\n"), anna_after_save.split("\n")))
def test_14_resource_no_id(self):
context = {"model": model,
"session": model.Session}
model.Session.commit()
new_resource = {
'mimetype': None,
u'alt_url': u'empty resource group id',
'hash': u'abc123',
'description': u'Full text. Needs escaping: " Umlaut: \xfc',
'format': u'plain text',
'url': u'http://test_new',
'cache_url': None,
'cache_last_updated': None,
'state': u'active',
'mimetype_inner': None,
'url_type': None,
'last_modified': None,
'position': 0,
'size': None,
'size_extra': u'123',
'resource_type': None,
'name': None,
'package_id':'' # Just so we can save
}
resource_dict_save(new_resource, context)
model.Session.commit()
model.Session.remove()
# Remove the package id
del new_resource['package_id']
res = model.Session.query(model.Resource).filter_by(url=u'http://test_new').one()
res_dictized = self.remove_changable_columns(resource_dictize(res, context), True)
assert res_dictized == new_resource, res_dictized
def test_15_api_to_dictize(self):
context = {"model": model,
'api_version': 1,
"session": model.Session}
api_data = {
'name' : u'testpkg',
'title': u'Some Title',
'url': u'http://blahblahblah.mydomain',
'resources': [ {
u'url':u'http://blah.com/file2.xml',
u'format':u'xml',
u'description':u'Second file',
u'hash':u'def123',
u'alt_url':u'alt_url',
u'size':u'200',
},
{
u'url':u'http://blah.com/file.xml',
u'format':u'xml',
u'description':u'Main file',
u'hash':u'abc123',
u'alt_url':u'alt_url',
u'size':u'200',
},
],
'tags': u'russion novel',
'license_id': u'gpl-3.0',
'extras': {
'genre' : u'horror',
'media' : u'dvd',
},
}
dictized = package_api_to_dict(api_data, context)
assert dictized == {'extras': [{'key': 'genre', 'value': u'horror'},
{'key': 'media', 'value': u'dvd'}],
'license_id': u'gpl-3.0',
'name': u'testpkg',
'resources': [{u'alt_url': u'alt_url',
u'description': u'Second file',
u'size': u'200',
u'format': u'xml',
u'hash': u'def123',
u'url': u'http://blah.com/file2.xml'},
{u'alt_url': u'alt_url',
u'description': u'Main file',
u'size': u'200',
u'format': u'xml',
u'hash': u'abc123',
u'url': u'http://blah.com/file.xml'}],
'tags': [{'name': u'russion'}, {'name': u'novel'}],
'title': u'Some Title',
'url': u'http://blahblahblah.mydomain'}
package_dict_save(dictized, context)
model.Session.commit()
model.Session.remove()
pkg = model.Session.query(model.Package).filter_by(name=u'testpkg').one()
package_dictized = self.remove_changable_columns(package_dictize(pkg, context))
def test_17_group_apis_to_dict(self):
context = {"model": model,
"session": model.Session}
api_group = {
'name' : u'testgroup',
'title' : u'Some Group Title',
'description' : u'Great group!',
'packages' : [u'annakarenina', u'warandpeace'],
}
assert group_api_to_dict(api_group, context) == {'description': u'Great group!',
'name': u'testgroup',
'packages': [{'id': u'annakarenina'}, {'id': u'warandpeace'}],
'title': u'Some Group Title'}, pformat(group_api_to_dict(api_group, context))
def test_18_package_tag_list_save(self):
name = u'testpkg18'
context = {'model': model,
'session': model.Session}
pkg_dict = {'name': name}
package = table_dict_save(pkg_dict, model.Package, context)
tag_dicts = [{'name': 'tag1'}, {'name': 'tag2'}]
package_tag_list_save(tag_dicts, package, context)
model.repo.commit_and_remove()
pkg = model.Package.by_name(name)
assert_equal(set([tag.name for tag in pkg.get_tags()]),
set(('tag1', 'tag2')))
def test_19_package_tag_list_save_duplicates(self):
name = u'testpkg19'
context = {'model': model,
'session': model.Session}
pkg_dict = {'name': name}
package = table_dict_save(pkg_dict, model.Package, context)
tag_dicts = [{'name': 'tag1'}, {'name': 'tag1'}] # duplicate
package_tag_list_save(tag_dicts, package, context)
model.repo.commit_and_remove()
pkg = model.Package.by_name(name)
assert_equal(set([tag.name for tag in pkg.get_tags()]), set(('tag1',)))
def test_20_activity_save(self):
# Add a new Activity object to the database by passing a dict to
# activity_dict_save()
context = {"model": model, "session": model.Session}
user = model.User.by_name(u'tester')
sent = {
'user_id': user.id,
'object_id': user.id,
'activity_type': 'changed user'
}
activity_dict_save(sent, context)
model.Session.commit()
# Retrieve the newest Activity object from the database, check that its
# attributes match those of the dict we saved.
got = ckan.logic.action.get.user_activity_list(context,
{'id': user.id})[0]
assert got['user_id'] == sent['user_id']
assert got['object_id'] == sent['object_id']
assert got['activity_type'] == sent['activity_type']
# The activity object should also have an ID and timestamp.
assert got['id']
assert got['timestamp']
def test_21_package_dictization_with_deleted_group(self):
"""
Ensure that the dictization does not return groups that the dataset has
been removed from.
"""
# Create a new dataset and 2 new groups
pkg = model.Package(name='testing-deleted-groups')
group_1 = model.Group(name='test-group-1')
group_2 = model.Group(name='test-group-2')
model.Session.add(pkg)
model.Session.add(group_1)
model.Session.add(group_2)
model.Session.flush()
# Add the dataset to group_1, and signal that the dataset used
# to be a member of group_2 by setting its membership state to 'deleted'
membership_1 = model.Member(table_id = pkg.id,
table_name = 'package',
group = group_1,
group_id = group_1.id,
state = 'active')
membership_2 = model.Member(table_id = pkg.id,
table_name = 'package',
group = group_2,
group_id = group_2.id,
state = 'deleted')
model.Session.add(membership_1)
model.Session.add(membership_2)
model.repo.commit()
# Dictize the dataset
context = {"model": model,
"session": model.Session}
result = package_dictize(pkg, context)
self.remove_changable_columns(result)
assert_not_in('test-group-2', [ g['name'] for g in result['groups'] ])
assert_in('test-group-1', [ g['name'] for g in result['groups'] ])
def test_22_user_dictize_as_sysadmin(self):
'''Sysadmins should be allowed to see certain sensitive data.'''
context = {
'model': model,
'session': model.Session,
'user': 'testsysadmin',
}
user = model.User.by_name('tester')
user_dict = user_dictize(user, context)
# Check some of the non-sensitive data
assert 'name' in user_dict
assert 'about' in user_dict
# Check sensitive data is available
assert 'apikey' in user_dict
assert 'email' in user_dict
# Passwords and reset keys should never be available
assert 'password' not in user_dict
assert 'reset_key' not in user_dict
def test_23_user_dictize_as_same_user(self):
'''User should be able to see their own sensitive data.'''
context = {
'model': model,
'session': model.Session,
'user': 'tester',
}
user = model.User.by_name('tester')
user_dict = user_dictize(user, context)
# Check some of the non-sensitive data
assert 'name' in user_dict
assert 'about' in user_dict
# Check sensitive data is available
assert 'apikey' in user_dict
assert 'email' in user_dict
# Passwords and reset keys should never be available
assert 'password' not in user_dict
assert 'reset_key' not in user_dict
def test_24_user_dictize_as_other_user(self):
'''User should not be able to see other's sensitive data.'''
context = {
'model': model,
'session': model.Session,
'user': 'annafan',
}
user = model.User.by_name('tester')
user_dict = user_dictize(user, context)
# Check some of the non-sensitive data
assert 'name' in user_dict
assert 'about' in user_dict
# Check sensitive data is not available
assert 'apikey' not in user_dict
assert 'reset_key' not in user_dict
assert 'email' not in user_dict
# Passwords should never be available
assert 'password' not in user_dict
def test_25_user_dictize_as_anonymous(self):
'''Anonymous should not be able to see other's sensitive data.'''
context = {
'model': model,
'session': model.Session,
'user': '',
}
user = model.User.by_name('tester')
user_dict = user_dictize(user, context)
# Check some of the non-sensitive data
assert 'name' in user_dict
assert 'about' in user_dict
# Check sensitive data is not available
assert 'apikey' not in user_dict
assert 'reset_key' not in user_dict
assert 'email' not in user_dict
# Passwords should never be available
assert 'password' not in user_dict
```
avg_line_length: 37.937097 · max_line_length: 269 · alphanum_fraction: 0.516007

hexsha: 61748b7483ec3e27b01e7d1624edfa6db25f8bd2 · size: 18,012 · ext: py · lang: Python
max_stars_repo: tests/test_order.py in mmmcorpsvit/saleor @ 09e40a2af549109fbc2f8e82c68a195053e8224e, licenses ["BSD-3-Clause"], max_stars_count null
max_issues_repo: tests/test_order.py in mmmcorpsvit/saleor @ 09e40a2af549109fbc2f8e82c68a195053e8224e, licenses ["BSD-3-Clause"], max_issues_count null
max_forks_repo: tests/test_order.py in mmmcorpsvit/saleor @ 09e40a2af549109fbc2f8e82c68a195053e8224e, licenses ["BSD-3-Clause"], max_forks_count null
content:
```python
from decimal import Decimal
import pytest
from django.urls import reverse
from django_countries.fields import Country
from payments import FraudStatus, PaymentStatus
from prices import Money, TaxedMoney
from tests.utils import get_redirect_location
from saleor.account.models import User
from saleor.checkout.utils import create_order
from saleor.core.exceptions import InsufficientStock
from saleor.core.utils.taxes import (
DEFAULT_TAX_RATE_NAME, get_tax_rate_by_name, get_taxes_for_country)
from saleor.order import FulfillmentStatus, OrderStatus, models
from saleor.order.models import Order
from saleor.order.utils import (
add_variant_to_order, cancel_fulfillment, cancel_order, recalculate_order,
restock_fulfillment_lines, restock_order_lines, update_order_prices,
update_order_status)
def test_total_setter():
price = TaxedMoney(net=Money(10, 'USD'), gross=Money(15, 'USD'))
order = models.Order()
order.total = price
assert order.total_net == Money(10, 'USD')
assert order.total.net == Money(10, 'USD')
assert order.total_gross == Money(15, 'USD')
assert order.total.gross == Money(15, 'USD')
assert order.total.tax == Money(5, 'USD')
def test_order_get_subtotal(order_with_lines):
order_with_lines.discount_name = "Test discount"
order_with_lines.discount_amount = (
order_with_lines.total.gross * Decimal('0.5'))
recalculate_order(order_with_lines)
target_subtotal = order_with_lines.total - order_with_lines.shipping_price
target_subtotal += order_with_lines.discount_amount
assert order_with_lines.get_subtotal() == target_subtotal
def test_get_tax_rate_by_name(taxes):
rate_name = 'pharmaceuticals'
tax_rate = get_tax_rate_by_name(rate_name, taxes)
assert tax_rate == taxes[rate_name]['value']
def test_get_tax_rate_by_name_fallback_to_standard(taxes):
rate_name = 'unexisting tax rate'
tax_rate = get_tax_rate_by_name(rate_name, taxes)
assert tax_rate == taxes[DEFAULT_TAX_RATE_NAME]['value']
def test_get_tax_rate_by_name_empty_taxes(product):
rate_name = 'unexisting tax rate'
tax_rate = get_tax_rate_by_name(rate_name)
assert tax_rate == 0
def test_add_variant_to_order_adds_line_for_new_variant(
order_with_lines, product, taxes, product_translation_fr, settings):
order = order_with_lines
variant = product.variants.get()
lines_before = order.lines.count()
settings.LANGUAGE_CODE = 'fr'
add_variant_to_order(order, variant, 1, taxes=taxes)
line = order.lines.last()
assert order.lines.count() == lines_before + 1
assert line.product_sku == variant.sku
assert line.quantity == 1
assert line.unit_price == TaxedMoney(
net=Money('8.13', 'USD'), gross=Money(10, 'USD'))
assert line.tax_rate == taxes[product.tax_rate]['value']
assert line.translated_product_name == variant.display_product(
translated=True)
@pytest.mark.parametrize('track_inventory', (True, False))
def test_add_variant_to_order_allocates_stock_for_new_variant(
order_with_lines, product, track_inventory):
variant = product.variants.get()
variant.track_inventory = track_inventory
variant.save()
stock_before = variant.quantity_allocated
add_variant_to_order(order_with_lines, variant, 1)
variant.refresh_from_db()
if track_inventory:
assert variant.quantity_allocated == stock_before + 1
else:
assert variant.quantity_allocated == stock_before
def test_add_variant_to_order_edits_line_for_existing_variant(
order_with_lines):
existing_line = order_with_lines.lines.first()
variant = existing_line.variant
lines_before = order_with_lines.lines.count()
line_quantity_before = existing_line.quantity
add_variant_to_order(order_with_lines, variant, 1)
existing_line.refresh_from_db()
assert order_with_lines.lines.count() == lines_before
assert existing_line.product_sku == variant.sku
assert existing_line.quantity == line_quantity_before + 1
def test_add_variant_to_order_allocates_stock_for_existing_variant(
order_with_lines):
existing_line = order_with_lines.lines.first()
variant = existing_line.variant
stock_before = variant.quantity_allocated
add_variant_to_order(order_with_lines, variant, 1)
variant.refresh_from_db()
assert variant.quantity_allocated == stock_before + 1
def test_add_variant_to_order_allow_overselling(order_with_lines):
existing_line = order_with_lines.lines.first()
variant = existing_line.variant
stock_before = variant.quantity_allocated
quantity = variant.quantity + 1
with pytest.raises(InsufficientStock):
add_variant_to_order(
order_with_lines, variant, quantity, allow_overselling=False)
add_variant_to_order(
order_with_lines, variant, quantity, allow_overselling=True)
variant.refresh_from_db()
assert variant.quantity_allocated == stock_before + quantity
def test_view_connect_order_with_user_authorized_user(
order, authorized_client, customer_user):
order.user_email = customer_user.email
order.save()
url = reverse(
'order:connect-order-with-user', kwargs={'token': order.token})
response = authorized_client.post(url)
redirect_location = get_redirect_location(response)
assert redirect_location == reverse('order:details', args=[order.token])
order.refresh_from_db()
assert order.user == customer_user
def test_view_connect_order_with_user_different_email(
order, authorized_client, customer_user):
"""Order was placed from different email, than user's
we are trying to assign it to."""
order.user = None
order.user_email = 'example_email@email.email'
order.save()
assert order.user_email != customer_user.email
url = reverse(
'order:connect-order-with-user', kwargs={'token': order.token})
response = authorized_client.post(url)
redirect_location = get_redirect_location(response)
assert redirect_location == reverse('account:details')
order.refresh_from_db()
assert order.user is None
def test_view_order_with_deleted_variant(authorized_client, order_with_lines):
order = order_with_lines
order_details_url = reverse('order:details', kwargs={'token': order.token})
# delete a variant associated to the order
order.lines.first().variant.delete()
# check if the order details view handles the deleted variant
response = authorized_client.get(order_details_url)
assert response.status_code == 200
def test_view_fulfilled_order_with_deleted_variant(
authorized_client, fulfilled_order):
order = fulfilled_order
order_details_url = reverse('order:details', kwargs={'token': order.token})
# delete a variant associated to the order
order.lines.first().variant.delete()
# check if the order details view handles the deleted variant
response = authorized_client.get(order_details_url)
assert response.status_code == 200
@pytest.mark.parametrize('track_inventory', (True, False))
def test_restock_order_lines(order_with_lines, track_inventory):
order = order_with_lines
line_1 = order.lines.first()
line_2 = order.lines.last()
line_1.variant.track_inventory = track_inventory
line_2.variant.track_inventory = track_inventory
line_1.variant.save()
line_2.variant.save()
stock_1_quantity_allocated_before = line_1.variant.quantity_allocated
stock_2_quantity_allocated_before = line_2.variant.quantity_allocated
stock_1_quantity_before = line_1.variant.quantity
stock_2_quantity_before = line_2.variant.quantity
restock_order_lines(order)
line_1.variant.refresh_from_db()
line_2.variant.refresh_from_db()
if track_inventory:
assert line_1.variant.quantity_allocated == (
stock_1_quantity_allocated_before - line_1.quantity)
assert line_2.variant.quantity_allocated == (
stock_2_quantity_allocated_before - line_2.quantity)
else:
assert line_1.variant.quantity_allocated == (
stock_1_quantity_allocated_before)
assert line_2.variant.quantity_allocated == (
stock_2_quantity_allocated_before)
assert line_1.variant.quantity == stock_1_quantity_before
assert line_2.variant.quantity == stock_2_quantity_before
assert line_1.quantity_fulfilled == 0
assert line_2.quantity_fulfilled == 0
def test_restock_fulfilled_order_lines(fulfilled_order):
line_1 = fulfilled_order.lines.first()
line_2 = fulfilled_order.lines.last()
stock_1_quantity_allocated_before = line_1.variant.quantity_allocated
stock_2_quantity_allocated_before = line_2.variant.quantity_allocated
stock_1_quantity_before = line_1.variant.quantity
stock_2_quantity_before = line_2.variant.quantity
restock_order_lines(fulfilled_order)
line_1.variant.refresh_from_db()
line_2.variant.refresh_from_db()
assert line_1.variant.quantity_allocated == (
stock_1_quantity_allocated_before)
assert line_2.variant.quantity_allocated == (
stock_2_quantity_allocated_before)
assert line_1.variant.quantity == stock_1_quantity_before + line_1.quantity
assert line_2.variant.quantity == stock_2_quantity_before + line_2.quantity
def test_restock_fulfillment_lines(fulfilled_order):
fulfillment = fulfilled_order.fulfillments.first()
line_1 = fulfillment.lines.first()
line_2 = fulfillment.lines.last()
stock_1 = line_1.order_line.variant
stock_2 = line_2.order_line.variant
stock_1_quantity_allocated_before = stock_1.quantity_allocated
stock_2_quantity_allocated_before = stock_2.quantity_allocated
stock_1_quantity_before = stock_1.quantity
stock_2_quantity_before = stock_2.quantity
restock_fulfillment_lines(fulfillment)
stock_1.refresh_from_db()
stock_2.refresh_from_db()
assert stock_1.quantity_allocated == (
stock_1_quantity_allocated_before + line_1.quantity)
assert stock_2.quantity_allocated == (
stock_2_quantity_allocated_before + line_2.quantity)
assert stock_1.quantity == stock_1_quantity_before + line_1.quantity
assert stock_2.quantity == stock_2_quantity_before + line_2.quantity
def test_cancel_order(fulfilled_order):
cancel_order(fulfilled_order, restock=False)
assert all([
f.status == FulfillmentStatus.CANCELED
for f in fulfilled_order.fulfillments.all()])
assert fulfilled_order.status == OrderStatus.CANCELED
def test_cancel_fulfillment(fulfilled_order):
fulfillment = fulfilled_order.fulfillments.first()
line_1 = fulfillment.lines.first()
line_2 = fulfillment.lines.first()
cancel_fulfillment(fulfillment, restock=False)
assert fulfillment.status == FulfillmentStatus.CANCELED
assert fulfilled_order.status == OrderStatus.UNFULFILLED
assert line_1.order_line.quantity_fulfilled == 0
assert line_2.order_line.quantity_fulfilled == 0
def test_update_order_status(fulfilled_order):
fulfillment = fulfilled_order.fulfillments.first()
line = fulfillment.lines.first()
order_line = line.order_line
order_line.quantity_fulfilled -= line.quantity
order_line.save()
line.delete()
update_order_status(fulfilled_order)
assert fulfilled_order.status == OrderStatus.PARTIALLY_FULFILLED
line = fulfillment.lines.first()
order_line = line.order_line
order_line.quantity_fulfilled -= line.quantity
order_line.save()
line.delete()
update_order_status(fulfilled_order)
assert fulfilled_order.status == OrderStatus.UNFULFILLED
def test_order_queryset_confirmed(draft_order):
other_orders = [
Order.objects.create(status=OrderStatus.UNFULFILLED),
Order.objects.create(status=OrderStatus.PARTIALLY_FULFILLED),
Order.objects.create(status=OrderStatus.FULFILLED),
Order.objects.create(status=OrderStatus.CANCELED)]
confirmed_orders = Order.objects.confirmed()
assert draft_order not in confirmed_orders
assert all([order in confirmed_orders for order in other_orders])
def test_order_queryset_drafts(draft_order):
other_orders = [
Order.objects.create(status=OrderStatus.UNFULFILLED),
Order.objects.create(status=OrderStatus.PARTIALLY_FULFILLED),
Order.objects.create(status=OrderStatus.FULFILLED),
Order.objects.create(status=OrderStatus.CANCELED)
]
draft_orders = Order.objects.drafts()
assert draft_order in draft_orders
assert all([order not in draft_orders for order in other_orders])
def test_order_queryset_to_ship():
total = TaxedMoney(net=Money(10, 'USD'), gross=Money(15, 'USD'))
orders_to_ship = [
Order.objects.create(status=OrderStatus.UNFULFILLED, total=total),
Order.objects.create(
status=OrderStatus.PARTIALLY_FULFILLED, total=total)
]
for order in orders_to_ship:
order.payments.create(
variant='default', status=PaymentStatus.CONFIRMED, currency='USD',
total=order.total_gross.amount,
captured_amount=order.total_gross.amount)
orders_not_to_ship = [
Order.objects.create(status=OrderStatus.DRAFT, total=total),
Order.objects.create(status=OrderStatus.UNFULFILLED, total=total),
Order.objects.create(
status=OrderStatus.PARTIALLY_FULFILLED, total=total),
Order.objects.create(status=OrderStatus.FULFILLED, total=total),
Order.objects.create(status=OrderStatus.CANCELED, total=total)
]
orders = Order.objects.to_ship()
assert all([order in orders for order in orders_to_ship])
assert all([order not in orders for order in orders_not_to_ship])
def test_update_order_prices(order_with_lines):
taxes = get_taxes_for_country(Country('DE'))
address = order_with_lines.shipping_address
address.country = 'DE'
address.save()
line_1 = order_with_lines.lines.first()
line_2 = order_with_lines.lines.last()
price_1 = line_1.variant.get_price(taxes=taxes)
price_2 = line_2.variant.get_price(taxes=taxes)
shipping_price = order_with_lines.shipping_method.get_total_price(taxes)
update_order_prices(order_with_lines, None)
line_1.refresh_from_db()
line_2.refresh_from_db()
assert line_1.unit_price == price_1
assert line_2.unit_price == price_2
assert order_with_lines.shipping_price == shipping_price
total = (
line_1.quantity * price_1 + line_2.quantity * price_2 + shipping_price)
assert order_with_lines.total == total
def test_order_payment_flow(
request_cart_with_item, client, address, shipping_zone):
request_cart_with_item.shipping_address = address
request_cart_with_item.billing_address = address.get_copy()
request_cart_with_item.email = 'test@example.com'
request_cart_with_item.shipping_method = (
shipping_zone.shipping_methods.first())
request_cart_with_item.save()
order = create_order(
request_cart_with_item, 'tracking_code', discounts=None, taxes=None)
# Select payment method
url = reverse('order:payment', kwargs={'token': order.token})
data = {'method': 'default'}
response = client.post(url, data, follow=True)
assert len(response.redirect_chain) == 1
assert response.status_code == 200
redirect_url = reverse(
'order:payment', kwargs={'token': order.token, 'variant': 'default'})
assert response.request['PATH_INFO'] == redirect_url
# Go to payment details page, enter payment data
data = {
'status': PaymentStatus.PREAUTH,
'fraud_status': FraudStatus.UNKNOWN,
'gateway_response': '3ds-disabled',
'verification_result': 'waiting'}
response = client.post(redirect_url, data)
assert response.status_code == 302
redirect_url = reverse(
'order:payment-success', kwargs={'token': order.token})
assert get_redirect_location(response) == redirect_url
# Complete payment, go to checkout success page
data = {'status': 'ok'}
response = client.post(redirect_url, data)
assert response.status_code == 302
redirect_url = reverse(
'order:checkout-success', kwargs={'token': order.token})
assert get_redirect_location(response) == redirect_url
# Assert that payment object was created and contains correct data
payment = order.payments.all()[0]
assert payment.total == order.total.gross.amount
assert payment.tax == order.total.tax.amount
assert payment.currency == order.total.currency
assert payment.delivery == order.shipping_price.net.amount
assert len(payment.get_purchased_items()) == len(order.lines.all())
def test_create_user_after_order(order, client):
order.user_email = 'hello@mirumee.com'
order.save()
url = reverse('order:checkout-success', kwargs={'token': order.token})
data = {'password': 'password'}
response = client.post(url, data)
redirect_url = reverse('order:details', kwargs={'token': order.token})
assert get_redirect_location(response) == redirect_url
user = User.objects.filter(email='hello@mirumee.com').first()
assert user is not None
assert user.orders.filter(token=order.token).exists()
def test_view_order_details(order, client):
url = reverse('order:details', kwargs={'token': order.token})
response = client.get(url)
assert response.status_code == 200
def test_add_order_note_view(order, authorized_client, customer_user):
order.user_email = customer_user.email
order.save()
url = reverse('order:details', kwargs={'token': order.token})
customer_note = 'bla-bla note'
data = {'customer_note': customer_note}
response = authorized_client.post(url, data)
redirect_url = reverse('order:details', kwargs={'token': order.token})
assert get_redirect_location(response) == redirect_url
order.refresh_from_db()
assert order.customer_note == customer_note
```
avg_line_length: 35.952096 · max_line_length: 79 · alphanum_fraction: 0.744726

hexsha: 0e9120311fef52459a8719b5fb1e7b29f990801d · size: 95,936 · ext: py · lang: Python
max_stars_repo: elastalert/alerts.py in Nclose-ZA/elastalert @ 7016ce1aa0ea4cfa9cbd624f9eb4df3de39d2fd7, licenses ["Apache-2.0"], max_stars_count null
max_issues_repo: elastalert/alerts.py in Nclose-ZA/elastalert @ 7016ce1aa0ea4cfa9cbd624f9eb4df3de39d2fd7, licenses ["Apache-2.0"], max_issues_count null
max_forks_repo: elastalert/alerts.py in Nclose-ZA/elastalert @ 7016ce1aa0ea4cfa9cbd624f9eb4df3de39d2fd7, licenses ["Apache-2.0"], max_forks_count null
content:
```python
# -*- coding: utf-8 -*-
import copy
import datetime
import json
import logging
import os
import re
import subprocess
import sys
import time
import uuid
import warnings
from email.mime.text import MIMEText
from email.utils import formatdate
from HTMLParser import HTMLParser
from smtplib import SMTP
from smtplib import SMTP_SSL
from smtplib import SMTPAuthenticationError
from smtplib import SMTPException
from socket import error
import boto3
import requests
import stomp
from exotel import Exotel
from jira.client import JIRA
from jira.exceptions import JIRAError
from requests.auth import HTTPProxyAuth
from requests.exceptions import RequestException
from staticconf.loader import yaml_loader
from texttable import Texttable
from thehive4py.api import TheHiveApi
from thehive4py.models import Alert
from thehive4py.models import AlertArtifact
from thehive4py.models import CustomFieldHelper
from twilio.base.exceptions import TwilioRestException
from twilio.rest import Client as TwilioClient
from util import EAException
from util import elastalert_logger
from util import lookup_es_key
from util import pretty_ts
from util import resolve_string
from util import ts_now
from util import ts_to_dt
class DateTimeEncoder(json.JSONEncoder):
def default(self, obj):
if hasattr(obj, 'isoformat'):
return obj.isoformat()
else:
return json.JSONEncoder.default(self, obj)
class BasicMatchString(object):
""" Creates a string containing fields in match for the given rule. """
def __init__(self, rule, match):
self.rule = rule
self.match = match
def _ensure_new_line(self):
while self.text[-2:] != '\n\n':
self.text += '\n'
def _add_custom_alert_text(self):
missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
alert_text = unicode(self.rule.get('alert_text', ''))
if 'alert_text_args' in self.rule:
alert_text_args = self.rule.get('alert_text_args')
alert_text_values = [lookup_es_key(self.match, arg) for arg in alert_text_args]
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
for i, text_value in enumerate(alert_text_values):
if text_value is None:
alert_value = self.rule.get(alert_text_args[i])
if alert_value:
alert_text_values[i] = alert_value
alert_text_values = [missing if val is None else val for val in alert_text_values]
alert_text = alert_text.format(*alert_text_values)
elif 'alert_text_kw' in self.rule:
kw = {}
for name, kw_name in self.rule.get('alert_text_kw').items():
val = lookup_es_key(self.match, name)
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
if val is None:
val = self.rule.get(name)
kw[kw_name] = missing if val is None else val
alert_text = alert_text.format(**kw)
self.text += alert_text
def _add_rule_text(self):
self.text += self.rule['type'].get_match_str(self.match)
def _add_top_counts(self):
for key, counts in self.match.items():
if key.startswith('top_events_'):
self.text += '%s:\n' % (key[11:])
top_events = counts.items()
if not top_events:
self.text += 'No events found.\n'
else:
top_events.sort(key=lambda x: x[1], reverse=True)
for term, count in top_events:
self.text += '%s: %s\n' % (term, count)
self.text += '\n'
def _add_match_items(self):
match_items = self.match.items()
match_items.sort(key=lambda x: x[0])
for key, value in match_items:
if key.startswith('top_events_'):
continue
value_str = unicode(value)
value_str.replace('\\n', '\n')
if type(value) in [list, dict]:
try:
value_str = self._pretty_print_as_json(value)
except TypeError:
# Non serializable object, fallback to str
pass
self.text += '%s: %s\n' % (key, value_str)
def _pretty_print_as_json(self, blob):
try:
return json.dumps(blob, cls=DateTimeEncoder, sort_keys=True, indent=4, ensure_ascii=False)
except UnicodeDecodeError:
            # This blob contains non-unicode, so let's pretend it's Latin-1 to show something
return json.dumps(blob, cls=DateTimeEncoder, sort_keys=True, indent=4, encoding='Latin-1', ensure_ascii=False)
def __str__(self):
self.text = ''
if 'alert_text' not in self.rule:
self.text += self.rule['name'] + '\n\n'
self._add_custom_alert_text()
self._ensure_new_line()
if self.rule.get('alert_text_type') != 'alert_text_only':
self._add_rule_text()
self._ensure_new_line()
if self.rule.get('top_count_keys'):
self._add_top_counts()
if self.rule.get('alert_text_type') != 'exclude_fields':
self._add_match_items()
return self.text
class JiraFormattedMatchString(BasicMatchString):
def _add_match_items(self):
match_items = dict([(x, y) for x, y in self.match.items() if not x.startswith('top_events_')])
json_blob = self._pretty_print_as_json(match_items)
preformatted_text = u'{{code}}{0}{{code}}'.format(json_blob)
self.text += preformatted_text
class Alerter(object):
""" Base class for types of alerts.
:param rule: The rule configuration.
"""
required_options = frozenset([])
def __init__(self, rule):
self.rule = rule
# pipeline object is created by ElastAlerter.send_alert()
# and attached to each alerters used by a rule before calling alert()
self.pipeline = None
self.resolve_rule_references(self.rule)
def resolve_rule_references(self, root):
# Support referencing other top-level rule properties to avoid redundant copy/paste
if type(root) == list:
# Make a copy since we may be modifying the contents of the structure we're walking
for i, item in enumerate(copy.copy(root)):
if type(item) == dict or type(item) == list:
self.resolve_rule_references(root[i])
else:
root[i] = self.resolve_rule_reference(item)
elif type(root) == dict:
# Make a copy since we may be modifying the contents of the structure we're walking
for key, value in root.copy().iteritems():
if type(value) == dict or type(value) == list:
self.resolve_rule_references(root[key])
else:
root[key] = self.resolve_rule_reference(value)
def resolve_rule_reference(self, value):
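        # A value of the form $other_rule_key$ is replaced with that top-level rule property, preserving int type where appropriate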
strValue = unicode(value)
if strValue.startswith('$') and strValue.endswith('$') and strValue[1:-1] in self.rule:
if type(value) == int:
return int(self.rule[strValue[1:-1]])
else:
return self.rule[strValue[1:-1]]
else:
return value
def alert(self, match):
""" Send an alert. Match is a dictionary of information about the alert.
:param match: A dictionary of relevant information to the alert.
"""
raise NotImplementedError()
def get_info(self):
""" Returns a dictionary of data related to this alert. At minimum, this should contain
a field type corresponding to the type of Alerter. """
return {'type': 'Unknown'}
def create_title(self, matches):
""" Creates custom alert title to be used, e.g. as an e-mail subject or JIRA issue summary.
:param matches: A list of dictionaries of relevant information to the alert.
"""
if 'alert_subject' in self.rule:
return self.create_custom_title(matches)
return self.create_default_title(matches)
def create_custom_title(self, matches):
alert_subject = unicode(self.rule['alert_subject'])
alert_subject_max_len = int(self.rule.get('alert_subject_max_len', 2048))
if 'alert_subject_args' in self.rule:
alert_subject_args = self.rule['alert_subject_args']
alert_subject_values = [lookup_es_key(matches[0], arg) for arg in alert_subject_args]
# Support referencing other top-level rule properties
# This technically may not work if there is a top-level rule property with the same name
# as an es result key, since it would have been matched in the lookup_es_key call above
for i, subject_value in enumerate(alert_subject_values):
if subject_value is None:
alert_value = self.rule.get(alert_subject_args[i])
if alert_value:
alert_subject_values[i] = alert_value
missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
alert_subject_values = [missing if val is None else val for val in alert_subject_values]
alert_subject = alert_subject.format(*alert_subject_values)
if len(alert_subject) > alert_subject_max_len:
alert_subject = alert_subject[:alert_subject_max_len]
return alert_subject
def create_alert_body(self, matches):
body = self.get_aggregation_summary_text(matches)
if self.rule.get('alert_text_type') != 'aggregation_summary_only':
for match in matches:
body += unicode(BasicMatchString(self.rule, match))
# Separate text of aggregated alerts with dashes
if len(matches) > 1:
body += '\n----------------------------------------\n'
return body
def get_aggregation_summary_text__maximum_width(self):
"""Get maximum width allowed for summary text."""
return 80
def get_aggregation_summary_text(self, matches):
text = ''
if 'aggregation' in self.rule and 'summary_table_fields' in self.rule:
text = self.rule.get('summary_prefix', '')
summary_table_fields = self.rule['summary_table_fields']
if not isinstance(summary_table_fields, list):
summary_table_fields = [summary_table_fields]
# Include a count aggregation so that we can see at a glance how many of each aggregation_key were encountered
summary_table_fields_with_count = summary_table_fields + ['count']
text += "Aggregation resulted in the following data for summary_table_fields ==> {0}:\n\n".format(
summary_table_fields_with_count
)
text_table = Texttable(max_width=self.get_aggregation_summary_text__maximum_width())
text_table.header(summary_table_fields_with_count)
# Format all fields as 'text' to avoid long numbers being shown as scientific notation
text_table.set_cols_dtype(['t' for i in summary_table_fields_with_count])
match_aggregation = {}
# Maintain an aggregate count for each unique key encountered in the aggregation period
for match in matches:
key_tuple = tuple([unicode(lookup_es_key(match, key)) for key in summary_table_fields])
if key_tuple not in match_aggregation:
match_aggregation[key_tuple] = 1
else:
match_aggregation[key_tuple] = match_aggregation[key_tuple] + 1
for keys, count in match_aggregation.iteritems():
text_table.add_row([key for key in keys] + [count])
text += text_table.draw() + '\n\n'
text += self.rule.get('summary_prefix', '')
return unicode(text)
def create_default_title(self, matches):
return self.rule['name']
def get_account(self, account_file):
""" Gets the username and password from an account file.
:param account_file: Path to the file which contains user and password information.
It can be either an absolute file path or one that is relative to the given rule.
"""
if os.path.isabs(account_file):
account_file_path = account_file
else:
account_file_path = os.path.join(os.path.dirname(self.rule['rule_file']), account_file)
account_conf = yaml_loader(account_file_path)
if 'user' not in account_conf or 'password' not in account_conf:
raise EAException('Account file must have user and password fields')
self.user = account_conf['user']
self.password = account_conf['password']
class StompAlerter(Alerter):
""" The stomp alerter publishes alerts via stomp to a broker. """
required_options = frozenset(
['stomp_hostname', 'stomp_hostport', 'stomp_login', 'stomp_password'])
def alert(self, matches):
alerts = []
qk = self.rule.get('query_key', None)
fullmessage = {}
for match in matches:
if qk is not None:
resmatch = lookup_es_key(match, qk)
else:
resmatch = None
if resmatch is not None:
elastalert_logger.info(
'Alert for %s, %s at %s:' % (self.rule['name'], resmatch, lookup_es_key(match, self.rule['timestamp_field'])))
alerts.append(
'Alert for %s, %s at %s:' % (self.rule['name'], resmatch, lookup_es_key(
match, self.rule['timestamp_field']))
)
fullmessage['match'] = resmatch
else:
elastalert_logger.info('Rule %s generated an alert at %s:' % (
self.rule['name'], lookup_es_key(match, self.rule['timestamp_field'])))
alerts.append(
'Rule %s generated an alert at %s:' % (self.rule['name'], lookup_es_key(
match, self.rule['timestamp_field']))
)
fullmessage['match'] = lookup_es_key(
match, self.rule['timestamp_field'])
elastalert_logger.info(unicode(BasicMatchString(self.rule, match)))
fullmessage['alerts'] = alerts
fullmessage['rule'] = self.rule['name']
fullmessage['rule_file'] = self.rule['rule_file']
fullmessage['matching'] = unicode(BasicMatchString(self.rule, match))
fullmessage['alertDate'] = datetime.datetime.now(
).strftime("%Y-%m-%d %H:%M:%S")
fullmessage['body'] = self.create_alert_body(matches)
fullmessage['matches'] = matches
self.stomp_hostname = self.rule.get('stomp_hostname', 'localhost')
self.stomp_hostport = self.rule.get('stomp_hostport', '61613')
self.stomp_login = self.rule.get('stomp_login', 'admin')
self.stomp_password = self.rule.get('stomp_password', 'admin')
self.stomp_destination = self.rule.get(
'stomp_destination', '/queue/ALERT')
self.stomp_ssl = self.rule.get('stomp_ssl', False)
conn = stomp.Connection([(self.stomp_hostname, self.stomp_hostport)], use_ssl=self.stomp_ssl)
conn.start()
conn.connect(self.stomp_login, self.stomp_password)
        # Ensures that the CONNECTED frame is received; otherwise, the disconnect call will fail.
time.sleep(1)
conn.send(self.stomp_destination, json.dumps(fullmessage))
conn.disconnect()
def get_info(self):
return {'type': 'stomp'}
class DebugAlerter(Alerter):
""" The debug alerter uses a Python logger (by default, alerting to terminal). """
def alert(self, matches):
qk = self.rule.get('query_key', None)
for match in matches:
if qk in match:
elastalert_logger.info(
'Alert for %s, %s at %s:' % (self.rule['name'], match[qk], lookup_es_key(match, self.rule['timestamp_field'])))
else:
elastalert_logger.info('Alert for %s at %s:' % (self.rule['name'], lookup_es_key(match, self.rule['timestamp_field'])))
elastalert_logger.info(unicode(BasicMatchString(self.rule, match)))
def get_info(self):
return {'type': 'debug'}
class EmailAlerter(Alerter):
""" Sends an email alert """
required_options = frozenset(['email'])
def __init__(self, *args):
super(EmailAlerter, self).__init__(*args)
self.smtp_host = self.rule.get('smtp_host', 'localhost')
self.smtp_ssl = self.rule.get('smtp_ssl', False)
self.from_addr = self.rule.get('from_addr', 'ElastAlert')
self.smtp_port = self.rule.get('smtp_port')
if self.rule.get('smtp_auth_file'):
self.get_account(self.rule['smtp_auth_file'])
self.smtp_key_file = self.rule.get('smtp_key_file')
self.smtp_cert_file = self.rule.get('smtp_cert_file')
# Convert email to a list if it isn't already
if isinstance(self.rule['email'], basestring):
self.rule['email'] = [self.rule['email']]
        # If there is a cc then also convert it to a list if it isn't
cc = self.rule.get('cc')
if cc and isinstance(cc, basestring):
self.rule['cc'] = [self.rule['cc']]
# If there is a bcc then also convert it to a list if it isn't
bcc = self.rule.get('bcc')
if bcc and isinstance(bcc, basestring):
self.rule['bcc'] = [self.rule['bcc']]
add_suffix = self.rule.get('email_add_domain')
if add_suffix and not add_suffix.startswith('@'):
self.rule['email_add_domain'] = '@' + add_suffix
def alert(self, matches):
body = self.create_alert_body(matches)
# Add JIRA ticket if it exists
if self.pipeline is not None and 'jira_ticket' in self.pipeline:
url = '%s/browse/%s' % (self.pipeline['jira_server'], self.pipeline['jira_ticket'])
body += '\nJIRA ticket: %s' % (url)
to_addr = self.rule['email']
if 'email_from_field' in self.rule:
recipient = lookup_es_key(matches[0], self.rule['email_from_field'])
if isinstance(recipient, basestring):
if '@' in recipient:
to_addr = [recipient]
elif 'email_add_domain' in self.rule:
to_addr = [recipient + self.rule['email_add_domain']]
elif isinstance(recipient, list):
to_addr = recipient
if 'email_add_domain' in self.rule:
to_addr = [name + self.rule['email_add_domain'] for name in to_addr]
if self.rule.get('email_format') == 'html':
email_msg = MIMEText(body.encode('UTF-8'), 'html', _charset='UTF-8')
else:
email_msg = MIMEText(body.encode('UTF-8'), _charset='UTF-8')
email_msg['Subject'] = self.create_title(matches)
email_msg['To'] = ', '.join(to_addr)
email_msg['From'] = self.from_addr
email_msg['Reply-To'] = self.rule.get('email_reply_to', email_msg['To'])
email_msg['Date'] = formatdate()
if self.rule.get('cc'):
email_msg['CC'] = ','.join(self.rule['cc'])
to_addr = to_addr + self.rule['cc']
if self.rule.get('bcc'):
to_addr = to_addr + self.rule['bcc']
try:
if self.smtp_ssl:
if self.smtp_port:
self.smtp = SMTP_SSL(self.smtp_host, self.smtp_port, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file)
else:
self.smtp = SMTP_SSL(self.smtp_host, keyfile=self.smtp_key_file, certfile=self.smtp_cert_file)
else:
if self.smtp_port:
self.smtp = SMTP(self.smtp_host, self.smtp_port)
else:
self.smtp = SMTP(self.smtp_host)
self.smtp.ehlo()
if self.smtp.has_extn('STARTTLS'):
self.smtp.starttls(keyfile=self.smtp_key_file, certfile=self.smtp_cert_file)
if 'smtp_auth_file' in self.rule:
self.smtp.login(self.user, self.password)
        # SMTPAuthenticationError subclasses SMTPException, so it must be caught first
        except SMTPAuthenticationError as e:
            raise EAException("SMTP username/password rejected: %s" % (e))
        except (SMTPException, error) as e:
            raise EAException("Error connecting to SMTP host: %s" % (e))
self.smtp.sendmail(self.from_addr, to_addr, email_msg.as_string())
self.smtp.close()
elastalert_logger.info("Sent email to %s" % (to_addr))
def create_default_title(self, matches):
subject = 'ElastAlert: %s' % (self.rule['name'])
# If the rule has a query_key, add that value plus timestamp to subject
if 'query_key' in self.rule:
qk = matches[0].get(self.rule['query_key'])
if qk:
subject += ' - %s' % (qk)
return subject
def get_info(self):
return {'type': 'email',
'recipients': self.rule['email']}
class JiraAlerter(Alerter):
""" Creates a Jira ticket for each alert """
required_options = frozenset(['jira_server', 'jira_account_file', 'jira_project', 'jira_issuetype'])
# Maintain a static set of built-in fields that we explicitly know how to set
# For anything else, we will do best-effort and try to set a string value
known_field_list = [
'jira_account_file',
'jira_assignee',
'jira_bump_after_inactivity',
'jira_bump_in_statuses',
'jira_bump_not_in_statuses',
'jira_bump_only',
'jira_bump_tickets',
'jira_component',
'jira_components',
'jira_description',
'jira_ignore_in_title',
'jira_issuetype',
'jira_label',
'jira_labels',
'jira_max_age',
'jira_priority',
'jira_project',
'jira_server',
'jira_transition_to',
'jira_watchers',
]
# Some built-in jira types that can be used as custom fields require special handling
# Here is a sample of one of them:
# {"id":"customfield_12807","name":"My Custom Field","custom":true,"orderable":true,"navigable":true,"searchable":true,
# "clauseNames":["cf[12807]","My Custom Field"],"schema":{"type":"array","items":"string",
# "custom":"com.atlassian.jira.plugin.system.customfieldtypes:multiselect","customId":12807}}
# There are likely others that will need to be updated on a case-by-case basis
custom_string_types_with_special_handling = [
'com.atlassian.jira.plugin.system.customfieldtypes:multicheckboxes',
'com.atlassian.jira.plugin.system.customfieldtypes:multiselect',
'com.atlassian.jira.plugin.system.customfieldtypes:radiobuttons',
]
def __init__(self, rule):
super(JiraAlerter, self).__init__(rule)
self.server = self.rule['jira_server']
self.get_account(self.rule['jira_account_file'])
self.project = self.rule['jira_project']
self.issue_type = self.rule['jira_issuetype']
# Deferred settings refer to values that can only be resolved when a match
# is found and as such loading them will be delayed until we find a match
self.deferred_settings = []
# We used to support only a single component. This allows us to maintain backwards compatibility
# while also giving the user-facing API a more representative name
self.components = self.rule.get('jira_components', self.rule.get('jira_component'))
# We used to support only a single label. This allows us to maintain backwards compatibility
# while also giving the user-facing API a more representative name
self.labels = self.rule.get('jira_labels', self.rule.get('jira_label'))
self.description = self.rule.get('jira_description', '')
self.assignee = self.rule.get('jira_assignee')
self.max_age = self.rule.get('jira_max_age', 30)
self.priority = self.rule.get('jira_priority')
self.bump_tickets = self.rule.get('jira_bump_tickets', False)
self.bump_not_in_statuses = self.rule.get('jira_bump_not_in_statuses')
self.bump_in_statuses = self.rule.get('jira_bump_in_statuses')
self.bump_after_inactivity = self.rule.get('jira_bump_after_inactivity', 0)
self.bump_only = self.rule.get('jira_bump_only', False)
self.transition = self.rule.get('jira_transition_to', False)
self.watchers = self.rule.get('jira_watchers')
self.client = None
if self.bump_in_statuses and self.bump_not_in_statuses:
msg = 'Both jira_bump_in_statuses (%s) and jira_bump_not_in_statuses (%s) are set.' % \
(','.join(self.bump_in_statuses), ','.join(self.bump_not_in_statuses))
            intersection = list(set(self.bump_in_statuses) & set(self.bump_not_in_statuses))
if intersection:
msg = '%s Both have common statuses of (%s). As such, no tickets will ever be found.' % (
msg, ','.join(intersection))
msg += ' This should be simplified to use only one or the other.'
logging.warning(msg)
self.reset_jira_args()
try:
self.client = JIRA(self.server, basic_auth=(self.user, self.password))
self.get_priorities()
self.jira_fields = self.client.fields()
self.get_arbitrary_fields()
except JIRAError as e:
# JIRAError may contain HTML, pass along only first 1024 chars
raise EAException("Error connecting to JIRA: %s" % (str(e)[:1024])), None, sys.exc_info()[2]
self.set_priority()
def set_priority(self):
try:
if self.priority is not None and self.client is not None:
self.jira_args['priority'] = {'id': self.priority_ids[self.priority]}
except KeyError:
logging.error("Priority %s not found. Valid priorities are %s" % (self.priority, self.priority_ids.keys()))
def reset_jira_args(self):
self.jira_args = {'project': {'key': self.project},
'issuetype': {'name': self.issue_type}}
if self.components:
# Support single component or list
if type(self.components) != list:
self.jira_args['components'] = [{'name': self.components}]
else:
self.jira_args['components'] = [{'name': component} for component in self.components]
if self.labels:
# Support single label or list
if type(self.labels) != list:
self.labels = [self.labels]
self.jira_args['labels'] = self.labels
if self.watchers:
# Support single watcher or list
if type(self.watchers) != list:
self.watchers = [self.watchers]
if self.assignee:
self.jira_args['assignee'] = {'name': self.assignee}
self.set_priority()
def set_jira_arg(self, jira_field, value, fields):
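        """ Sets a single jira_* rule option on jira_args, using the JIRA field schema to pick the correct value format. """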
# Remove the jira_ part. Convert underscores to spaces
normalized_jira_field = jira_field[5:].replace('_', ' ').lower()
# All jira fields should be found in the 'id' or the 'name' field. Therefore, try both just in case
for identifier in ['name', 'id']:
field = next((f for f in fields if normalized_jira_field == f[identifier].replace('_', ' ').lower()), None)
if field:
break
if not field:
# Log a warning to ElastAlert saying that we couldn't find that type?
# OR raise and fail to load the alert entirely? Probably the latter...
raise Exception("Could not find a definition for the jira field '{0}'".format(normalized_jira_field))
arg_name = field['id']
# Check the schema information to decide how to set the value correctly
# If the schema information is not available, raise an exception since we don't know how to set it
# Note this is only the case for two built-in types, id: issuekey and id: thumbnail
        if 'schema' not in field or 'type' not in field['schema']:
raise Exception("Could not determine schema information for the jira field '{0}'".format(normalized_jira_field))
arg_type = field['schema']['type']
# Handle arrays of simple types like strings or numbers
if arg_type == 'array':
# As a convenience, support the scenario wherein the user only provides
# a single value for a multi-value field e.g. jira_labels: Only_One_Label
if type(value) != list:
value = [value]
array_items = field['schema']['items']
# Simple string types
if array_items in ['string', 'date', 'datetime']:
# Special case for multi-select custom types (the JIRA metadata says that these are strings, but
                # in reality, they are required to be provided as an object).
if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling:
self.jira_args[arg_name] = [{'value': v} for v in value]
else:
self.jira_args[arg_name] = value
elif array_items == 'number':
self.jira_args[arg_name] = [int(v) for v in value]
# Also attempt to handle arrays of complex types that have to be passed as objects with an identifier 'key'
elif array_items == 'option':
self.jira_args[arg_name] = [{'value': v} for v in value]
else:
# Try setting it as an object, using 'name' as the key
# This may not work, as the key might actually be 'key', 'id', 'value', or something else
# If it works, great! If not, it will manifest itself as an API error that will bubble up
self.jira_args[arg_name] = [{'name': v} for v in value]
# Handle non-array types
else:
# Simple string types
if arg_type in ['string', 'date', 'datetime']:
# Special case for custom types (the JIRA metadata says that these are strings, but
                # in reality, they are required to be provided as an object).
if 'custom' in field['schema'] and field['schema']['custom'] in self.custom_string_types_with_special_handling:
self.jira_args[arg_name] = {'value': value}
else:
self.jira_args[arg_name] = value
# Number type
elif arg_type == 'number':
self.jira_args[arg_name] = int(value)
elif arg_type == 'option':
self.jira_args[arg_name] = {'value': value}
# Complex type
else:
self.jira_args[arg_name] = {'name': value}
def get_arbitrary_fields(self):
# Clear jira_args
self.reset_jira_args()
for jira_field, value in self.rule.iteritems():
# If we find a field that is not covered by the set that we are aware of, it means it is either:
# 1. A built-in supported field in JIRA that we don't have on our radar
# 2. A custom field that a JIRA admin has configured
if jira_field.startswith('jira_') and jira_field not in self.known_field_list and str(value)[:1] != '#':
self.set_jira_arg(jira_field, value, self.jira_fields)
if jira_field.startswith('jira_') and jira_field not in self.known_field_list and str(value)[:1] == '#':
self.deferred_settings.append(jira_field)
def get_priorities(self):
""" Creates a mapping of priority index to id. """
priorities = self.client.priorities()
self.priority_ids = {}
for x in range(len(priorities)):
self.priority_ids[x] = priorities[x].id
def set_assignee(self, assignee):
self.assignee = assignee
if assignee:
self.jira_args['assignee'] = {'name': assignee}
elif 'assignee' in self.jira_args:
self.jira_args.pop('assignee')
def find_existing_ticket(self, matches):
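        # Look for a recent ticket with a matching summary so it can be bumped instead of opening a duplicate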
# Default title, get stripped search version
if 'alert_subject' not in self.rule:
title = self.create_default_title(matches, True)
else:
title = self.create_title(matches)
if 'jira_ignore_in_title' in self.rule:
title = title.replace(matches[0].get(self.rule['jira_ignore_in_title'], ''), '')
# This is necessary for search to work. Other special characters and dashes
# directly adjacent to words appear to be ok
title = title.replace(' - ', ' ')
title = title.replace('\\', '\\\\')
date = (datetime.datetime.now() - datetime.timedelta(days=self.max_age)).strftime('%Y-%m-%d')
jql = 'project=%s AND summary~"%s" and created >= "%s"' % (self.project, title, date)
if self.bump_in_statuses:
jql = '%s and status in (%s)' % (jql, ','.join(["\"%s\"" % status if ' ' in status else status for status
in self.bump_in_statuses]))
if self.bump_not_in_statuses:
jql = '%s and status not in (%s)' % (jql, ','.join(["\"%s\"" % status if ' ' in status else status
for status in self.bump_not_in_statuses]))
try:
issues = self.client.search_issues(jql)
except JIRAError as e:
logging.exception("Error while searching for JIRA ticket using jql '%s': %s" % (jql, e))
return None
if len(issues):
return issues[0]
def comment_on_ticket(self, ticket, match):
text = unicode(JiraFormattedMatchString(self.rule, match))
timestamp = pretty_ts(lookup_es_key(match, self.rule['timestamp_field']))
comment = "This alert was triggered again at %s\n%s" % (timestamp, text)
self.client.add_comment(ticket, comment)
def transition_ticket(self, ticket):
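        # Apply the workflow transition whose name matches jira_transition_to, if it is available for this ticket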
transitions = self.client.transitions(ticket)
for t in transitions:
if t['name'] == self.transition:
self.client.transition_issue(ticket, t['id'])
def alert(self, matches):
# Reset arbitrary fields to pick up changes
self.get_arbitrary_fields()
if len(self.deferred_settings) > 0:
fields = self.client.fields()
for jira_field in self.deferred_settings:
value = lookup_es_key(matches[0], self.rule[jira_field][1:])
self.set_jira_arg(jira_field, value, fields)
title = self.create_title(matches)
if self.bump_tickets:
ticket = self.find_existing_ticket(matches)
if ticket:
inactivity_datetime = ts_now() - datetime.timedelta(days=self.bump_after_inactivity)
if ts_to_dt(ticket.fields.updated) >= inactivity_datetime:
if self.pipeline is not None:
self.pipeline['jira_ticket'] = None
self.pipeline['jira_server'] = self.server
return None
elastalert_logger.info('Commenting on existing ticket %s' % (ticket.key))
for match in matches:
try:
self.comment_on_ticket(ticket, match)
except JIRAError as e:
logging.exception("Error while commenting on ticket %s: %s" % (ticket, e))
if self.labels:
for l in self.labels:
try:
ticket.fields.labels.append(l)
except JIRAError as e:
logging.exception("Error while appending labels to ticket %s: %s" % (ticket, e))
if self.transition:
elastalert_logger.info('Transitioning existing ticket %s' % (ticket.key))
try:
self.transition_ticket(ticket)
except JIRAError as e:
logging.exception("Error while transitioning ticket %s: %s" % (ticket, e))
if self.pipeline is not None:
self.pipeline['jira_ticket'] = ticket
self.pipeline['jira_server'] = self.server
return None
if self.bump_only:
return None
self.jira_args['summary'] = title
self.jira_args['description'] = self.create_alert_body(matches)
try:
self.issue = self.client.create_issue(**self.jira_args)
            # You cannot add watchers on initial creation, only as a follow-up action
if self.watchers:
for watcher in self.watchers:
try:
self.client.add_watcher(self.issue.key, watcher)
except Exception as ex:
# Re-raise the exception, preserve the stack-trace, and give some
# context as to which watcher failed to be added
raise Exception(
"Exception encountered when trying to add '{0}' as a watcher. Does the user exist?\n{1}" .format(
watcher,
ex
)), None, sys.exc_info()[2]
except JIRAError as e:
raise EAException("Error creating JIRA ticket using jira_args (%s): %s" % (self.jira_args, e))
elastalert_logger.info("Opened Jira ticket: %s" % (self.issue))
if self.pipeline is not None:
self.pipeline['jira_ticket'] = self.issue
self.pipeline['jira_server'] = self.server
def create_alert_body(self, matches):
body = self.description + '\n'
body += self.get_aggregation_summary_text(matches)
if self.rule.get('alert_text_type') != 'aggregation_summary_only':
for match in matches:
body += unicode(JiraFormattedMatchString(self.rule, match))
if len(matches) > 1:
body += '\n----------------------------------------\n'
return body
def get_aggregation_summary_text(self, matches):
text = super(JiraAlerter, self).get_aggregation_summary_text(matches)
if text:
text = u'{{noformat}}{0}{{noformat}}'.format(text)
return text
def create_default_title(self, matches, for_search=False):
# If there is a query_key, use that in the title
if 'query_key' in self.rule and lookup_es_key(matches[0], self.rule['query_key']):
title = 'ElastAlert: %s matched %s' % (lookup_es_key(matches[0], self.rule['query_key']), self.rule['name'])
else:
title = 'ElastAlert: %s' % (self.rule['name'])
if for_search:
return title
title += ' - %s' % (pretty_ts(matches[0][self.rule['timestamp_field']], self.rule.get('use_local_time')))
# Add count for spikes
count = matches[0].get('spike_count')
if count:
title += ' - %s+ events' % (count)
return title
def get_info(self):
return {'type': 'jira'}
class CommandAlerter(Alerter):
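    """ Runs a local command for each alert, optionally piping match data or alert text to its stdin """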
required_options = set(['command'])
def __init__(self, *args):
super(CommandAlerter, self).__init__(*args)
self.last_command = []
self.shell = False
if isinstance(self.rule['command'], basestring):
self.shell = True
if '%' in self.rule['command']:
logging.warning('Warning! You could be vulnerable to shell injection!')
self.rule['command'] = [self.rule['command']]
self.new_style_string_format = False
if 'new_style_string_format' in self.rule and self.rule['new_style_string_format']:
self.new_style_string_format = True
def alert(self, matches):
# Format the command and arguments
try:
command = [resolve_string(command_arg, matches[0]) for command_arg in self.rule['command']]
self.last_command = command
except KeyError as e:
raise EAException("Error formatting command: %s" % (e))
# Run command and pipe data
try:
subp = subprocess.Popen(command, stdin=subprocess.PIPE, shell=self.shell)
if self.rule.get('pipe_match_json'):
match_json = json.dumps(matches, cls=DateTimeEncoder) + '\n'
stdout, stderr = subp.communicate(input=match_json)
elif self.rule.get('pipe_alert_text'):
alert_text = self.create_alert_body(matches)
stdout, stderr = subp.communicate(input=alert_text)
if self.rule.get("fail_on_non_zero_exit", False) and subp.wait():
raise EAException("Non-zero exit code while running command %s" % (' '.join(command)))
except OSError as e:
raise EAException("Error while running command %s: %s" % (' '.join(command), e))
def get_info(self):
return {'type': 'command',
'command': ' '.join(self.last_command)}
class SnsAlerter(Alerter):
""" Send alert using AWS SNS service """
required_options = frozenset(['sns_topic_arn'])
def __init__(self, *args):
super(SnsAlerter, self).__init__(*args)
self.sns_topic_arn = self.rule.get('sns_topic_arn', '')
self.aws_access_key_id = self.rule.get('aws_access_key_id')
self.aws_secret_access_key = self.rule.get('aws_secret_access_key')
self.aws_region = self.rule.get('aws_region', 'us-east-1')
        # 'boto_profile' is deprecated; prefer 'aws_profile' but fall back to the old option for backwards compatibility
        self.profile = self.rule.get('aws_profile', self.rule.get('boto_profile', None))
def create_default_title(self, matches):
subject = 'ElastAlert: %s' % (self.rule['name'])
return subject
def alert(self, matches):
body = self.create_alert_body(matches)
session = boto3.Session(
aws_access_key_id=self.aws_access_key_id,
aws_secret_access_key=self.aws_secret_access_key,
region_name=self.aws_region,
profile_name=self.profile
)
sns_client = session.client('sns')
sns_client.publish(
TopicArn=self.sns_topic_arn,
Message=body,
Subject=self.create_title(matches)
)
elastalert_logger.info("Sent sns notification to %s" % (self.sns_topic_arn))
class HipChatAlerter(Alerter):
""" Creates a HipChat room notification for each alert """
required_options = frozenset(['hipchat_auth_token', 'hipchat_room_id'])
def __init__(self, rule):
super(HipChatAlerter, self).__init__(rule)
self.hipchat_msg_color = self.rule.get('hipchat_msg_color', 'red')
self.hipchat_message_format = self.rule.get('hipchat_message_format', 'html')
self.hipchat_auth_token = self.rule['hipchat_auth_token']
self.hipchat_room_id = self.rule['hipchat_room_id']
self.hipchat_domain = self.rule.get('hipchat_domain', 'api.hipchat.com')
self.hipchat_ignore_ssl_errors = self.rule.get('hipchat_ignore_ssl_errors', False)
self.hipchat_notify = self.rule.get('hipchat_notify', True)
self.hipchat_from = self.rule.get('hipchat_from', '')
self.url = 'https://%s/v2/room/%s/notification?auth_token=%s' % (
self.hipchat_domain, self.hipchat_room_id, self.hipchat_auth_token)
self.hipchat_proxy = self.rule.get('hipchat_proxy', None)
def create_alert_body(self, matches):
body = super(HipChatAlerter, self).create_alert_body(matches)
# HipChat sends 400 bad request on messages longer than 10000 characters
if self.hipchat_message_format == 'html':
# Use appropriate line ending for text/html
br = '<br/>'
body = body.replace('\n', br)
truncated_message = '<br/> ...(truncated)'
truncate_to = 10000 - len(truncated_message)
else:
truncated_message = '..(truncated)'
truncate_to = 10000 - len(truncated_message)
if (len(body) > 9999):
body = body[:truncate_to] + truncated_message
return body
def alert(self, matches):
body = self.create_alert_body(matches)
# Post to HipChat
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.hipchat_proxy} if self.hipchat_proxy else None
payload = {
'color': self.hipchat_msg_color,
'message': body,
'message_format': self.hipchat_message_format,
'notify': self.hipchat_notify,
'from': self.hipchat_from
}
try:
if self.hipchat_ignore_ssl_errors:
requests.packages.urllib3.disable_warnings()
if self.rule.get('hipchat_mentions', []):
ping_users = self.rule.get('hipchat_mentions', [])
ping_msg = payload.copy()
ping_msg['message'] = "ping {}".format(
", ".join("@{}".format(user) for user in ping_users)
)
ping_msg['message_format'] = "text"
response = requests.post(
self.url,
data=json.dumps(ping_msg, cls=DateTimeEncoder),
headers=headers,
verify=not self.hipchat_ignore_ssl_errors,
proxies=proxies)
response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers,
verify=not self.hipchat_ignore_ssl_errors,
proxies=proxies)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to HipChat: %s" % e)
elastalert_logger.info("Alert sent to HipChat room %s" % self.hipchat_room_id)
def get_info(self):
return {'type': 'hipchat',
'hipchat_room_id': self.hipchat_room_id}
class MsTeamsAlerter(Alerter):
""" Creates a Microsoft Teams Conversation Message for each alert """
required_options = frozenset(['ms_teams_webhook_url', 'ms_teams_alert_summary'])
def __init__(self, rule):
super(MsTeamsAlerter, self).__init__(rule)
self.ms_teams_webhook_url = self.rule['ms_teams_webhook_url']
if isinstance(self.ms_teams_webhook_url, basestring):
self.ms_teams_webhook_url = [self.ms_teams_webhook_url]
self.ms_teams_proxy = self.rule.get('ms_teams_proxy', None)
self.ms_teams_alert_summary = self.rule.get('ms_teams_alert_summary', 'ElastAlert Message')
self.ms_teams_alert_fixed_width = self.rule.get('ms_teams_alert_fixed_width', False)
self.ms_teams_theme_color = self.rule.get('ms_teams_theme_color', '')
def format_body(self, body):
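        # When ms_teams_alert_fixed_width is set, wrap each line in a code block so Teams renders it in a fixed-width font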
body = body.encode('UTF-8')
if self.ms_teams_alert_fixed_width:
body = body.replace('`', "'")
body = "```{0}```".format('```\n\n```'.join(x for x in body.split('\n'))).replace('\n``````', '')
return body
def alert(self, matches):
body = self.create_alert_body(matches)
body = self.format_body(body)
# post to Teams
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.ms_teams_proxy} if self.ms_teams_proxy else None
payload = {
'@type': 'MessageCard',
'@context': 'http://schema.org/extensions',
'summary': self.ms_teams_alert_summary,
'title': self.create_title(matches),
'text': body
}
if self.ms_teams_theme_color != '':
payload['themeColor'] = self.ms_teams_theme_color
for url in self.ms_teams_webhook_url:
try:
response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to ms teams: %s" % e)
elastalert_logger.info("Alert sent to MS Teams")
def get_info(self):
return {'type': 'ms_teams',
'ms_teams_webhook_url': self.ms_teams_webhook_url}
class SlackAlerter(Alerter):
""" Creates a Slack room message for each alert """
required_options = frozenset(['slack_webhook_url'])
def __init__(self, rule):
super(SlackAlerter, self).__init__(rule)
self.slack_webhook_url = self.rule['slack_webhook_url']
if isinstance(self.slack_webhook_url, basestring):
self.slack_webhook_url = [self.slack_webhook_url]
self.slack_proxy = self.rule.get('slack_proxy', None)
self.slack_username_override = self.rule.get('slack_username_override', 'elastalert')
self.slack_channel_override = self.rule.get('slack_channel_override', '')
if isinstance(self.slack_channel_override, basestring):
self.slack_channel_override = [self.slack_channel_override]
self.slack_title_link = self.rule.get('slack_title_link', '')
self.slack_title = self.rule.get('slack_title', '')
self.slack_emoji_override = self.rule.get('slack_emoji_override', ':ghost:')
self.slack_icon_url_override = self.rule.get('slack_icon_url_override', '')
self.slack_msg_color = self.rule.get('slack_msg_color', 'danger')
self.slack_parse_override = self.rule.get('slack_parse_override', 'none')
self.slack_text_string = self.rule.get('slack_text_string', '')
self.slack_alert_fields = self.rule.get('slack_alert_fields', '')
self.slack_ignore_ssl_errors = self.rule.get('slack_ignore_ssl_errors', False)
self.slack_timeout = self.rule.get('slack_timeout', 10)
def format_body(self, body):
# https://api.slack.com/docs/formatting
return body.encode('UTF-8')
def get_aggregation_summary_text__maximum_width(self):
width = super(SlackAlerter, self).get_aggregation_summary_text__maximum_width()
# Reduced maximum width for prettier Slack display.
return min(width, 75)
def get_aggregation_summary_text(self, matches):
text = super(SlackAlerter, self).get_aggregation_summary_text(matches)
if text:
text = u'```\n{0}```\n'.format(text)
return text
def populate_fields(self, matches):
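        # Resolve each configured slack_alert_fields entry against the first match so it can be rendered as an attachment field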
alert_fields = []
for arg in self.slack_alert_fields:
arg = copy.copy(arg)
arg['value'] = lookup_es_key(matches[0], arg['value'])
alert_fields.append(arg)
return alert_fields
def alert(self, matches):
body = self.create_alert_body(matches)
body = self.format_body(body)
# post to slack
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.slack_proxy} if self.slack_proxy else None
payload = {
'username': self.slack_username_override,
'parse': self.slack_parse_override,
'text': self.slack_text_string,
'attachments': [
{
'color': self.slack_msg_color,
'title': self.create_title(matches),
'text': body,
'mrkdwn_in': ['text', 'pretext'],
'fields': []
}
]
}
        # if we have defined fields, populate notable fields for the alert
if self.slack_alert_fields != '':
payload['attachments'][0]['fields'] = self.populate_fields(matches)
if self.slack_icon_url_override != '':
payload['icon_url'] = self.slack_icon_url_override
else:
payload['icon_emoji'] = self.slack_emoji_override
if self.slack_title != '':
payload['attachments'][0]['title'] = self.slack_title
if self.slack_title_link != '':
payload['attachments'][0]['title_link'] = self.slack_title_link
for url in self.slack_webhook_url:
for channel_override in self.slack_channel_override:
try:
if self.slack_ignore_ssl_errors:
requests.packages.urllib3.disable_warnings()
payload['channel'] = channel_override
response = requests.post(
url, data=json.dumps(payload, cls=DateTimeEncoder),
headers=headers, verify=not self.slack_ignore_ssl_errors,
proxies=proxies,
timeout=self.slack_timeout)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to slack: %s" % e)
elastalert_logger.info("Alert '%s' sent to Slack" % self.rule['name'])
def get_info(self):
return {'type': 'slack',
'slack_username_override': self.slack_username_override}
class MattermostAlerter(Alerter):
""" Creates a Mattermsot post for each alert """
required_options = frozenset(['mattermost_webhook_url'])
def __init__(self, rule):
super(MattermostAlerter, self).__init__(rule)
# HTTP config
self.mattermost_webhook_url = self.rule['mattermost_webhook_url']
if isinstance(self.mattermost_webhook_url, basestring):
self.mattermost_webhook_url = [self.mattermost_webhook_url]
self.mattermost_proxy = self.rule.get('mattermost_proxy', None)
self.mattermost_ignore_ssl_errors = self.rule.get('mattermost_ignore_ssl_errors', False)
# Override webhook config
self.mattermost_username_override = self.rule.get('mattermost_username_override', 'elastalert')
self.mattermost_channel_override = self.rule.get('mattermost_channel_override', '')
self.mattermost_icon_url_override = self.rule.get('mattermost_icon_url_override', '')
# Message properties
self.mattermost_msg_pretext = self.rule.get('mattermost_msg_pretext', '')
self.mattermost_msg_color = self.rule.get('mattermost_msg_color', 'danger')
self.mattermost_msg_fields = self.rule.get('mattermost_msg_fields', '')
def get_aggregation_summary_text__maximum_width(self):
width = super(MattermostAlerter, self).get_aggregation_summary_text__maximum_width()
# Reduced maximum width for prettier Mattermost display.
return min(width, 75)
def get_aggregation_summary_text(self, matches):
text = super(MattermostAlerter, self).get_aggregation_summary_text(matches)
if text:
text = u'```\n{0}```\n'.format(text)
return text
def populate_fields(self, matches):
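        # Build Mattermost attachment fields, formatting any 'args' templates with values looked up from the first match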
alert_fields = []
missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
for field in self.mattermost_msg_fields:
field = copy.copy(field)
if 'args' in field:
args_values = [lookup_es_key(matches[0], arg) or missing for arg in field['args']]
if 'value' in field:
field['value'] = field['value'].format(*args_values)
else:
field['value'] = "\n".join(str(arg) for arg in args_values)
del(field['args'])
alert_fields.append(field)
return alert_fields
def alert(self, matches):
body = self.create_alert_body(matches)
title = self.create_title(matches)
# post to mattermost
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.mattermost_proxy} if self.mattermost_proxy else None
payload = {
'attachments': [
{
'fallback': "{0}: {1}".format(title, self.mattermost_msg_pretext),
'color': self.mattermost_msg_color,
'title': title,
'pretext': self.mattermost_msg_pretext,
'fields': []
}
]
}
if self.rule.get('alert_text_type') == 'alert_text_only':
payload['attachments'][0]['text'] = body
else:
payload['text'] = body
if self.mattermost_msg_fields != '':
payload['attachments'][0]['fields'] = self.populate_fields(matches)
if self.mattermost_icon_url_override != '':
payload['icon_url'] = self.mattermost_icon_url_override
if self.mattermost_username_override != '':
payload['username'] = self.mattermost_username_override
if self.mattermost_channel_override != '':
payload['channel'] = self.mattermost_channel_override
for url in self.mattermost_webhook_url:
try:
if self.mattermost_ignore_ssl_errors:
                    requests.packages.urllib3.disable_warnings()
response = requests.post(
url, data=json.dumps(payload, cls=DateTimeEncoder),
headers=headers, verify=not self.mattermost_ignore_ssl_errors,
proxies=proxies)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Mattermost: %s" % e)
elastalert_logger.info("Alert sent to Mattermost")
def get_info(self):
return {'type': 'mattermost',
'mattermost_username_override': self.mattermost_username_override,
'mattermost_webhook_url': self.mattermost_webhook_url}
class PagerDutyAlerter(Alerter):
""" Create an incident on PagerDuty for each alert """
required_options = frozenset(['pagerduty_service_key', 'pagerduty_client_name'])
def __init__(self, rule):
super(PagerDutyAlerter, self).__init__(rule)
self.pagerduty_service_key = self.rule['pagerduty_service_key']
self.pagerduty_client_name = self.rule['pagerduty_client_name']
self.pagerduty_incident_key = self.rule.get('pagerduty_incident_key', '')
self.pagerduty_incident_key_args = self.rule.get('pagerduty_incident_key_args', None)
self.pagerduty_event_type = self.rule.get('pagerduty_event_type', 'trigger')
self.pagerduty_proxy = self.rule.get('pagerduty_proxy', None)
self.pagerduty_api_version = self.rule.get('pagerduty_api_version', 'v1')
self.pagerduty_v2_payload_class = self.rule.get('pagerduty_v2_payload_class', '')
self.pagerduty_v2_payload_class_args = self.rule.get('pagerduty_v2_payload_class_args', None)
self.pagerduty_v2_payload_component = self.rule.get('pagerduty_v2_payload_component', '')
self.pagerduty_v2_payload_component_args = self.rule.get('pagerduty_v2_payload_component_args', None)
self.pagerduty_v2_payload_group = self.rule.get('pagerduty_v2_payload_group', '')
self.pagerduty_v2_payload_group_args = self.rule.get('pagerduty_v2_payload_group_args', None)
self.pagerduty_v2_payload_severity = self.rule.get('pagerduty_v2_payload_severity', 'critical')
self.pagerduty_v2_payload_source = self.rule.get('pagerduty_v2_payload_source', 'ElastAlert')
self.pagerduty_v2_payload_source_args = self.rule.get('pagerduty_v2_payload_source_args', None)
if self.pagerduty_api_version == 'v2':
self.url = 'https://events.pagerduty.com/v2/enqueue'
else:
self.url = 'https://events.pagerduty.com/generic/2010-04-15/create_event.json'
def alert(self, matches):
body = self.create_alert_body(matches)
# post to pagerduty
headers = {'content-type': 'application/json'}
if self.pagerduty_api_version == 'v2':
payload = {
'routing_key': self.pagerduty_service_key,
'event_action': self.pagerduty_event_type,
'dedup_key': self.get_incident_key(matches),
'client': self.pagerduty_client_name,
'payload': {
'class': self.resolve_formatted_key(self.pagerduty_v2_payload_class,
self.pagerduty_v2_payload_class_args,
matches),
'component': self.resolve_formatted_key(self.pagerduty_v2_payload_component,
self.pagerduty_v2_payload_component_args,
matches),
'group': self.resolve_formatted_key(self.pagerduty_v2_payload_group,
self.pagerduty_v2_payload_group_args,
matches),
'severity': self.pagerduty_v2_payload_severity,
'source': self.resolve_formatted_key(self.pagerduty_v2_payload_source,
self.pagerduty_v2_payload_source_args,
matches),
'summary': self.create_title(matches),
'custom_details': {
'information': body.encode('UTF-8'),
},
},
}
else:
payload = {
'service_key': self.pagerduty_service_key,
'description': self.create_title(matches),
'event_type': self.pagerduty_event_type,
'incident_key': self.get_incident_key(matches),
'client': self.pagerduty_client_name,
'details': {
"information": body.encode('UTF-8'),
},
}
# set https proxy, if it was provided
proxies = {'https': self.pagerduty_proxy} if self.pagerduty_proxy else None
try:
response = requests.post(
self.url,
data=json.dumps(payload, cls=DateTimeEncoder, ensure_ascii=False),
headers=headers,
proxies=proxies
)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to pagerduty: %s" % e)
if self.pagerduty_event_type == 'trigger':
elastalert_logger.info("Trigger sent to PagerDuty")
elif self.pagerduty_event_type == 'resolve':
elastalert_logger.info("Resolve sent to PagerDuty")
elif self.pagerduty_event_type == 'acknowledge':
elastalert_logger.info("acknowledge sent to PagerDuty")
def resolve_formatted_key(self, key, args, matches):
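        # Format a v2 payload template using values from the match, falling back to rule-level properties or the missing-value placeholder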
if args:
key_values = [lookup_es_key(matches[0], arg) for arg in args]
# Populate values with rule level properties too
for i in range(len(key_values)):
if key_values[i] is None:
key_value = self.rule.get(args[i])
if key_value:
key_values[i] = key_value
missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
key_values = [missing if val is None else val for val in key_values]
return key.format(*key_values)
else:
return key
def get_incident_key(self, matches):
if self.pagerduty_incident_key_args:
incident_key_values = [lookup_es_key(matches[0], arg) for arg in self.pagerduty_incident_key_args]
# Populate values with rule level properties too
for i in range(len(incident_key_values)):
if incident_key_values[i] is None:
key_value = self.rule.get(self.pagerduty_incident_key_args[i])
if key_value:
incident_key_values[i] = key_value
missing = self.rule.get('alert_missing_value', '<MISSING VALUE>')
incident_key_values = [missing if val is None else val for val in incident_key_values]
return self.pagerduty_incident_key.format(*incident_key_values)
else:
return self.pagerduty_incident_key
def get_info(self):
return {'type': 'pagerduty',
'pagerduty_client_name': self.pagerduty_client_name}
class PagerTreeAlerter(Alerter):
""" Creates a PagerTree Incident for each alert """
required_options = frozenset(['pagertree_integration_url'])
def __init__(self, rule):
super(PagerTreeAlerter, self).__init__(rule)
self.url = self.rule['pagertree_integration_url']
self.pagertree_proxy = self.rule.get('pagertree_proxy', None)
def alert(self, matches):
# post to pagertree
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.pagertree_proxy} if self.pagertree_proxy else None
payload = {
"event_type": "create",
"Id": str(uuid.uuid4()),
"Title": self.create_title(matches),
"Description": self.create_alert_body(matches)
}
try:
response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to PagerTree: %s" % e)
elastalert_logger.info("Trigger sent to PagerTree")
def get_info(self):
return {'type': 'pagertree',
'pagertree_integration_url': self.url}
class ExotelAlerter(Alerter):
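    """ Sends an SMS through the Exotel API for each alert """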
required_options = frozenset(['exotel_account_sid', 'exotel_auth_token', 'exotel_to_number', 'exotel_from_number'])
def __init__(self, rule):
super(ExotelAlerter, self).__init__(rule)
self.exotel_account_sid = self.rule['exotel_account_sid']
self.exotel_auth_token = self.rule['exotel_auth_token']
self.exotel_to_number = self.rule['exotel_to_number']
self.exotel_from_number = self.rule['exotel_from_number']
self.sms_body = self.rule.get('exotel_message_body', '')
def alert(self, matches):
client = Exotel(self.exotel_account_sid, self.exotel_auth_token)
try:
message_body = self.rule['name'] + self.sms_body
response = client.sms(self.rule['exotel_from_number'], self.rule['exotel_to_number'], message_body)
if response != 200:
raise EAException("Error posting to Exotel, response code is %s" % response)
except RequestException:
raise EAException("Error posting to Exotel"), None, sys.exc_info()[2]
elastalert_logger.info("Trigger sent to Exotel")
def get_info(self):
return {'type': 'exotel', 'exotel_account': self.exotel_account_sid}
class TwilioAlerter(Alerter):
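    """ Sends an SMS through the Twilio API for each alert """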
required_options = frozenset(['twilio_account_sid', 'twilio_auth_token', 'twilio_to_number', 'twilio_from_number'])
def __init__(self, rule):
super(TwilioAlerter, self).__init__(rule)
self.twilio_account_sid = self.rule['twilio_account_sid']
self.twilio_auth_token = self.rule['twilio_auth_token']
self.twilio_to_number = self.rule['twilio_to_number']
self.twilio_from_number = self.rule['twilio_from_number']
def alert(self, matches):
client = TwilioClient(self.twilio_account_sid, self.twilio_auth_token)
try:
client.messages.create(body=self.rule['name'],
to=self.twilio_to_number,
from_=self.twilio_from_number)
except TwilioRestException as e:
raise EAException("Error posting to twilio: %s" % e)
elastalert_logger.info("Trigger sent to Twilio")
def get_info(self):
return {'type': 'twilio',
'twilio_client_name': self.twilio_from_number}
class VictorOpsAlerter(Alerter):
""" Creates a VictorOps Incident for each alert """
required_options = frozenset(['victorops_api_key', 'victorops_routing_key', 'victorops_message_type'])
def __init__(self, rule):
super(VictorOpsAlerter, self).__init__(rule)
self.victorops_api_key = self.rule['victorops_api_key']
self.victorops_routing_key = self.rule['victorops_routing_key']
self.victorops_message_type = self.rule['victorops_message_type']
self.victorops_entity_id = self.rule.get('victorops_entity_id', None)
self.victorops_entity_display_name = self.rule.get('victorops_entity_display_name', 'no entity display name')
self.url = 'https://alert.victorops.com/integrations/generic/20131114/alert/%s/%s' % (
self.victorops_api_key, self.victorops_routing_key)
self.victorops_proxy = self.rule.get('victorops_proxy', None)
def alert(self, matches):
body = self.create_alert_body(matches)
# post to victorops
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.victorops_proxy} if self.victorops_proxy else None
payload = {
"message_type": self.victorops_message_type,
"entity_display_name": self.victorops_entity_display_name,
"monitoring_tool": "ElastAlert",
"state_message": body
}
if self.victorops_entity_id:
payload["entity_id"] = self.victorops_entity_id
try:
response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to VictorOps: %s" % e)
elastalert_logger.info("Trigger sent to VictorOps")
def get_info(self):
return {'type': 'victorops',
'victorops_routing_key': self.victorops_routing_key}
class TelegramAlerter(Alerter):
""" Send a Telegram message via bot api for each alert """
required_options = frozenset(['telegram_bot_token', 'telegram_room_id'])
def __init__(self, rule):
super(TelegramAlerter, self).__init__(rule)
self.telegram_bot_token = self.rule['telegram_bot_token']
self.telegram_room_id = self.rule['telegram_room_id']
self.telegram_api_url = self.rule.get('telegram_api_url', 'api.telegram.org')
self.url = 'https://%s/bot%s/%s' % (self.telegram_api_url, self.telegram_bot_token, "sendMessage")
self.telegram_proxy = self.rule.get('telegram_proxy', None)
self.telegram_proxy_login = self.rule.get('telegram_proxy_login', None)
self.telegram_proxy_password = self.rule.get('telegram_proxy_pass', None)
def alert(self, matches):
body = u'⚠ *%s* ⚠ ```\n' % (self.create_title(matches))
for match in matches:
body += unicode(BasicMatchString(self.rule, match))
# Separate text of aggregated alerts with dashes
if len(matches) > 1:
body += '\n----------------------------------------\n'
if len(body) > 4095:
body = body[0:4000] + u"\n⚠ *message was cropped according to telegram limits!* ⚠"
body += u' ```'
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.telegram_proxy} if self.telegram_proxy else None
auth = HTTPProxyAuth(self.telegram_proxy_login, self.telegram_proxy_password) if self.telegram_proxy_login else None
payload = {
'chat_id': self.telegram_room_id,
'text': body,
'parse_mode': 'markdown',
'disable_web_page_preview': True
}
try:
response = requests.post(self.url, data=json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies, auth=auth)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Telegram: %s. Details: %s" % (e, "" if e.response is None else e.response.text))
elastalert_logger.info(
"Alert sent to Telegram room %s" % self.telegram_room_id)
def get_info(self):
return {'type': 'telegram',
'telegram_room_id': self.telegram_room_id}
class GoogleChatAlerter(Alerter):
""" Send a notification via Google Chat webhooks """
required_options = frozenset(['googlechat_webhook_url'])
def __init__(self, rule):
super(GoogleChatAlerter, self).__init__(rule)
self.googlechat_webhook_url = self.rule['googlechat_webhook_url']
if isinstance(self.googlechat_webhook_url, basestring):
self.googlechat_webhook_url = [self.googlechat_webhook_url]
self.googlechat_format = self.rule.get('googlechat_format', 'basic')
self.googlechat_header_title = self.rule.get('googlechat_header_title', None)
self.googlechat_header_subtitle = self.rule.get('googlechat_header_subtitle', None)
self.googlechat_header_image = self.rule.get('googlechat_header_image', None)
self.googlechat_footer_kibanalink = self.rule.get('googlechat_footer_kibanalink', None)
def create_header(self):
header = None
if self.googlechat_header_title:
header = {
"title": self.googlechat_header_title,
"subtitle": self.googlechat_header_subtitle,
"imageUrl": self.googlechat_header_image
}
return header
def create_footer(self):
footer = None
if self.googlechat_footer_kibanalink:
footer = {"widgets": [{
"buttons": [{
"textButton": {
"text": "VISIT KIBANA",
"onClick": {
"openLink": {
"url": self.googlechat_footer_kibanalink
}
}
}
}]
}]
}
return footer
def create_card(self, matches):
card = {"cards": [{
"sections": [{
"widgets": [
{"textParagraph": {"text": self.create_alert_body(matches).encode('UTF-8')}}
]}
]}
]}
# Add the optional header
header = self.create_header()
if header:
card['cards'][0]['header'] = header
# Add the optional footer
footer = self.create_footer()
if footer:
card['cards'][0]['sections'].append(footer)
return card
def create_basic(self, matches):
body = self.create_alert_body(matches)
body = body.encode('UTF-8')
return {'text': body}
def alert(self, matches):
# Format message
if self.googlechat_format == 'card':
message = self.create_card(matches)
else:
message = self.create_basic(matches)
# Post to webhook
headers = {'content-type': 'application/json'}
for url in self.googlechat_webhook_url:
try:
response = requests.post(url, data=json.dumps(message), headers=headers)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to google chat: {}".format(e))
elastalert_logger.info("Alert sent to Google Chat!")
def get_info(self):
return {'type': 'googlechat',
'googlechat_webhook_url': self.googlechat_webhook_url}
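# --- Illustrative sketch (not part of the original module) ---
# Shape of the card payload that create_card() assembles when the optional header
# fields are set on a rule; the field names mirror the code above, while the
# literal values are made-up placeholders.
_example_googlechat_card = {
    "cards": [{
        "header": {
            "title": "ElastAlert",                     # googlechat_header_title
            "subtitle": "Example rule",                # googlechat_header_subtitle
            "imageUrl": "https://example.com/x.png",   # googlechat_header_image
        },
        "sections": [{
            "widgets": [
                {"textParagraph": {"text": "alert body goes here"}}
            ]
        }],
    }]
}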
class GitterAlerter(Alerter):
""" Creates a Gitter activity message for each alert """
required_options = frozenset(['gitter_webhook_url'])
def __init__(self, rule):
super(GitterAlerter, self).__init__(rule)
self.gitter_webhook_url = self.rule['gitter_webhook_url']
self.gitter_proxy = self.rule.get('gitter_proxy', None)
self.gitter_msg_level = self.rule.get('gitter_msg_level', 'error')
def alert(self, matches):
body = self.create_alert_body(matches)
# post to Gitter
headers = {'content-type': 'application/json'}
# set https proxy, if it was provided
proxies = {'https': self.gitter_proxy} if self.gitter_proxy else None
payload = {
'message': body,
'level': self.gitter_msg_level
}
try:
response = requests.post(self.gitter_webhook_url, json.dumps(payload, cls=DateTimeEncoder), headers=headers, proxies=proxies)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Gitter: %s" % e)
elastalert_logger.info("Alert sent to Gitter")
def get_info(self):
return {'type': 'gitter',
'gitter_webhook_url': self.gitter_webhook_url}
class ServiceNowAlerter(Alerter):
""" Creates a ServiceNow alert """
required_options = set([
'username',
'password',
'servicenow_rest_url',
'short_description',
'comments',
'assignment_group',
'category',
'subcategory',
'cmdb_ci',
'caller_id'
])
def __init__(self, rule):
super(ServiceNowAlerter, self).__init__(rule)
self.servicenow_rest_url = self.rule['servicenow_rest_url']
self.servicenow_proxy = self.rule.get('servicenow_proxy', None)
def alert(self, matches):
for match in matches:
# Parse everything into description.
description = str(BasicMatchString(self.rule, match))
# Set proper headers
headers = {
"Content-Type": "application/json",
"Accept": "application/json;charset=utf-8"
}
proxies = {'https': self.servicenow_proxy} if self.servicenow_proxy else None
payload = {
"description": description,
"short_description": self.rule['short_description'],
"comments": self.rule['comments'],
"assignment_group": self.rule['assignment_group'],
"category": self.rule['category'],
"subcategory": self.rule['subcategory'],
"cmdb_ci": self.rule['cmdb_ci'],
"caller_id": self.rule["caller_id"]
}
try:
response = requests.post(
self.servicenow_rest_url,
auth=(self.rule['username'], self.rule['password']),
headers=headers,
data=json.dumps(payload, cls=DateTimeEncoder),
proxies=proxies
)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to ServiceNow: %s" % e)
elastalert_logger.info("Alert sent to ServiceNow")
def get_info(self):
return {'type': 'ServiceNow',
'servicenow_rest_url': self.servicenow_rest_url}
class AlertaAlerter(Alerter):
""" Creates an Alerta event for each alert """
required_options = frozenset(['alerta_api_url'])
def __init__(self, rule):
super(AlertaAlerter, self).__init__(rule)
# Set up default parameters
self.url = self.rule.get('alerta_api_url', None)
self.api_key = self.rule.get('alerta_api_key', None)
self.timeout = self.rule.get('alerta_timeout', 86400)
self.use_match_timestamp = self.rule.get('alerta_use_match_timestamp', False)
self.use_qk_as_resource = self.rule.get('alerta_use_qk_as_resource', False)
self.verify_ssl = not self.rule.get('alerta_api_skip_ssl', False)
self.missing_text = self.rule.get('alert_missing_value', '<MISSING VALUE>')
# Fill up default values of the API JSON payload
self.severity = self.rule.get('alerta_severity', 'warning')
self.resource = self.rule.get('alerta_resource', 'elastalert')
self.environment = self.rule.get('alerta_environment', 'Production')
self.origin = self.rule.get('alerta_origin', 'elastalert')
self.service = self.rule.get('alerta_service', ['elastalert'])
self.text = self.rule.get('alerta_text', 'elastalert')
self.type = self.rule.get('alerta_type', 'elastalert')
self.event = self.rule.get('alerta_event', 'elastalert')
self.correlate = self.rule.get('alerta_correlate', [])
self.tags = self.rule.get('alerta_tags', [])
self.group = self.rule.get('alerta_group', '')
self.attributes_keys = self.rule.get('alerta_attributes_keys', [])
self.attributes_values = self.rule.get('alerta_attributes_values', [])
self.value = self.rule.get('alerta_value', '')
def alert(self, matches):
# Override the resource if requested
if self.use_qk_as_resource and 'query_key' in self.rule and lookup_es_key(matches[0], self.rule['query_key']):
self.resource = lookup_es_key(matches[0], self.rule['query_key'])
headers = {'content-type': 'application/json'}
if self.api_key is not None:
headers['Authorization'] = 'Key %s' % (self.rule['alerta_api_key'])
alerta_payload = self.get_json_payload(matches[0])
try:
response = requests.post(self.url, data=alerta_payload, headers=headers, verify=self.verify_ssl)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Alerta: %s" % e)
elastalert_logger.info("Alert sent to Alerta")
def create_default_title(self, matches):
title = '%s' % (self.rule['name'])
# If the rule has a query_key, add that value
if 'query_key' in self.rule:
qk = matches[0].get(self.rule['query_key'])
if qk:
title += '.%s' % (qk)
return title
def get_info(self):
return {'type': 'alerta',
'alerta_url': self.url}
def get_json_payload(self, match):
"""
Builds the API Create Alert body, as in
http://alerta.readthedocs.io/en/latest/api/reference.html#create-an-alert
For the values that could have references to fields on the match, resolve those references.
"""
# Using default text and event title if not defined in rule
alerta_text = self.rule['type'].get_match_str([match]) if self.text == '' else resolve_string(self.text, match, self.missing_text)
alerta_event = self.create_default_title([match]) if self.event == '' else resolve_string(self.event, match, self.missing_text)
match_timestamp = lookup_es_key(match, self.rule.get('timestamp_field', '@timestamp'))
if match_timestamp is None:
match_timestamp = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
if self.use_match_timestamp:
createTime = ts_to_dt(match_timestamp).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
else:
createTime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
alerta_payload_dict = {
'resource': resolve_string(self.resource, match, self.missing_text),
'severity': self.severity,
'timeout': self.timeout,
'createTime': createTime,
'type': self.type,
'environment': resolve_string(self.environment, match, self.missing_text),
'origin': resolve_string(self.origin, match, self.missing_text),
'group': resolve_string(self.group, match, self.missing_text),
'event': alerta_event,
'text': alerta_text,
'value': resolve_string(self.value, match, self.missing_text),
'service': [resolve_string(a_service, match, self.missing_text) for a_service in self.service],
'tags': [resolve_string(a_tag, match, self.missing_text) for a_tag in self.tags],
'correlate': [resolve_string(an_event, match, self.missing_text) for an_event in self.correlate],
'attributes': dict(zip(self.attributes_keys,
[resolve_string(a_value, match, self.missing_text) for a_value in self.attributes_values])),
'rawData': self.create_alert_body([match]),
}
try:
payload = json.dumps(alerta_payload_dict, cls=DateTimeEncoder)
except Exception as e:
raise Exception("Error building Alerta request: %s" % e)
return payload
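# --- Illustrative sketch (not part of the original module) ---
# How alerta_attributes_keys / alerta_attributes_values pair up into the
# 'attributes' dict of the payload built above; the key/value strings are
# invented examples, the zip() behaviour is exactly what get_json_payload() does.
_example_keys = ['customer', 'environment_tag']
_example_values = ['acme', 'prod']
_example_attributes = dict(zip(_example_keys, _example_values))
# -> {'customer': 'acme', 'environment_tag': 'prod'}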
class HTTPPostAlerter(Alerter):
""" Requested elasticsearch indices are sent by HTTP POST. Encoded with JSON. """
def __init__(self, rule):
super(HTTPPostAlerter, self).__init__(rule)
post_url = self.rule.get('http_post_url')
if isinstance(post_url, basestring):
post_url = [post_url]
self.post_url = post_url
self.post_proxy = self.rule.get('http_post_proxy')
self.post_payload = self.rule.get('http_post_payload', {})
self.post_static_payload = self.rule.get('http_post_static_payload', {})
self.post_all_values = self.rule.get('http_post_all_values', not self.post_payload)
self.post_http_headers = self.rule.get('http_post_headers', {})
self.timeout = self.rule.get('http_post_timeout', 10)
def alert(self, matches):
""" Each match will trigger a POST to the specified endpoint(s). """
for match in matches:
payload = match if self.post_all_values else {}
payload.update(self.post_static_payload)
for post_key, es_key in self.post_payload.items():
payload[post_key] = lookup_es_key(match, es_key)
headers = {
"Content-Type": "application/json",
"Accept": "application/json;charset=utf-8"
}
headers.update(self.post_http_headers)
proxies = {'https': self.post_proxy} if self.post_proxy else None
for url in self.post_url:
try:
response = requests.post(url, data=json.dumps(payload, cls=DateTimeEncoder),
headers=headers, proxies=proxies, timeout=self.timeout)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting HTTP Post alert: %s" % e)
elastalert_logger.info("HTTP Post alert sent.")
def get_info(self):
return {'type': 'http_post',
'http_post_webhook_url': self.post_url}
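# --- Illustrative sketch (not part of the original module) ---
# Simplified view of how the POST body is assembled for one match: http_post_payload
# maps the key used in the outgoing JSON to a field of the match, and
# http_post_static_payload is merged in unchanged. The real code resolves nested
# fields with lookup_es_key(); a flat dict .get() stands in for it here, and all
# names and values are invented examples.
_example_match = {'hostname': 'web-01', '@timestamp': '2021-01-01T00:00:00Z'}
_example_post_payload = {'host': 'hostname'}        # post_key -> es_key
_example_static_payload = {'source': 'elastalert'}
_example_body = dict(_example_static_payload)
for _post_key, _es_key in _example_post_payload.items():
    _example_body[_post_key] = _example_match.get(_es_key)
# -> {'source': 'elastalert', 'host': 'web-01'}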
class StrideHTMLParser(HTMLParser):
"""Parse html into stride's fabric structure"""
def __init__(self):
"""
Define a couple markup place holders.
"""
self.content = []
self.mark = None
HTMLParser.__init__(self)
def handle_starttag(self, tag, attrs):
"""Identify and verify starting tag is fabric compatible."""
if tag == 'b' or tag == 'strong':
self.mark = dict(type='strong')
if tag == 'u':
self.mark = dict(type='underline')
if tag == 'a':
self.mark = dict(type='link', attrs=dict(attrs))
def handle_endtag(self, tag):
"""Clear mark on endtag."""
self.mark = None
def handle_data(self, data):
"""Construct data node for our data."""
node = dict(type='text', text=data)
if self.mark:
node['marks'] = [self.mark]
self.content.append(node)
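# --- Illustrative sketch (not part of the original module) ---
# Feeding a small HTML fragment through StrideHTMLParser yields the fabric-style
# text nodes that StrideAlerter embeds in its payload below.
_example_parser = StrideHTMLParser()
_example_parser.feed('<b>3 hits</b> for rule X')
# _example_parser.content is now roughly:
# [{'type': 'text', 'text': '3 hits', 'marks': [{'type': 'strong'}]},
#  {'type': 'text', 'text': ' for rule X'}]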
class StrideAlerter(Alerter):
""" Creates a Stride conversation message for each alert """
required_options = frozenset(
['stride_access_token', 'stride_cloud_id', 'stride_conversation_id'])
def __init__(self, rule):
super(StrideAlerter, self).__init__(rule)
self.stride_access_token = self.rule['stride_access_token']
self.stride_cloud_id = self.rule['stride_cloud_id']
self.stride_conversation_id = self.rule['stride_conversation_id']
self.stride_ignore_ssl_errors = self.rule.get('stride_ignore_ssl_errors', False)
self.stride_proxy = self.rule.get('stride_proxy', None)
self.url = 'https://api.atlassian.com/site/%s/conversation/%s/message' % (
self.stride_cloud_id, self.stride_conversation_id)
def alert(self, matches):
body = self.create_alert_body(matches).strip()
# parse body with StrideHTMLParser
parser = StrideHTMLParser()
parser.feed(body)
# Post to Stride
headers = {
'content-type': 'application/json',
'Authorization': 'Bearer {}'.format(self.stride_access_token)
}
# set https proxy, if it was provided
proxies = {'https': self.stride_proxy} if self.stride_proxy else None
# build stride json payload
# https://developer.atlassian.com/cloud/stride/apis/document/structure/
payload = {'body': {'version': 1, 'type': "doc", 'content': [
{'type': "panel", 'attrs': {'panelType': "warning"}, 'content': [
{'type': 'paragraph', 'content': parser.content}
]}
]}}
try:
if self.stride_ignore_ssl_errors:
requests.packages.urllib3.disable_warnings()
response = requests.post(
self.url, data=json.dumps(payload, cls=DateTimeEncoder),
headers=headers, verify=not self.stride_ignore_ssl_errors,
proxies=proxies)
warnings.resetwarnings()
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Stride: %s" % e)
elastalert_logger.info(
"Alert sent to Stride conversation %s" % self.stride_conversation_id)
def get_info(self):
return {'type': 'stride',
'stride_cloud_id': self.stride_cloud_id,
'stride_conversation_id': self.stride_conversation_id}
class LineNotifyAlerter(Alerter):
""" Created a Line Notify for each alert """
required_option = frozenset(["linenotify_access_token"])
def __init__(self, rule):
super(LineNotifyAlerter, self).__init__(rule)
self.linenotify_access_token = self.rule["linenotify_access_token"]
def alert(self, matches):
body = self.create_alert_body(matches)
# post to Line Notify
headers = {
"Content-Type": "application/x-www-form-urlencoded",
"Authorization": "Bearer {}".format(self.linenotify_access_token)
}
payload = {
"message": body
}
try:
response = requests.post("https://notify-api.line.me/api/notify", data=payload, headers=headers)
response.raise_for_status()
except RequestException as e:
raise EAException("Error posting to Line Notify: %s" % e)
elastalert_logger.info("Alert sent to Line Notify")
def get_info(self):
return {"type": "linenotify", "linenotify_access_token": self.linenotify_access_token}
class HiveAlerter(Alerter):
"""
Use matched data to create alerts containing observables in an instance of TheHive
"""
required_options = set(['hive_connection', 'hive_alert_config'])
def alert(self, matches):
connection_details = self.rule['hive_connection']
api = TheHiveApi(
'{hive_host}:{hive_port}'.format(**connection_details),
connection_details.get('hive_apikey', ''),
proxies=connection_details.get('hive_proxies', {'http': '', 'https': ''}),
cert=connection_details.get('hive_verify', False))
for match in matches:
context = {'rule': self.rule, 'match': match}
artifacts = []
for mapping in self.rule.get('hive_observable_data_mapping', []):
for observable_type, match_data_key in mapping.iteritems():
try:
match_data_keys = re.findall(r'\{match\[([^\]]*)\]', match_data_key)
rule_data_keys = re.findall(r'\{rule\[([^\]]*)\]', match_data_key)
data_keys = match_data_keys + rule_data_keys
context_keys = context['match'].keys() + context['rule'].keys()
if all(k in context_keys for k in data_keys):
artifacts.append(AlertArtifact(dataType=observable_type, data=match_data_key.format(**context)))
except KeyError:
raise KeyError('\nformat string\n{}\nmatch data\n{}'.format(match_data_key, context))
alert_config = {
'artifacts': artifacts,
'sourceRef': str(uuid.uuid4())[0:6],
'title': '{rule[index]}_{rule[name]}'.format(**context)
}
alert_config.update(self.rule.get('hive_alert_config', {}))
for alert_config_field, alert_config_value in alert_config.iteritems():
if alert_config_field == 'customFields':
custom_fields = CustomFieldHelper()
for cf_key, cf_value in alert_config_value.iteritems():
try:
func = getattr(custom_fields, 'add_{}'.format(cf_value['type']))
except AttributeError:
raise Exception('unsupported custom field type {}'.format(cf_value['type']))
value = cf_value['value'].format(**context)
func(cf_key, value)
alert_config[alert_config_field] = custom_fields.build()
elif isinstance(alert_config_value, basestring):
alert_config[alert_config_field] = alert_config_value.format(**context)
elif isinstance(alert_config_value, (list, tuple)):
formatted_list = []
for element in alert_config_value:
try:
formatted_list.append(element.format(**context))
except (AttributeError, KeyError, IndexError):
formatted_list.append(element)
alert_config[alert_config_field] = formatted_list
alert = Alert(**alert_config)
response = api.create_alert(alert)
if response.status_code != 201:
raise Exception('alert not successfully created in TheHive\n{}'.format(response.text))
def get_info(self):
return {
'type': 'hivealerter',
'hive_host': self.rule.get('hive_connection', {}).get('hive_host', '')
}
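# --- Illustrative sketch (not part of the original module) ---
# How one hive_observable_data_mapping entry is resolved: the format string may
# reference match and rule fields, and it is only used when every referenced key
# exists in the current context. The names and values below are invented examples;
# the regexes and format() call are the same as in HiveAlerter.alert() above.
import re
_example_context = {'rule': {'name': 'ssh-bruteforce'}, 'match': {'src_ip': '10.0.0.5'}}
_example_template = '{match[src_ip]}'
_example_match_keys = re.findall(r'\{match\[([^\]]*)\]', _example_template)   # ['src_ip']
_example_rule_keys = re.findall(r'\{rule\[([^\]]*)\]', _example_template)     # []
if all(k in list(_example_context['match']) + list(_example_context['rule'])
       for k in _example_match_keys + _example_rule_keys):
    _example_observable = _example_template.format(**_example_context)        # '10.0.0.5'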
| 44.210138 | 138 | 0.607926 |
b7b56c7729b8239a18a6d2e3c06b6868a4ef482d | 23,028 | py | Python
autoarray/inversion/pixelizations/delaunay.py | caoxiaoyue/PyAutoArray | e10d3d6a5b8dd031f2ad277486bd539bd5858b2a | ["MIT"] | null | null | null
autoarray/inversion/pixelizations/delaunay.py | caoxiaoyue/PyAutoArray | e10d3d6a5b8dd031f2ad277486bd539bd5858b2a | ["MIT"] | null | null | null
autoarray/inversion/pixelizations/delaunay.py | caoxiaoyue/PyAutoArray | e10d3d6a5b8dd031f2ad277486bd539bd5858b2a | ["MIT"] | null | null | null
import copy
import numpy as np
from typing import Dict, Optional
from autoarray.structures.grids.two_d.grid_2d import Grid2D
from autoarray.structures.grids.two_d.grid_2d import Grid2DSparse
from autoarray.structures.grids.two_d.grid_2d_pixelization import Grid2DDelaunay
from autoarray.preloads import Preloads
from autoarray.inversion.pixelizations.abstract import AbstractPixelization
from autoarray.inversion.pixelizations.settings import SettingsPixelization
from autoarray.inversion.mappers.delaunay import MapperDelaunay
from autoarray.numba_util import profile_func
class Delaunay(AbstractPixelization):
def __init__(self):
"""
A pixelization associates a 2D grid of (y,x) coordinates (which are expected to be aligned with a masked
dataset) with a 2D grid of pixels.
The Delaunay pixelization represents pixels as an irregular 2D grid of Delaunay triangles.
Both of these grids (e.g. the masked dataset's 2D grid and the grid of the Delaunay pixelization's pixels)
have (y,x) coordinates in two reference frames:
- `data`: the original reference frame of the masked data.
- `source`: a reference frame where grids in the `data` reference frame are transformed to a new reference
frame (e.g. their (y,x) coordinates may be shifted, stretched or have a more complicated operation performed
on them).
The grid associated with the masked dataset and Delaunay pixelization have the following variable names:
- `grid_slim`: the (y,x) grid of coordinates of the original masked data (which can be in the data frame and
given the variable name `data_grid_slim` or in the transformed source frame with the variable
name `source_grid_slim`).
- `pixelization_grid`: the (y,x) grid of Delaunay pixels which are associated with the `grid_slim` (y,x)
coordinates (association is always performed in the `source` reference frame).
A Delaunay pixelization has four grids associated with it: `data_grid_slim`, `source_grid_slim`,
`data_pixelization_grid` and `source_pixelization_grid`.
If a transformation of coordinates is not applied, the `data` frame and `source` frames are identical.
The (y,x) coordinates of the `source_pixelization_grid` represent the corners of the triangles in the
Delaunay triangulation.
Each (y,x) coordinate in the `source_grid_slim` is associated with the three nearest Delaunay triangle
corners. This association uses a weighted interpolation scheme whereby every `source_grid_slim` coordinate is
associated to Delaunay triangle corners with a higher weight if they are a closer distance to it.
In the project `PyAutoLens`, one's data is a masked 2D image. Its `data_grid_slim` is a 2D grid where every
(y,x) coordinate is aligned with the centre of every unmasked image pixel. A "lensing operation" transforms
this grid of (y,x) coordinates from the `data` frame to a new grid of (y,x) coordinates in the `source` frame.
The pixelization is then applied in the source frame. In lensing terminology, the `data` frame is
the `image-plane` and `source` frame the `source-plane`.
"""
super().__init__()
@property
def uses_interpolation(self):
return False
def mapper_from(
self,
source_grid_slim: Grid2D,
source_pixelization_grid: Grid2DSparse = None,
data_pixelization_grid: Grid2DSparse = None,
hyper_image: np.ndarray = None,
settings=SettingsPixelization(),
preloads: Preloads = Preloads(),
profiling_dict: Optional[Dict] = None,
):
"""
Mapper objects describe the mappings between pixels in the masked 2D data and the pixels in a pixelization,
in both the `data` and `source` frames.
This function returns a `MapperDelaunay` as follows:
1) Before this routine is called, a sparse grid of (y,x) coordinates are computed from the 2D masked data,
the `data_pixelization_grid`, which acts as the Delaunay pixel centres of the pixelization and mapper.
2) Before this routine is called, operations are performed on this `data_pixelization_grid` that transform it
from a 2D grid which overlaps with the 2D mask of the data in the `data` frame to an irregular grid in
the `source` frame, the `source_pixelization_grid`.
3) If `settings.use_border=True`, the border of the input `source_grid_slim` is used to relocate all of the
grid's (y,x) coordinates beyond the border to the edge of the border.
4) If `settings.use_border=True`, the border of the input `source_grid_slim` is used to relocate all of the
transformed `source_pixelization_grid`'s (y,x) coordinates beyond the border to the edge of the border.
5) Use the transformed `source_pixelization_grid`'s (y,x) coordinates as the centres of the Delaunay
pixelization.
6) Return the `MapperDelaunay`.
Parameters
----------
source_grid_slim
A 2D grid of (y,x) coordinates associated with the unmasked 2D data after it has been transformed to the
`source` reference frame.
source_pixelization_grid
The centres of every Delaunay pixel in the `source` frame, which are initially derived by computing a sparse
set of (y,x) coordinates computed from the unmasked data in the `data` frame and applying a transformation
to this.
data_pixelization_grid
The sparse set of (y,x) coordinates computed from the unmasked data in the `data` frame. This has a
transformation applied to it to create the `source_pixelization_grid`.
hyper_image
Not used for a rectangular pixelization.
settings
Settings controlling the pixelization for example if a border is used to relocate its exterior coordinates.
preloads
Object which may contain preloaded arrays of quantities computed in the pixelization, which are passed via
this object speed up the calculation.
profiling_dict
A dictionary which contains timing of certain functions calls which is used for profiling.
"""
self.profiling_dict = profiling_dict
relocated_grid = self.relocated_grid_from(
source_grid_slim=source_grid_slim, settings=settings, preloads=preloads
)
relocated_pixelization_grid = self.relocate_pixelization_grid_via_border(
source_grid_slim=source_grid_slim,
source_pixelization_grid=source_pixelization_grid,
settings=settings,
)
try:
pixelization_grid = self.make_pixelization_grid(
source_grid_slim=relocated_grid,
source_pixelization_grid=relocated_pixelization_grid,
sparse_index_for_slim_index=source_pixelization_grid.sparse_index_for_slim_index,
)
return MapperDelaunay(
source_grid_slim=relocated_grid,
source_pixelization_grid=pixelization_grid,
data_pixelization_grid=data_pixelization_grid,
hyper_image=hyper_image,
profiling_dict=profiling_dict,
)
except ValueError as e:
raise e
@profile_func
def relocate_pixelization_grid_via_border(
self,
source_grid_slim: Grid2D,
source_pixelization_grid: Grid2DSparse,
settings: SettingsPixelization = SettingsPixelization(),
):
"""
Relocates all coordinates of the input `source_pixelization_grid` that are outside of a border (which
is defined by a grid of (y,x) coordinates) to the edge of this border.
The border is determined from the mask of the 2D data in the `data` frame before any transformations of the
data's grid are performed. The border consists of all pixels in this mask at its extreme edge. These
pixel indexes are then used to determine a grid of (y,x) coordinates from the transformed `source_grid_slim` in
the `source` reference frame, whereby points located outside of it are relocated to the border's edge.
A full description of relocation is given in the method abstract_grid_2d.relocated_grid_from()`.
This is used in the project `PyAutoLens` to relocate the coordinates that are ray-traced near the centre of mass
of galaxies, which are heavily demagnified and may trace to outskirts of the source-plane well beyond the
border.
Parameters
----------
source_grid_slim
A 2D grid of (y,x) coordinates associated with the unmasked 2D data after it has been transformed to the
`source` reference frame.
source_pixelization_grid
The (y,x) coordinates of the corner of every Delaunay pixel in the `source` frame, which are initially
derived by computing a sparse set of (y,x) coordinates computed from the unmasked data in the `data` frame
and applying a transformation to this.
settings
Settings controlling the pixelization for example if a border is used to relocate its exterior coordinates.
"""
if settings.use_border:
return source_grid_slim.relocated_pixelization_grid_from(
pixelization_grid=source_pixelization_grid
)
return source_pixelization_grid
@profile_func
def make_pixelization_grid(
self,
source_grid_slim=None,
source_pixelization_grid=None,
sparse_index_for_slim_index=None,
):
"""
Return the Delaunay `source_pixelization_grid` as a `Grid2DDelaunay` object, which provides additional
functionality for performing operations that exploit the geometry of a Delaunay pixelization.
Parameters
----------
source_grid_slim
A 2D grid of (y,x) coordinates associated with the unmasked 2D data after it has been transformed to the
`source` reference frame.
source_pixelization_grid
The centres of every Delaunay pixel in the `source` frame, which are initially derived by computing a sparse
set of (y,x) coordinates computed from the unmasked data in the `data` frame and applying a transformation
to this.
settings
Settings controlling the pixelization for example if a border is used to relocate its exterior coordinates.
"""
return Grid2DDelaunay(
grid=source_pixelization_grid, uses_interpolation=self.uses_interpolation
)
class DelaunayMagnification(Delaunay):
def __init__(self, shape=(3, 3)):
"""
A pixelization associates a 2D grid of (y,x) coordinates (which are expected to be aligned with a masked
dataset) with a 2D grid of pixels. The Delaunay pixelization represents pixels as an irregular 2D grid of
Delaunay triangles.
Both of these grids (e.g. the masked dataset's 2D grid and the grid of the Delaunay pixelization's pixels)
have (y,x) coordinates in two reference frames:
- `data`: the original reference frame of the masked data.
- `source`: a reference frame where grids in the `data` reference frame are transformed to a new reference
frame (e.g. their (y,x) coordinates may be shifted, stretched or have a more complicated operation performed
on them).
The grid associated with the masked dataset and Delaunay pixelization have the following variable names:
- `grid_slim`: the (y,x) grid of coordinates of the original masked data (which can be in the data frame and
given the variable name `data_grid_slim` or in the transformed source frame with the variable
name `source_grid_slim`).
- `pixelization_grid`: the (y,x) grid of Delaunay pixels which are associated with the `grid_slim` (y,x)
coordinates (association is always performed in the `source` reference frame).
A Delaunay pixelization has four grids associated with it: `data_grid_slim`, `source_grid_slim`,
`data_pixelization_grid` and `source_pixelization_grid`.
If a transformation of coordinates is not applied, the `data` frame and `source` frames are identical.
Each (y,x) coordinate in the `source_grid_slim` is associated with the three nearest Delaunay triangle
corners (when joined together with straight lines these corners form Delaunay triangles). This association
uses weighted interpolation whereby `source_grid_slim` coordinates are associated to the Delaunay corners with
a higher weight if they are a closer distance to one another.
For the `DelaunayMagnification` pixelization the corners of the Delaunay pixels are derived in the `data` frame,
by overlaying a uniform grid with the input `shape` over the masked data's grid. All coordinates in this
uniform grid which are contained within the mask are kept and have the same transformation applied to them as the
masked data's grid, mapping them to the source frame, where they form the pixelization's Delaunay pixel centres.
In the project `PyAutoLens`, one's data is a masked 2D image. Its `data_grid_slim` is a 2D grid where every
(y,x) coordinate is aligned with the centre of every unmasked image pixel. A "lensing operation" transforms
this grid of (y,x) coordinates from the `data` frame to a new grid of (y,x) coordinates in the `source` frame.
The pixelization is then applied in the source frame. In lensing terminology, the `data` frame is
the `image-plane` and `source` frame the `source-plane`.
Parameters
----------
shape
The shape of the unmasked `pixelization_grid` in the `data` frame which is laid over the masked image, in
order to derive the centres of the Delaunay pixels in the `data` frame.
"""
super().__init__()
self.shape = (int(shape[0]), int(shape[1]))
self.pixels = self.shape[0] * self.shape[1]
def data_pixelization_grid_from(
self,
data_grid_slim: Grid2D,
hyper_image: np.ndarray = None,
settings=SettingsPixelization(),
):
"""
Computes the `pixelization_grid` in the `data` frame, by overlaying a uniform grid of coordinates over the
masked 2D data (see `Grid2DSparse.from_grid_and_unmasked_2d_grid_shape()`).
For a `DelaunayMagnification` this grid is computed by overlaying a 2D grid with dimensions `shape` over the
masked 2D data in the `data` frame, whereby all (y,x) coordinates in this grid which are not masked are
retained.
Parameters
----------
data_pixelization_grid
The sparse set of (y,x) coordinates computed from the unmasked data in the `data` frame. This has a
transformation applied to it to create the `source_pixelization_grid`.
hyper_image
An image which is used to determine the `data_pixelization_grid` and therefore adapt the distribution of
pixels of the Delaunay grid to the data it discretizes.
settings
Settings controlling the pixelization for example if a border is used to relocate its exterior coordinates.
"""
return Grid2DSparse.from_grid_and_unmasked_2d_grid_shape(
grid=data_grid_slim, unmasked_sparse_shape=self.shape
)
class DelaunayBrightnessImage(Delaunay):
def __init__(self, pixels=10, weight_floor=0.0, weight_power=0.0):
"""
A pixelization associates a 2D grid of (y,x) coordinates (which are expected to be aligned with a masked
dataset) with a 2D grid of pixels. The Delaunay pixelization represents pixels as an irregular 2D grid of
Delaunay triangles.
Both of these grids (e.g. the masked dataset's 2D grid and the grid of the Delaunay pixelization's pixels)
have (y,x) coordinates in two reference frames:
- `data`: the original reference frame of the masked data.
- `source`: a reference frame where grids in the `data` reference frame are transformed to a new reference
frame (e.g. their (y,x) coordinates may be shifted, stretched or have a more complicated operation performed
on them).
The grid associated with the masked dataset and Delaunay pixelization have the following variable names:
- `grid_slim`: the (y,x) grid of coordinates of the original masked data (which can be in the data frame and
given the variable name `data_grid_slim` or in the transformed source frame with the variable
name `source_grid_slim`).
- `pixelization_grid`: the (y,x) grid of Delaunay pixels which are associated with the `grid_slim` (y,x)
coordinates (association is always performed in the `source` reference frame).
A Delaunay pixelization has four grids associated with it: `data_grid_slim`, `source_grid_slim`,
`data_pixelization_grid` and `source_pixelization_grid`.
If a transformation of coordinates is not applied, the `data` frame and `source` frames are identical.
Each (y,x) coordinate in the `source_grid_slim` is associated with the three nearest Delaunay triangle
corners (when joined together with straight lines these corners form Delaunay triangles). This association
uses weighted interpolation whereby `source_grid_slim` coordinates are associated to the Delaunay corners with
a higher weight if they are a closer distance to one another.
For the `DelaunayBrightnessImage` pixelization the corners of the Delaunay triangles are derived in
the `data` frame, by applying a KMeans clustering algorithm to the masked data's values. These values are used to
compute `pixels` number of pixels, where the `weight_floor` and `weight_power` allow the KMeans algorithm to
adapt the derived pixel centre locations to the data's brightest or faintest values.
In the project `PyAutoLens`, one's data is a masked 2D image. Its `data_grid_slim` is a 2D grid where every
(y,x) coordinate is aligned with the centre of every unmasked image pixel. A "lensing operation" transforms
this grid of (y,x) coordinates from the `data` frame to a new grid of (y,x) coordinates in the `source` frame.
The pixelization is then applied in the source frame. In lensing terminology, the `data` frame is
the `image-plane` and `source` frame the `source-plane`.
Parameters
----------
pixels
The total number of pixels in the Delaunay pixelization, which is therefore also the number of (y,x)
coordinates computed via the KMeans clustering algorithm in data frame.
weight_floor
A parameter which reweights the data values to which the KMeans algorithm is applied; as the floor increases,
more weight is given to lower values, thus allowing Delaunay pixels to be placed in these
regions of the data.
weight_power
A parameter which reweights the data values to which the KMeans algorithm is applied; as the power increases,
more weight is given to higher values, thus allowing Delaunay pixels to be placed in these
regions of the data.
"""
super().__init__()
self.pixels = int(pixels)
self.weight_floor = weight_floor
self.weight_power = weight_power
def weight_map_from(self, hyper_image: np.ndarray):
"""
Computes a `weight_map` from an input `hyper_image`, where this image represents components in the masked 2D
data in the `data` frame. This applies the `weight_floor` and `weight_power` attributes of the class, which
scale the weights so that different components are upweighted relative to one another.
Parameters
----------
hyper_image
An image which represents one or more components in the masked 2D data in the `data` frame.
Returns
-------
The weight map which is used to adapt the Delaunay pixels in the `data` frame to components in the data.
"""
weight_map = (hyper_image - np.min(hyper_image)) / (
np.max(hyper_image) - np.min(hyper_image)
) + self.weight_floor * np.max(hyper_image)
return np.power(weight_map, self.weight_power)
def data_pixelization_grid_from(
self,
data_grid_slim: Grid2D,
hyper_image: np.ndarray,
settings=SettingsPixelization(),
):
"""
Computes the `pixelization_grid` in the `data` frame, by adapting a grid of coordinates to the values of the
masked 2D data (see `Grid2DSparse.from_total_pixels_grid_and_weight_map()`).
The `data_pixelization_grid` is transformed to the `source_pixelization_grid`, and it is these (y,x) values
which then act as the centres of the Delaunay pixelization's pixels.
For a `DelaunayBrightnessImage` this grid is computed by applying a KMeans clustering algorithm to the masked
data's values, where these values are reweighted by the `hyper_image` so that the algorithm can adapt to
specific parts of the data.
Parameters
----------
data_pixelization_grid
The sparse set of (y,x) coordinates computed from the unmasked data in the `data` frame. This has a
transformation applied to it to create the `source_pixelization_grid`.
hyper_image
An image which is used to determine the `data_pixelization_grid` and therefore adapt the distribution of
pixels of the Delaunay grid to the data it discretizes.
settings
Settings controlling the pixelization for example if a border is used to relocate its exterior coordinates.
"""
weight_map = self.weight_map_from(hyper_image=hyper_image)
return Grid2DSparse.from_total_pixels_grid_and_weight_map(
total_pixels=self.pixels,
grid=data_grid_slim,
weight_map=weight_map,
seed=settings.kmeans_seed,
stochastic=settings.is_stochastic,
)
@property
def is_stochastic(self) -> bool:
return True
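# --- Illustrative sketch (not part of the original module) ---
# Worked example of DelaunayBrightnessImage.weight_map_from(): the hyper image is
# rescaled to [0, 1], offset by weight_floor * max(hyper_image), and raised to
# weight_power. The numbers below are made up purely to show the arithmetic.
import numpy as np
_example_hyper_image = np.array([1.0, 2.0, 5.0])
_example_floor, _example_power = 0.1, 2.0
_example_weight_map = (_example_hyper_image - _example_hyper_image.min()) / (
    _example_hyper_image.max() - _example_hyper_image.min()
) + _example_floor * _example_hyper_image.max()
_example_weight_map = np.power(_example_weight_map, _example_power)
# -> approximately [0.25, 0.5625, 2.25]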
| 52.816514 | 121 | 0.6823 |
bf528c84dcdaf227e43c27f0073e4ebbb28d13d8 | 1,157 | py | Python
src/jetbot_sdf/scripts/move.py | Tao-wecorp/jetbot_sdf | 6b5b873b36eca068404ee5477ffc477dccd76c7f | ["MIT"] | null | null | null
src/jetbot_sdf/scripts/move.py | Tao-wecorp/jetbot_sdf | 6b5b873b36eca068404ee5477ffc477dccd76c7f | ["MIT"] | null | null | null
src/jetbot_sdf/scripts/move.py | Tao-wecorp/jetbot_sdf | 6b5b873b36eca068404ee5477ffc477dccd76c7f | ["MIT"] | null | null | null
#!/usr/bin/env python
import rospy
import rospkg
from math import pi
from geometry_msgs.msg import Pose, Quaternion
from gazebo_msgs.msg import ModelState
from gazebo_msgs.srv import SetModelState
from tf.transformations import quaternion_from_euler
def main():
rospy.init_node('set_pose')
state_msg = ModelState()
state_msg.model_name = 'jetbot'
state_msg.pose.position.x = 0
state_msg.pose.position.y = 0
state_msg.pose.position.z = 0
state_msg.pose.orientation = Quaternion(*quaternion_from_euler(0.0, 0.0, 90*pi/180))
state_msg.twist.linear.x = 0
state_msg.twist.linear.y = 0
state_msg.twist.linear.z = 0
state_msg.twist.angular.x = 0
state_msg.twist.angular.y = 0
state_msg.twist.angular.z = 0
state_msg.reference_frame = 'world'
rospy.wait_for_service('/gazebo/set_model_state')
try:
set_state = rospy.ServiceProxy('/gazebo/set_model_state', SetModelState)
set_state( state_msg )
except rospy.ServiceException, e:
print "Service call failed: %s" % e
if __name__ == '__main__':
try:
main()
except rospy.ROSInterruptException:
pass
| 30.447368 | 88 | 0.709594 |
783633bd3478c828fc71a31369c20594787fb448 | 523 | py | Python
tests/flaky_tests_check.py | fracting/ml-recsys-tools | b49ef6f8aa53ab929418ab71e1898c1a67f82f63 | ["MIT"] | 16 | 2018-04-12T06:57:51.000Z | 2021-02-01T14:35:23.000Z
tests/flaky_tests_check.py | fracting/ml-recsys-tools | b49ef6f8aa53ab929418ab71e1898c1a67f82f63 | ["MIT"] | null | null | null
tests/flaky_tests_check.py | fracting/ml-recsys-tools | b49ef6f8aa53ab929418ab71e1898c1a67f82f63 | ["MIT"] | 4 | 2018-11-08T02:56:20.000Z | 2020-08-11T07:24:20.000Z
import os
import unittest
from collections import Counter
root_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), '..')
n_flakes = 0
flakes = Counter()
for i in range(50):
# run tests
suite = unittest.TestLoader().discover(root_dir)
result = unittest.TextTestRunner().run(suite)
fails = result.failures + result.errors
if fails:
flakes[fails[0][0]._testMethodName] += 1
n_flakes += 1
print(f'flakiness: {n_flakes / (i + 1)}')
print(f'problematic tests: {flakes}')
| 26.15 | 73 | 0.667304 |
f29177c3e1b275e64040855f2436499cd183cc61 | 1,271 | py | Python
test.py | thisisdevanshu/Landmark-Detection-CNN | 6d25ba39119eece29cf8eead8e35003c58e5b06e | ["Apache-2.0"] | null | null | null
test.py | thisisdevanshu/Landmark-Detection-CNN | 6d25ba39119eece29cf8eead8e35003c58e5b06e | ["Apache-2.0"] | null | null | null
test.py | thisisdevanshu/Landmark-Detection-CNN | 6d25ba39119eece29cf8eead8e35003c58e5b06e | ["Apache-2.0"] | null | null | null
from keras.models import load_model
from keras.preprocessing import image
import keras
import numpy as np
import argparse
classifier = load_model('model')
test_image = image.load_img('LD_Test/phy.jpg', target_size = (300, 300))
parser = argparse.ArgumentParser()
parser.add_argument(
'--image', type=str, default='', help='Image Path')
args = parser.parse_args()
if args.image:
test_image = image.load_img(args.image, target_size = (300, 300))
test_image = image.img_to_array(test_image)
test_image = np.expand_dims(test_image, axis = 0)
result = classifier.predict(test_image)
result_class = result.argmax(axis = 1)
if(result_class == 0):
print('Architecture')
if(result_class == 1):
print('Ben Hill Griffin Stadium')
if(result_class == 2):
print('Benton Hall')
if(result_class == 3):
print('Black Hall')
if(result_class == 4):
print('Century Tower')
if(result_class == 5):
print('CSE')
if(result_class == 6):
print('Hub')
if(result_class == 7):
print('Library West')
if(result_class == 8):
print('Little Hall')
if(result_class == 9):
print('Marston Library')
if(result_class == 10):
print('NEB')
if(result_class == 11):
print('Physics')
if(result_class == 12):
print('Reitz Union')
if(result_class == 13):
print('Turlington Hall')
print(result[0])
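# --- Illustrative sketch (not part of the original script) ---
# The if-chain above could equivalently be written as a label lookup; the list
# simply restates the class names already used in the prints.
_labels = ['Architecture', 'Ben Hill Griffin Stadium', 'Benton Hall', 'Black Hall',
           'Century Tower', 'CSE', 'Hub', 'Library West', 'Little Hall',
           'Marston Library', 'NEB', 'Physics', 'Reitz Union', 'Turlington Hall']
print(_labels[int(result_class[0])])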
| 24.442308 | 72 | 0.717545 |
34c579ffd9ed04034a61ff09ea5d03d3aa2d06cc | 1,725 | py | Python
simulation/logging/run.py | christo4ferris/spacetech-kubesat | e558153781a82b146f83d5ef9121830c2d12920c | ["Apache-2.0"] | null | null | null
simulation/logging/run.py | christo4ferris/spacetech-kubesat | e558153781a82b146f83d5ef9121830c2d12920c | ["Apache-2.0"] | null | null | null
simulation/logging/run.py | christo4ferris/spacetech-kubesat | e558153781a82b146f83d5ef9121830c2d12920c | ["Apache-2.0"] | null | null | null
import argparse
from logging_service import simulation
def clean(stri):
stri = stri.strip(" ")
stri = stri.strip("\n")
stri = stri.strip("\r")
stri = stri.strip("\t")
return stri
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-u', '--nats_user', help='Username for the NATS connection')
parser.add_argument('-p', '--nats_password', help='Password for the NATS connection')
parser.add_argument('-s', '--nats_host', help='Host for the NATS connection')
parser.add_argument('-r', '--redis_password', help='Password for the Redis connection')
parser.add_argument('-d', '--redis_host', help='Host for the Redis connection')
parser.add_argument('-a', '--api_host', help='Hostname for the API')
parser.add_argument('-t', '--api_port', help='Port for the API')
args = parser.parse_args()
print(f"Nats user: {args.nats_user}")
print(f"Nats password: {args.nats_password}")
print(f"Redis password: {args.redis_password}")
print(f"Api host: {args.api_host}")
print(f"Api port: {args.api_port}")
arguments = {}
if args.nats_user:
arguments["nats_user"] = clean(args.nats_user)
if args.nats_password:
arguments["nats_password"] = clean(args.nats_password)
if args.nats_host:
arguments["nats_host"] = clean(args.nats_host)
if args.redis_password:
arguments["redis_password"] = clean(args.redis_password)
if args.redis_host:
arguments["redis_host"] = clean(args.redis_host)
if args.api_host:
arguments["api_host"] = clean(args.api_host)
if args.api_port:
arguments["api_port"] = int(clean(args.api_port))
simulation.run(**arguments)
| 38.333333 | 91 | 0.666087 |
9d8c4c5012283ca0772b05661f315a08ae526236 | 527 | py | Python
plotly/validators/scatter3d/line/_cmax.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | ["MIT"] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z
plotly/validators/scatter3d/line/_cmax.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | ["MIT"] | 27 | 2020-04-28T21:23:12.000Z | 2021-06-25T15:36:38.000Z
plotly/validators/scatter3d/line/_cmax.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | ["MIT"] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z
import _plotly_utils.basevalidators
class CmaxValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(
self, plotly_name='cmax', parent_name='scatter3d.line', **kwargs
):
super(CmaxValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop('edit_type', 'calc'),
implied_edits=kwargs.pop('implied_edits', {'cauto': False}),
role=kwargs.pop('role', 'info'),
**kwargs
)
| 31 | 72 | 0.626186 |
c44ccf6d80bbcb59e2a0aa122e351a457720e906 | 2,549 | py | Python
app/tests/api/alert_tool/test_create.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | ["Apache-2.0"] | null | null | null
app/tests/api/alert_tool/test_create.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | ["Apache-2.0"] | null | null | null
app/tests/api/alert_tool/test_create.py | hollyfoxx/ace2-ams-api | 08ecf3f3dc8ac9abd224465731458950d4f78b7d | ["Apache-2.0"] | null | null | null
import pytest
import uuid
from fastapi import status
#
# INVALID TESTS
#
@pytest.mark.parametrize(
"key,value",
[
("description", 123),
("description", ""),
("uuid", None),
("uuid", 1),
("uuid", "abc"),
("uuid", ""),
("value", 123),
("value", None),
("value", ""),
],
)
def test_create_invalid_fields(client_valid_access_token, key, value):
create_json = {"value": "test"}
create_json[key] = value
create = client_valid_access_token.post("/api/alert/tool/", json=create_json)
assert create.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
@pytest.mark.parametrize(
"key",
[
("uuid"),
("value"),
],
)
def test_create_duplicate_unique_fields(client_valid_access_token, key):
# Create an object
create1_json = {"uuid": str(uuid.uuid4()), "value": "test"}
client_valid_access_token.post("/api/alert/tool/", json=create1_json)
# Ensure you cannot create another object with the same unique field value
create2_json = {"value": "test2"}
create2_json[key] = create1_json[key]
create2 = client_valid_access_token.post("/api/alert/tool/", json=create2_json)
assert create2.status_code == status.HTTP_409_CONFLICT
@pytest.mark.parametrize(
"key",
[
("value"),
],
)
def test_create_missing_required_fields(client_valid_access_token, key):
create_json = {"value": "test"}
del create_json[key]
create = client_valid_access_token.post("/api/alert/tool/", json=create_json)
assert create.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
#
# VALID TESTS
#
@pytest.mark.parametrize(
"key,value",
[("description", None), ("description", "test"), ("uuid", str(uuid.uuid4()))],
)
def test_create_valid_optional_fields(client_valid_access_token, key, value):
# Create the object
create = client_valid_access_token.post("/api/alert/tool/", json={key: value, "value": "test"})
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
assert get.json()[key] == value
def test_create_valid_required_fields(client_valid_access_token):
# Create the object
create = client_valid_access_token.post("/api/alert/tool/", json={"value": "test"})
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client_valid_access_token.get(create.headers["Content-Location"])
assert get.json()["value"] == "test"
| 27.706522 | 99 | 0.66889 |
2a7391a9aec80fed83d3bb1cfd12b885693155f5 | 2,368 | py | Python
lightautoml/ml_algo/utils.py | PhySci/LightAutoML | e860991ba36361629fe9e11e034c5e976fe4219b | ["Apache-2.0"] | 1 | 2021-10-02T05:28:16.000Z | 2021-10-02T05:28:16.000Z
lightautoml/ml_algo/utils.py | DESimakov/LightAutoML | 2eaa05b27c63c613965d50cdb7d52da5d245d9af | ["Apache-2.0"] | null | null | null
lightautoml/ml_algo/utils.py | DESimakov/LightAutoML | 2eaa05b27c63c613965d50cdb7d52da5d245d9af | ["Apache-2.0"] | null | null | null
"""Tools for model training."""
import logging
from typing import Optional
from typing import Tuple
from ..dataset.base import LAMLDataset
from ..validation.base import TrainValidIterator
from .base import MLAlgo
from .tuning.base import ParamsTuner
logger = logging.getLogger(__name__)
def tune_and_fit_predict(
ml_algo: MLAlgo,
params_tuner: ParamsTuner,
train_valid: TrainValidIterator,
force_calc: bool = True,
) -> Tuple[Optional[MLAlgo], Optional[LAMLDataset]]:
"""Tune new algorithm, fit on data and return algo and predictions.
Args:
ml_algo: ML algorithm that will be tuned.
params_tuner: Tuner object.
train_valid: Classic cv-iterator.
force_calc: Flag if single fold of ml_algo should be calculated anyway.
Returns:
Tuple (BestMlAlgo, predictions).
"""
timer = ml_algo.timer
timer.start()
single_fold_time = timer.estimate_folds_time(1)
# if force_calc is False we check if it makes sense to continue
if not force_calc and (
(single_fold_time is not None and single_fold_time > timer.time_left)
or timer.time_limit_exceeded()
):
return None, None
if params_tuner.best_params is None:
# this try/except clause was added because catboost died for some unexpected reason
try:
# TODO: Set some conditions to the tuner
new_algo, preds = params_tuner.fit(ml_algo, train_valid)
except Exception as e:
logger.info2(
"Model {0} failed during params_tuner.fit call.\n\n{1}".format(
ml_algo.name, e
)
)
return None, None
if preds is not None:
return new_algo, preds
if not force_calc and (
(single_fold_time is not None and single_fold_time > timer.time_left)
or timer.time_limit_exceeded()
):
return None, None
ml_algo.params = params_tuner.best_params
# this try/except clause was added because catboost died for some unexpected reason
try:
preds = ml_algo.fit_predict(train_valid)
except Exception as e:
logger.info2(
"Model {0} failed during ml_algo.fit_predict call.\n\n{1}".format(
ml_algo.name, e
)
)
return None, None
return ml_algo, preds
| 28.878049 | 91 | 0.649916 |
14a8a0a4e8d1fbe1422e4931181a0cd948d167c7 | 4,064 | py | Python
spatialtis/_plotting/base/expression_map.py | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | ["Apache-2.0"] | 10 | 2020-07-14T13:27:35.000Z | 2021-11-24T21:41:30.000Z
spatialtis/_plotting/base/expression_map.py | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | ["Apache-2.0"] | 21 | 2021-01-10T09:39:25.000Z | 2022-03-12T01:04:52.000Z
spatialtis/_plotting/base/expression_map.py | Mr-Milk/SpatialTis | bcdc6df5213b8b256cbe4c9a7c0f3b5e6d3c56b6 | ["Apache-2.0"] | null | null | null
from typing import List, Tuple
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import pyecharts.options as opts
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from pyecharts.charts import Bar3D
from spatialtis._plotting.abc import MatplotlibMixin, PyechartsMixin
from spatialtis._plotting.base.palette import get_linear_colors
from spatialtis.typing import Number
from spatialtis.utils import doc
@doc
class expression_map_3d(PyechartsMixin):
"""Visualize marker expression in 3D, Pyecharts
Args:
points: The locations of each cells
expressions: The expression level for each cell
axis_size: The length of x,y,z axis
**plot_options: {plot_options}
"""
def __init__(
self,
points: List[Tuple[Number, Number]],
expressions: List[Number],
axis_size: tuple = (100, 100, 80),
**plot_options,
):
super().__init__(**plot_options)
if self.palette is None:
self.palette = ["RdYlBu"]
colors = get_linear_colors(self.palette)
zdata = []
for cell, exp in zip(points, expressions):
zdata.append([round(cell[0], 2), round(cell[1], 2), float(exp)])
zrange = sorted([i[2] for i in zdata])
self.plot = Bar3D(
init_opts=opts.InitOpts(
width=f"{self.size[0]}px",
height=f"{self.size[1]}px",
theme=self.theme,
renderer=self.renderer,
)
)
self.plot.add(
series_name="",
shading="color",
data=zdata,
xaxis3d_opts=opts.Axis3DOpts(type_="value"),
yaxis3d_opts=opts.Axis3DOpts(type_="value"),
zaxis3d_opts=opts.Axis3DOpts(type_="value"),
grid3d_opts=opts.Grid3DOpts(
width=axis_size[1], height=axis_size[2], depth=axis_size[0]
),
).set_global_opts(
visualmap_opts=opts.VisualMapOpts(
dimension=2,
max_=zrange[-1],
min_=zrange[0],
range_color=colors,
),
toolbox_opts=opts.ToolboxOpts(
feature={
"saveAsImage": {
"title": "Save",
"pixelRatio": 5,
},
"restore": {"title": "Restore"},
},
),
)
self.set_up()
@doc
class expression_map_static(MatplotlibMixin):
"""Visualize marker expression, Matplotlib
Args:
points: The locations of each cells
expressions: The expression level for each cell
cell_size: The size of cell
**plot_options: {plot_options}
"""
def __init__(
self,
points: List[Tuple[Number, Number]],
expressions: List[Number],
cell_size: int = 5,
**plot_options,
):
super().__init__(**plot_options)
if self.palette is None:
self.palette = ["Magma"]
colors = get_linear_colors(self.palette)[::-1]
cmap = mpl.colors.LinearSegmentedColormap.from_list("", colors)
x, y = [], []
for p in points:
x.append(p[0])
y.append(p[1])
self.fig, self.ax = plt.subplots(figsize=self.size)
ss = self.ax.scatter(x=x, y=y, c=expressions, s=cell_size, cmap=cmap)
# Hide the axes and add a colorbar in an inset axis
self.ax.set_aspect("equal")
plt.axis("off")
axins = inset_axes(
self.ax,
width="3%", # width = 5% of parent_bbox width
height="50%", # height : 50%
loc="lower left",
bbox_to_anchor=(1, 0.05, 1, 1),
bbox_transform=self.ax.transAxes,
borderpad=0,
)
ticks = np.linspace(min(expressions), max(expressions), num=6)
cbar = self.fig.colorbar(ss, cax=axins, ticks=ticks)
cbar.set_ticklabels([round(i, 1) for i in ticks])
self.set_up()
| 30.787879 | 77 | 0.559055 |
2c2ff08622f924a25c56ae4cd8a8a6378b30bdc7 | 5,058 | py | Python
CB/jogodavelha.py | SkiNgK/cb.2.16 | 060c83413e3374613d93b82eaea3f73af6818ea9 | ["MIT"] | null | null | null
CB/jogodavelha.py | SkiNgK/cb.2.16 | 060c83413e3374613d93b82eaea3f73af6818ea9 | ["MIT"] | null | null | null
CB/jogodavelha.py | SkiNgK/cb.2.16 | 060c83413e3374613d93b82eaea3f73af6818ea9 | ["MIT"] | null | null | null
# -*- coding: utf-8 -*-
matriz = [['-','-','-'],
['-','-','-'],
['-','-','-']]
print'\n'
print'\t 0 1 2'
print'\t0 %s | %s | %s ' % (matriz[0][0], matriz[0][1], matriz[0][2])
print'\t ---------- '
print'\t1 %s | %s | %s ' % (matriz[1][0], matriz[1][1], matriz[1][2])
print'\t ----------'
print'\t2 %s | %s | %s ' % (matriz[2][0], matriz[2][1], matriz[2][2])
print'\n'
print 'Jogador 1 = X e Jogador 2 = O'
vez=2
op = 'S'
velha=0
while op=='S':
for i in range(100):
if vez%2==0:
print 'Jogador 1, digite as coordenadas da sua jogada'
linha = input ('Linha: ')
coluna = input ('Coluna: ')
if matriz[linha][coluna]=='-':
matriz[linha][coluna]='X'
else:
print 'Jogada inválida, digite novamente'
linha = input ('Linha: ')
coluna = input ('Coluna: ')
matriz[linha][coluna]='X'
print '\n' * 100
print'\n'
print'\t 0 1 2'
print'\t0 %s | %s | %s ' % (matriz[0][0], matriz[0][1], matriz[0][2])
print'\t ---------- '
print'\t1 %s | %s | %s ' % (matriz[1][0], matriz[1][1], matriz[1][2])
print'\t ----------'
print'\t2 %s | %s | %s ' % (matriz[2][0], matriz[2][1], matriz[2][2])
print'\n'
else:
print 'Jogador 2, digite as coordenadas da sua jogada'
linha = input ('Linha: ')
coluna = input ('Coluna: ')
if matriz[linha][coluna]=='-':
matriz[linha][coluna]='O'
else:
print 'Jogada inválida, digite novamente'
linha = input ('Linha: ')
coluna = input ('Coluna: ')
matriz[linha][coluna]='O'
print '\n' * 100
print'\n'
print'\t 0 1 2'
print'\t0 %s | %s | %s ' % (matriz[0][0], matriz[0][1], matriz[0][2])
print'\t ---------- '
print'\t1 %s | %s | %s ' % (matriz[1][0], matriz[1][1], matriz[1][2])
print'\t ----------'
print'\t2 %s | %s | %s ' % (matriz[2][0], matriz[2][1], matriz[2][2])
print'\n'
vez+=1
# checking the rows
if(matriz[0][0]=='X' and matriz[0][1]=='X' and matriz[0][2]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[1][0]=='X' and matriz[1][1]=='X' and matriz[1][2]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[2][0]=='X' and matriz[2][1]=='X' and matriz[2][2]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[0][0]=='O' and matriz[0][1]=='O' and matriz[0][2]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
if(matriz[1][0]=='O' and matriz[1][1]=='O' and matriz[1][2]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
if(matriz[2][0]=='O' and matriz[2][1]=='O' and matriz[2][2]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
# checking the columns
if(matriz[0][0]=='X' and matriz[1][0]=='X' and matriz[2][0]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[0][1]=='X' and matriz[1][1]=='X' and matriz[2][1]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[0][2]=='X' and matriz[1][2]=='X' and matriz[2][2]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[0][0]=='O' and matriz[1][0]=='O' and matriz[2][0]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
if(matriz[0][1]=='O' and matriz[1][1]=='O' and matriz[2][1]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
if(matriz[0][2]=='O' and matriz[1][2]=='O' and matriz[2][2]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
# checking the diagonals
if(matriz[0][0]=='X' and matriz[1][1]=='X' and matriz[2][2]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[0][2]=='X' and matriz[1][1]=='X' and matriz[2][0]=='X'):
print "\nJogador 1, Venceu !!"
break
else:
velha+=1
if(matriz[0][0]=='O' and matriz[1][1]=='O' and matriz[2][2]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
if(matriz[0][2]=='O' and matriz[1][1]=='O' and matriz[2][0]=='O'):
print "\nJogador 2, Venceu !!"
break
else:
velha+=1
if(velha==144):
print "\nDeu Velha !! "
break
op = raw_input ('Deseja jogar novamente ? "S"-sim ou "N"-não : ')
if op == 'S':
matriz = [['-','-','-'],
['-','-','-'],
['-','-','-']]
print'\n'
print'\t 0 1 2'
print'\t0 %s | %s | %s ' % (matriz[0][0], matriz[0][1], matriz[0][2])
print'\t ---------- '
print'\t1 %s | %s | %s ' % (matriz[1][0], matriz[1][1], matriz[1][2])
print'\t ----------'
print'\t2 %s | %s | %s ' % (matriz[2][0], matriz[2][1], matriz[2][2])
print'\n'
| 26.761905 | 73 | 0.457098 |
4867a0a7718d72d382b3e6d5c77d88fcfab8732d | 1,519 | py | Python
airflow_tutorial/serve.py | thejungwon/machine_learning_serving | 1d1e003c65588dbe1383619e21dfe4b22afc713e | ["MIT"] | null | null | null
airflow_tutorial/serve.py | thejungwon/machine_learning_serving | 1d1e003c65588dbe1383619e21dfe4b22afc713e | ["MIT"] | null | null | null
airflow_tutorial/serve.py | thejungwon/machine_learning_serving | 1d1e003c65588dbe1383619e21dfe4b22afc713e | ["MIT"] | null | null | null
from flask import Flask
from flask import request
from flask import render_template,url_for
from PIL import Image
import io
import base64
import cv2
import numpy as np
import urllib
from joblib import dump, load
from binascii import a2b_base64
import tensorflow as tf
try:
model = tf.keras.models.load_model('my_model')
except Exception:
    # if loading fails, `model` stays undefined and /recognize will raise a NameError
    print("No model")
#MARK
app = Flask(__name__)
app.config['TEMPLATES_AUTO_RELOAD'] = True
def resize_image(image):
image = cv2.resize(image, (28,28))
return image
def recognize_image(image, is_tf = False):
print("tensorflow")
image = image/255.0
return "TF",model.predict_classes( np.array( [image,] ))
@app.route('/')
def index():
return render_template("index.html")
@app.route('/recognize', methods=['GET','POST'])
def analyze():
if request.method == 'POST':
data_url = request.values.get('data')
model_type = request.values.get('type')
encoded_image = data_url.split(",")[1]
binary_data = a2b_base64(encoded_image)
data_io = io.BytesIO(binary_data)
img = Image.open(data_io)
image_np = np.array(img)
image_np = image_np[:, :, 3]
resized = resize_image(image_np)
model_type = False if model_type == "0" else True
a = recognize_image(resized, is_tf=model_type)
return str(a)
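# Illustrative client call (an added sketch, not part of the original app): the
# front-end canvas is assumed to POST a base64 PNG data URL plus a model flag,
# roughly:
#
#   import requests
#   requests.post("http://localhost:8000/recognize",
#                 data={"data": "data:image/png;base64,iVBORw0...", "type": "1"})
#
# "type" is parsed into is_tf, although recognize_image as written always uses
# the TensorFlow model; the digit is read from the alpha channel of the
# 28x28-resized drawing.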
if __name__ == '__main__':
app.run(host="0.0.0.0", port=8000, debug=True)
| 22.014493
| 60
| 0.633311
|
0b31367605da909e976f86d44dbba95e1b46e957
| 3,289
|
py
|
Python
|
tools/taxonomy/poisson2test.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 2
|
2016-02-23T00:09:14.000Z
|
2019-02-11T07:48:44.000Z
|
tools/taxonomy/poisson2test.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | null | null | null |
tools/taxonomy/poisson2test.py
|
bopopescu/phyG
|
023f505b705ab953f502cbc55e90612047867583
|
[
"CC-BY-3.0"
] | 6
|
2015-05-27T13:09:50.000Z
|
2019-02-11T07:48:46.000Z
|
#!/usr/local/bin/python
import sys
from math import *
from rpy import *
if ((len(sys.argv)-1) != 6):
print 'too few parameters'
print 'usage: inputfile, col1, col2, d-value(not 0), p-val correction method(0 or 1)'
sys.exit()
try:
lines_arr = open(sys.argv[1]).readlines()
except IOError:
print'cannot open',sys.argv[1]
sys.exit()
try:
i = int(sys.argv[2]) #first column to compare
j = int(sys.argv[3]) #second colum to compare
d = float(sys.argv[4]) #correction factor
k = int(sys.argv[5]) #p-val correction method
outfile = open(sys.argv[6],'w') # output data
if (i>j):
print 'column order not correct col1 < col2'
print 'usage: inputfile, col1, col2, d-value, p-val correction method'
sys.exit()
try:
a = 1 / d
assert k in [0,1]
except ZeroDivisionError:
print 'd cannot be 0'
print 'usage: inputfile, col1, col2, d-value, p-val correction method'
sys.exit()
except:
print ' p-val correction should be 0 or 1 (0 = "bonferroni", 1 = "fdr")'
print 'usage: inputfile, col1, col2, d-value, p-val correction method'
sys.exit()
except ValueError:
print 'parameters are not integers'
print 'usage: inputfile, col1, col2, d-value, p-val correction method'
sys.exit()
fsize = len(lines_arr)
z1 = []
z2 = []
pz1 = []
pz2 = []
field = []
if d<1: # Z score calculation
for line in lines_arr:
line.strip()
field = line.split('\t')
x = int(field[j-1]) #input column 2
y = int(field[i-1]) #input column 1
if y>x:
z1.append(float((y - ((1/d)*x))/sqrt((1/d)*(x + y))))
            z2.append(float((2*(sqrt(y+(3/8.0))-sqrt((1/d)*(x+(3/8.0)))))/sqrt(1+(1/d))))
else:
tmp_var1 = x
x = y
y = tmp_var1
z1.append(float((y - (d*x))/sqrt(d*(x + y))))
            z2.append(float((2*(sqrt(y+(3/8.0))-sqrt(d*(x+(3/8.0)))))/sqrt(1+d)))
else: #d>1 Z score calculation
for line in lines_arr:
line.strip()
field = line.split('\t')
x = int(field[i-1]) #input column 1
y = int(field[j-1]) #input column 2
if y>x:
z1.append(float((y - (d*x))/sqrt(d*(x + y))))
            z2.append(float((2*(sqrt(y+(3/8.0))-sqrt(d*(x+(3/8.0)))))/sqrt(1+d)))
else:
tmp_var2 = x
x = y
y = tmp_var2
z1.append(float((y - ((1/d)*x))/sqrt((1/d)*(x + y))))
            z2.append(float((2*(sqrt(y+(3/8.0))-sqrt((1/d)*(x+(3/8.0)))))/sqrt(1+(1/d))))
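# Note (added for clarity): z1 is the plain normal approximation for comparing
# two Poisson counts whose expected ratio is d, z1 = (y - d*x)/sqrt(d*(x + y)),
# while z2 uses the Anscombe square-root transform,
# z2 = 2*(sqrt(y + 3/8) - sqrt(d*(x + 3/8)))/sqrt(1 + d),
# which stabilises the variance for small counts; the 3/8 terms are written as
# 3/8.0 above so they are not truncated to zero by Python 2 integer division.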
# P-value calculation for z1 and z2
for p in z1:
pz1.append(float(r.pnorm(-abs(float(p)))))
for q in z2:
pz2.append(float(r.pnorm(-abs(float(q)))))
# P-value correction for pz1 and pz2
if k == 0:
corrz1 = r.p_adjust(pz1,"bonferroni",fsize)
corrz2 = r.p_adjust(pz2,"bonferroni",fsize)
else:
corrz1 = r.p_adjust(pz1,"fdr",fsize)
corrz2 = r.p_adjust(pz2,"fdr",fsize)
#printing all columns
for n in range(fsize):
print >> outfile, "%s\t%4.3f\t%4.3f\t%8.6f\t%8.6f\t%8.6f\t%8.6f" %(lines_arr[n].strip(),z1[n],z2[n],pz1[n],pz2[n],corrz1[n],corrz2[n])
| 26.312
| 138
| 0.528732
|
e89544a81ee25b417c7a0746e18d235fee5a1837
| 3,072
|
py
|
Python
|
python/GafferSceneUI/ExternalProceduralUI.py
|
cwmartin/gaffer
|
1f8a0f75522105c9d5efefac6d55cb61c1038909
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferSceneUI/ExternalProceduralUI.py
|
cwmartin/gaffer
|
1f8a0f75522105c9d5efefac6d55cb61c1038909
|
[
"BSD-3-Clause"
] | null | null | null |
python/GafferSceneUI/ExternalProceduralUI.py
|
cwmartin/gaffer
|
1f8a0f75522105c9d5efefac6d55cb61c1038909
|
[
"BSD-3-Clause"
] | null | null | null |
##########################################################################
#
# Copyright (c) 2014, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import IECore
import Gaffer
import GafferUI
import GafferScene
##########################################################################
# Metadata
##########################################################################
Gaffer.Metadata.registerNodeDescription(
GafferScene.ExternalProcedural,
"""References external geometry procedurals and archives.""",
"fileName",
"The path to the external procedural or archive.",
"bound",
"The bounding box of the external procedural or archive.",
"parameters",
"An arbitrary set of parameters to be passed to the external procedural."
)
##########################################################################
# Widgets
##########################################################################
GafferUI.PlugValueWidget.registerCreator(
GafferScene.ExternalProcedural,
"fileName",
lambda plug : GafferUI.PathPlugValueWidget( plug,
path = Gaffer.FileSystemPath( "/", filter = Gaffer.FileSystemPath.createStandardFilter() ),
pathChooserDialogueKeywords = {
"bookmarks" : GafferUI.Bookmarks.acquire( plug, category = "procedurals" ),
"leaf" : True,
},
)
)
GafferUI.PlugValueWidget.registerCreator( GafferScene.ExternalProcedural, "parameters", GafferUI.CompoundDataPlugValueWidget, collapsed=None )
| 37.925926
| 142
| 0.644857
|
96418f6991c0afdd700d76c2156de1c3ac151154
| 833
|
py
|
Python
|
src/python/gen_keyphrase/drop_paper_keywords.py
|
gaybro8777/CiteSeerX
|
49ecb503fb1ced8e2c2e94c3e100e5d4dc410ea6
|
[
"Apache-2.0"
] | 108
|
2015-01-05T09:22:50.000Z
|
2022-03-15T23:59:53.000Z
|
src/python/gen_keyphrase/drop_paper_keywords.py
|
gaybro8777/CiteSeerX
|
49ecb503fb1ced8e2c2e94c3e100e5d4dc410ea6
|
[
"Apache-2.0"
] | 26
|
2015-01-04T10:35:53.000Z
|
2021-11-01T04:41:19.000Z
|
src/python/gen_keyphrase/drop_paper_keywords.py
|
gaybro8777/CiteSeerX
|
49ecb503fb1ced8e2c2e94c3e100e5d4dc410ea6
|
[
"Apache-2.0"
] | 48
|
2015-01-16T02:03:48.000Z
|
2022-03-15T23:59:55.000Z
|
#!/usr/bin/env python
import gflags
import sys
from mysql_util import init_db, close_db
FLAGS = gflags.FLAGS
gflags.DEFINE_string('drop_keyword', '', '')
def usage(cmd):
print 'Usage:', cmd, \
'--drop_keyword="the term to be dropped"'
def check_args(argv):
try:
argv = FLAGS(argv)
except gflags.FlagsError:
print FLAGS
if FLAGS.drop_keyword == '':
usage(argv[0])
raise Exception('--drop_keyword cannot be empty')
def main(argv):
check_args(argv)
db, cursor = init_db()
try:
cursor.execute("""DELETE FROM paper_keywords_noun WHERE ngram=%s""", (FLAGS.drop_keyword))
db.commit()
except:
sys.stdout.write("""Error in deleting paper_keywords_noun where ngram = %s""" % (FLAGS.drop_keyword))
db.rollback()
close_db(db, cursor)
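# Example invocation (illustrative):
#   python drop_paper_keywords.py --drop_keyword="hidden markov models"
# The flag value is matched verbatim against the ngram column of the
# paper_keywords_noun table.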
if __name__ == "__main__":
main(sys.argv)
| 20.317073
| 105
| 0.67587
|
8da63acbdad5e341cbdfa6d83e176c4f62410379
| 26,542
|
py
|
Python
|
soft-filter/pruning_cifar10_resnet.py
|
lehduong/NPTM
|
e1b8ec333db35e0e32e360151956b0f48f102735
|
[
"MIT"
] | 15
|
2021-09-20T09:39:52.000Z
|
2022-03-31T05:39:39.000Z
|
soft-filter/pruning_cifar10_resnet.py
|
lehduong/NPTM
|
e1b8ec333db35e0e32e360151956b0f48f102735
|
[
"MIT"
] | null | null | null |
soft-filter/pruning_cifar10_resnet.py
|
lehduong/NPTM
|
e1b8ec333db35e0e32e360151956b0f48f102735
|
[
"MIT"
] | null | null | null |
from __future__ import division
import os
import sys
import shutil
import time
import random
import argparse
import torch
import torch.backends.cudnn as cudnn
import torchvision.datasets as dset
import torchvision.transforms as transforms
from utils import AverageMeter, RecorderMeter, time_string, convert_secs2time
import models
import numpy as np
import wandb
model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
parser = argparse.ArgumentParser(description='Trains ResNeXt on CIFAR or ImageNet',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('data_path', type=str, help='Path to dataset')
parser.add_argument('--dataset', type=str, choices=['cifar10', 'cifar100',
'imagenet', 'svhn', 'stl10'], help='Choose between Cifar10/100 and ImageNet.')
parser.add_argument('--arch', metavar='ARCH', default='resnet18', choices=model_names,
                    help='model architecture: ' + ' | '.join(model_names) + ' (default: resnet18)')
# Optimization options
parser.add_argument('--epochs', type=int, default=300,
help='Number of epochs to train.')
parser.add_argument('--finetune_epochs', type=int, default=50,
help='Number of epochs to retrain.')
parser.add_argument('--batch_size', type=int, default=128, help='Batch size.')
parser.add_argument('--learning_rate', type=float,
default=0.1, help='The Learning Rate.')
parser.add_argument('--momentum', type=float, default=0.9, help='Momentum.')
parser.add_argument('--decay', type=float, default=0.0005,
help='Weight decay (L2 penalty).')
parser.add_argument('--schedule', type=int, nargs='+',
default=[150, 225], help='Decrease learning rate at these epochs.')
parser.add_argument('--gammas', type=float, nargs='+', default=[
0.1, 0.1], help='LR is multiplied by gamma on schedule, number of gammas should be equal to schedule')
# Checkpoints
parser.add_argument('--print_freq', default=200, type=int,
metavar='N', help='print frequency (default: 200)')
parser.add_argument('--save_path', type=str, default='./',
help='Folder to save checkpoints and log.')
parser.add_argument('--resume', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--start_epoch', default=0, type=int,
metavar='N', help='manual epoch number (useful on restarts)')
parser.add_argument('--evaluate', dest='evaluate',
action='store_true', help='evaluate model on validation set')
# Acceleration
parser.add_argument('--ngpu', type=int, default=1, help='0 = CPU.')
parser.add_argument('--workers', type=int, default=2,
help='number of data loading workers (default: 2)')
# random seed
parser.add_argument('--manualSeed', type=int, help='manual seed')
# compress rate
parser.add_argument('--rate', type=float, default=0.9,
help='compress rate of model')
parser.add_argument('--layer_begin', type=int, default=1,
help='compress layer of model')
parser.add_argument('--layer_end', type=int, default=1,
help='compress layer of model')
parser.add_argument('--layer_inter', type=int, default=1,
help='compress layer of model')
parser.add_argument('--epoch_prune', type=int, default=1,
help='compress layer of model')
parser.add_argument('--use_state_dict', dest='use_state_dict',
action='store_true', help='use state dcit or not')
parser.add_argument('--wandb_name', default='resnet_56_standard', type=str,
help='name of wandb run')
parser.add_argument('--finetune-schedule', type=int, nargs='+', default=[18, 32],
help='Decrease learning rate at these epochs.')
parser.add_argument('--init_lr', type=float, default=0.0008,
                    help='initialized learning rate when doing warm up (default: 0.0008)')
parser.add_argument('--gamma', type=float, default=0.1,
help='gamma of step learning rate decay (default: 0.1)')
parser.add_argument('--num_warmup', type=int, default=4,
help='number of epochs to increase learning rate (default: 4)')
parser.add_argument('--warmup_strategy', type=str, default='cos',
help='warmup strategy (default: cos)')
parser.add_argument(
'--div_factor',
type=float,
default=125,
    help='div factor of OneCycle Learning rate Schedule (default: 125)')
parser.add_argument(
'--final_div_factor',
type=float,
default=100,
help='final div factor of OneCycle Learning rate Schedule (default: 100)')
parser.add_argument(
'--pct_start',
type=float,
default=0.1,
help='pct_start of OneCycle Learning rate Schedule (default: 0.1)')
parser.add_argument('--use_llr', dest='use_llr', action='store_true')
parser.add_argument('--use_onecycle', dest='use_onecycle', action='store_true')
parser.add_argument('--random_mask', dest='random_mask', action='store_true')
parser.set_defaults(use_llr=False)
parser.set_defaults(use_onecycle=False)
parser.set_defaults(random_mask=False)
args = parser.parse_args()
args.use_cuda = args.ngpu > 0 and torch.cuda.is_available()
if args.manualSeed is None:
args.manualSeed = random.randint(1, 10000)
random.seed(args.manualSeed)
torch.manual_seed(args.manualSeed)
if args.use_cuda:
torch.cuda.manual_seed_all(args.manualSeed)
cudnn.benchmark = True
wandb.init(
name=args.wandb_name,
project='Soft_Filters_Pruning',
config={
**vars(args)}
)
def main():
# Init logger
if not os.path.isdir(args.save_path):
os.makedirs(args.save_path)
log = open(os.path.join(args.save_path,
'log_seed_{}.txt'.format(args.manualSeed)), 'w')
print_log('save path : {}'.format(args.save_path), log)
state = {k: v for k, v in args._get_kwargs()}
print_log(state, log)
print_log("Random Seed: {}".format(args.manualSeed), log)
print_log("python version : {}".format(
sys.version.replace('\n', ' ')), log)
print_log("torch version : {}".format(torch.__version__), log)
print_log("cudnn version : {}".format(
torch.backends.cudnn.version()), log)
print_log("Compress Rate: {}".format(args.rate), log)
print_log("Layer Begin: {}".format(args.layer_begin), log)
print_log("Layer End: {}".format(args.layer_end), log)
print_log("Layer Inter: {}".format(args.layer_inter), log)
print_log("Epoch prune: {}".format(args.epoch_prune), log)
# Init dataset
if not os.path.isdir(args.data_path):
os.makedirs(args.data_path)
if args.dataset == 'cifar10':
mean = [x / 255 for x in [125.3, 123.0, 113.9]]
std = [x / 255 for x in [63.0, 62.1, 66.7]]
elif args.dataset == 'cifar100':
mean = [x / 255 for x in [129.3, 124.1, 112.4]]
std = [x / 255 for x in [68.2, 65.4, 70.4]]
else:
assert False, "Unknow dataset : {}".format(args.dataset)
train_transform = transforms.Compose(
[transforms.RandomHorizontalFlip(), transforms.RandomCrop(32, padding=4), transforms.ToTensor(),
transforms.Normalize(mean, std)])
test_transform = transforms.Compose(
[transforms.ToTensor(), transforms.Normalize(mean, std)])
if args.dataset == 'cifar10':
train_data = dset.CIFAR10(
args.data_path, train=True, transform=train_transform, download=True)
test_data = dset.CIFAR10(
args.data_path, train=False, transform=test_transform, download=True)
num_classes = 10
elif args.dataset == 'cifar100':
train_data = dset.CIFAR100(
args.data_path, train=True, transform=train_transform, download=True)
test_data = dset.CIFAR100(
args.data_path, train=False, transform=test_transform, download=True)
num_classes = 100
elif args.dataset == 'svhn':
train_data = dset.SVHN(args.data_path, split='train',
transform=train_transform, download=True)
test_data = dset.SVHN(args.data_path, split='test',
transform=test_transform, download=True)
num_classes = 10
elif args.dataset == 'stl10':
train_data = dset.STL10(
args.data_path, split='train', transform=train_transform, download=True)
test_data = dset.STL10(args.data_path, split='test',
transform=test_transform, download=True)
num_classes = 10
    elif args.dataset == 'imagenet':
        assert False, 'ImageNet is not supported by this script'
    else:
        assert False, 'Unsupported dataset : {}'.format(args.dataset)
train_loader = torch.utils.data.DataLoader(train_data, batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, pin_memory=True)
test_loader = torch.utils.data.DataLoader(test_data, batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=True)
print_log("=> creating model '{}'".format(args.arch), log)
# Init model, criterion, and optimizer
net = models.__dict__[args.arch](num_classes)
print_log("=> network :\n {}".format(net), log)
net = torch.nn.DataParallel(net, device_ids=list(range(args.ngpu)))
# define loss function (criterion) and optimizer
criterion = torch.nn.CrossEntropyLoss()
optimizer = torch.optim.SGD(net.parameters(), state['learning_rate'], momentum=state['momentum'],
weight_decay=state['decay'], nesterov=True)
lr_scheduler = None
if args.use_cuda:
net.cuda()
criterion.cuda()
recorder = RecorderMeter(args.epochs+args.finetune_epochs)
# optionally resume from a checkpoint
if args.resume:
if os.path.isfile(args.resume):
print_log("=> loading checkpoint '{}'".format(args.resume), log)
checkpoint = torch.load(args.resume)
recorder = checkpoint['recorder']
args.start_epoch = checkpoint['epoch']
if args.use_state_dict:
net.load_state_dict(checkpoint['state_dict'])
else:
net = checkpoint['state_dict']
optimizer.load_state_dict(checkpoint['optimizer'])
print_log("=> loaded checkpoint '{}' (epoch {})" .format(
args.resume, checkpoint['epoch']), log)
else:
print_log("=> no checkpoint found at '{}'".format(args.resume), log)
else:
print_log(
"=> do not use any checkpoint for {} model".format(args.arch), log)
if args.evaluate:
time1 = time.time()
validate(test_loader, net, criterion, log)
time2 = time.time()
print('function took %0.3f ms' % ((time2-time1)*1000.0))
return
m = Mask(net, args.random_mask)
m.init_length()
comp_rate = args.rate
print("-"*10+"one epoch begin"+"-"*10)
print("the compression rate now is %f" % comp_rate)
val_acc_1, val_los_1 = validate(test_loader, net, criterion, log)
print(" accu before is: %.3f %%" % val_acc_1)
m.model = net
m.init_mask(comp_rate)
# m.if_zero()
m.do_mask()
net = m.model
# m.if_zero()
if args.use_cuda:
net = net.cuda()
val_acc, val_loss = validate(test_loader, net, criterion, log)
print(" accu after is: %s %%" % val_acc)
# Main loop
start_time = time.time()
epoch_time = AverageMeter()
best = 0
for epoch in range(args.start_epoch, args.epochs):
current_learning_rate = adjust_learning_rate(
optimizer, epoch, args.gammas, args.schedule)
need_hour, need_mins, need_secs = convert_secs2time(
epoch_time.avg * (args.epochs-epoch))
need_time = '[Need: {:02d}:{:02d}:{:02d}]'.format(
need_hour, need_mins, need_secs)
print_log('\n==>>{:s} [Epoch={:03d}/{:03d}] {:s} [learning_rate={:6.4f}]'.format(time_string(), epoch, args.epochs, need_time, current_learning_rate)
+ ' [Best : Accuracy={:.2f}, Error={:.2f}]'.format(recorder.max_accuracy(False), 100-recorder.max_accuracy(False)), log)
# train for one epoch
train_acc, train_los = train(
train_loader, net, criterion, optimizer, epoch, log, lr_scheduler)
# evaluate on validation set
val_acc_1, val_los_1 = validate(test_loader, net, criterion, log)
if (epoch % args.epoch_prune == 0 or epoch == args.epochs-1):
m.model = net
m.if_zero()
m.init_mask(comp_rate)
m.do_mask()
m.if_zero()
net = m.model
if args.use_cuda:
net = net.cuda()
val_acc_2, val_loss_2 = validate(test_loader, net, criterion, log)
is_best = recorder.update(
epoch, train_los, train_acc, val_loss_2, val_acc_2)
if is_best:
best = val_acc_2
wandb.log({'top1': val_acc_2, 'best_top1': recorder.max_accuracy(
False), 'lr': current_learning_rate})
save_checkpoint({
'epoch': epoch + 1,
'arch': args.arch,
'state_dict': net,
'recorder': recorder,
'optimizer': optimizer.state_dict(),
}, is_best, args.save_path, 'checkpoint.pth.tar')
# measure elapsed time
epoch_time.update(time.time() - start_time)
start_time = time.time()
#recorder.plot_curve( os.path.join(args.save_path, 'curve.png') )
###############################################################################
# Fine-tuning
if args.finetune_epochs > 0:
print("-"*10+"start fine-tuning now"+"-"*10)
print("the number of finetuning epochs is %f" % args.finetune_epochs)
start_time = time.time()
epoch_time = AverageMeter()
recorder.reset(args.finetune_epochs)
# create a new optimizer to reset all momentum
# since standard retrain usually ignore statedict of optimizer of original training
# also foster network to converge to new optima
# TODO: Fixed hardcode hyperparam in scheduler and optimizer
optimizer = torch.optim.SGD(net.parameters(), 0.0008, momentum=state['momentum'],
weight_decay=state['decay'], nesterov=True)
if args.use_onecycle:
lr_scheduler = torch.optim.lr_scheduler.OneCycleLR(optimizer, max_lr=0.1, div_factor=args.div_factor,
epochs=args.finetune_epochs, steps_per_epoch=len(train_loader), pct_start=args.pct_start,
final_div_factor=args.final_div_factor)
for epoch in range(0, args.finetune_epochs):
need_hour, need_mins, need_secs = convert_secs2time(
epoch_time.avg * (args.finetune_epochs))
need_time = '[Need: {:02d}:{:02d}:{:02d}]'.format(
need_hour, need_mins, need_secs)
current_learning_rate = optimizer.param_groups[0]['lr']
print_log('\n==>>{:s} [Epoch={:03d}/{:03d}] {:s} [learning_rate={:6.4f}]'.format(time_string(), epoch, args.finetune_epochs, need_time, current_learning_rate)
+ ' [Best : Accuracy={:.2f}, Error={:.2f}]'.format(recorder.max_accuracy(False), 100-recorder.max_accuracy(False)), log)
# train for one epoch
train_acc, train_los = train(
train_loader, net, criterion, optimizer, epoch, log, lr_scheduler)
# make sure to not update weights equal 0 i.e. keep the mask during fine-tuning
m.model = net
m.if_zero()
m.do_mask()
m.if_zero()
net = m.model
if args.use_cuda:
net = net.cuda()
# evaluate on validation set
val_acc, val_loss = validate(test_loader, net, criterion, log)
is_best = recorder.update(
epoch, train_los, train_acc, val_loss, val_acc)
if is_best:
best = val_acc
wandb.log({'top1': val_acc, 'best_top1': recorder.max_accuracy(False),
'lr': current_learning_rate})
save_checkpoint({
'epoch': epoch + 1,
'arch': args.arch,
'state_dict': net,
'recorder': recorder,
'optimizer': optimizer.state_dict(),
}, is_best, args.save_path, 'finetuned.pth.tar')
# measure elapsed time
epoch_time.update(time.time() - start_time)
start_time = time.time()
#recorder.plot_curve( os.path.join(args.save_path, 'curve.png') )
log.close()
wandb.save(os.path.join(args.save_path, '*'))
# train function (forward, backward, update)
def train(train_loader, model, criterion, optimizer, epoch, log, lr_scheduler):
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
# switch to train mode
model.train()
end = time.time()
for i, (input, target) in enumerate(train_loader):
# measure data loading time
data_time.update(time.time() - end)
if args.use_cuda:
target = target.cuda()
input = input.cuda()
input_var = torch.autograd.Variable(input)
target_var = torch.autograd.Variable(target)
# compute output
output = model(input_var)
loss = criterion(output, target_var)
# measure accuracy and record loss
prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
losses.update(loss.data.item(), input.size(0))
top1.update(prec1.item(), input.size(0))
top5.update(prec5.item(), input.size(0))
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
if lr_scheduler:
lr_scheduler.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0:
print_log(' Epoch: [{:03d}][{:03d}/{:03d}] '
'Time {batch_time.val:.3f} ({batch_time.avg:.3f}) '
'Data {data_time.val:.3f} ({data_time.avg:.3f}) '
'Loss {loss.val:.4f} ({loss.avg:.4f}) '
'Prec@1 {top1.val:.3f} ({top1.avg:.3f}) '
'Prec@5 {top5.val:.3f} ({top5.avg:.3f}) '.format(
epoch, i, len(train_loader), batch_time=batch_time,
data_time=data_time, loss=losses, top1=top1, top5=top5) + time_string(), log)
print_log(' **Train** Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f} Error@1 {error1:.3f}'.format(
top1=top1, top5=top5, error1=100-top1.avg), log)
return top1.avg, losses.avg
def validate(val_loader, model, criterion, log):
losses = AverageMeter()
top1 = AverageMeter()
top5 = AverageMeter()
# switch to evaluate mode
model.eval()
for i, (input, target) in enumerate(val_loader):
if args.use_cuda:
target = target.cuda()
input = input.cuda()
input_var = torch.autograd.Variable(input, volatile=True)
target_var = torch.autograd.Variable(target, volatile=True)
# compute output
output = model(input_var)
loss = criterion(output, target_var)
# measure accuracy and record loss
prec1, prec5 = accuracy(output.data, target, topk=(1, 5))
losses.update(loss.data.item(), input.size(0))
top1.update(prec1.item(), input.size(0))
top5.update(prec5.item(), input.size(0))
print_log(' **Test** Prec@1 {top1.avg:.3f} Prec@5 {top5.avg:.3f} Error@1 {error1:.3f}'.format(
top1=top1, top5=top5, error1=100-top1.avg), log)
return top1.avg, losses.avg
def print_log(print_string, log):
print("{}".format(print_string))
log.write('{}\n'.format(print_string))
log.flush()
def save_checkpoint(state, is_best, save_path, filename):
filename = os.path.join(save_path, filename)
torch.save(state, filename)
if is_best:
bestname = os.path.join(save_path, 'model_best.pth.tar')
shutil.copyfile(filename, bestname)
def adjust_learning_rate(optimizer, epoch, gammas, schedule):
"""Sets the learning rate to the initial LR decayed by 10 every 30 epochs"""
lr = args.learning_rate
assert len(gammas) == len(
schedule), "length of gammas and schedule should be equal"
for (gamma, step) in zip(gammas, schedule):
if (epoch >= step):
lr = lr * gamma
else:
break
for param_group in optimizer.param_groups:
param_group['lr'] = lr
return lr
def accuracy(output, target, topk=(1,)):
"""Computes the precision@k for the specified values of k"""
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].view(-1).float().sum(0)
res.append(correct_k.mul_(100.0 / batch_size))
return res
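# For example (added illustration), with topk=(1, 5) as used in train() and
# validate(), the returned list holds the percentage of samples whose true
# label is the single highest logit and the percentage whose true label
# appears among the five highest logits.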
class Mask:
def __init__(self, model, random_mask=False):
self.model_size = {}
self.model_length = {}
self.compress_rate = {}
self.mat = {}
self.model = model
self.mask_index = []
self.random_mask = random_mask
def get_codebook(self, weight_torch, compress_rate, length):
weight_vec = weight_torch.view(length)
weight_np = weight_vec.cpu().numpy()
weight_abs = np.abs(weight_np)
weight_sort = np.sort(weight_abs)
threshold = weight_sort[int(length * (1-compress_rate))]
weight_np[weight_np <= -threshold] = 1
weight_np[weight_np >= threshold] = 1
weight_np[weight_np != 1] = 0
print("codebook done")
return weight_np
def get_filter_codebook(self, weight_torch, compress_rate, length):
"""
        Return a 'codebook' determining whether each filter of a conv layer will be pruned or not
"""
codebook = np.ones(length)
if len(weight_torch.size()) == 4:
filter_pruned_num = int(weight_torch.size()[0]*(1-compress_rate))
weight_vec = weight_torch.view(weight_torch.size()[0], -1)
norm2 = torch.norm(weight_vec, 2, 1)
norm2_np = norm2.cpu().numpy()
if self.random_mask:
norm2_np = np.random.random_sample(norm2_np.shape)
filter_index = norm2_np.argsort()[:filter_pruned_num]
# norm1_sort = np.sort(norm1_np)
# threshold = norm1_sort[int (weight_torch.size()[0] * (1-compress_rate) )]
kernel_length = weight_torch.size(
)[1] * weight_torch.size()[2] * weight_torch.size()[3]
for x in range(0, len(filter_index)):
codebook[filter_index[x] *
kernel_length: (filter_index[x]+1) * kernel_length] = 0
else:
pass
return codebook
def convert2tensor(self, x):
x = torch.FloatTensor(x)
return x
def init_length(self):
for index, item in enumerate(self.model.parameters()):
self.model_size[index] = item.size()
for index1 in self.model_size:
for index2 in range(0, len(self.model_size[index1])):
if index2 == 0:
self.model_length[index1] = self.model_size[index1][0]
else:
self.model_length[index1] *= self.model_size[index1][index2]
def init_rate(self, layer_rate):
"""
        Initialize the compression rate and the indices of the layers to be pruned
"""
for index, item in enumerate(self.model.parameters()):
self.compress_rate[index] = 1
for key in range(args.layer_begin, args.layer_end + 1, args.layer_inter):
self.compress_rate[key] = layer_rate
# different setting for different architecture
if args.arch == 'resnet20':
last_index = 57
elif args.arch == 'resnet32':
last_index = 93
elif args.arch == 'resnet56':
last_index = 165
elif args.arch == 'resnet110':
last_index = 327
self.mask_index = [x for x in range(0, last_index, 3)]
# self.mask_index = [x for x in range (0,330,3)]
def init_mask(self, layer_rate):
"""
Compute the mask for each layer (stored in self.mat)
"""
self.init_rate(layer_rate)
for index, item in enumerate(self.model.parameters()):
if(index in self.mask_index):
self.mat[index] = self.get_filter_codebook(
item.data, self.compress_rate[index], self.model_length[index])
self.mat[index] = self.convert2tensor(self.mat[index])
if args.use_cuda:
self.mat[index] = self.mat[index].cuda()
print("mask Ready")
def do_mask(self):
"""
Zero out weights of network according to the mask
"""
for index, item in enumerate(self.model.parameters()):
if(index in self.mask_index):
a = item.data.view(self.model_length[index])
b = a * self.mat[index]
item.data = b.view(self.model_size[index])
print("mask Done")
def if_zero(self):
"""
Print number of nonzero weights of network
"""
for index, item in enumerate(self.model.parameters()):
# if(index in self.mask_index):
if(index == 0):
a = item.data.view(self.model_length[index])
b = a.cpu().numpy()
print("number of nonzero weight is %d, zero is %d" %
(np.count_nonzero(b), len(b) - np.count_nonzero(b)))
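# Illustrative standalone use of Mask (a sketch; init_rate() reads the parsed
# argparse flags such as --arch, --layer_begin/--layer_end/--layer_inter and
# --rate, so args must already be populated as in main()):
#
#   net = models.__dict__[args.arch](10)
#   m = Mask(net)
#   m.init_length()   # record every parameter tensor's shape and flattened length
#   m.init_mask(0.7)  # per pruned conv layer, keep the filters with the largest L2 norms (70% here)
#   m.do_mask()       # zero the pruned filters in place
#   m.if_zero()       # report zero / non-zero weight counts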
if __name__ == '__main__':
main()
| 40.033183
| 170
| 0.602178
|
b4df69b25526776c01b0eda9fd7f618f5e1f9893
| 3,565
|
py
|
Python
|
time_sequence_models/stackabuse/time_series_prediction_lstm.py
|
franpena-kth/learning-deep-learning
|
9cd287b602dee1358672c4189445721a9c24f107
|
[
"MIT"
] | null | null | null |
time_sequence_models/stackabuse/time_series_prediction_lstm.py
|
franpena-kth/learning-deep-learning
|
9cd287b602dee1358672c4189445721a9c24f107
|
[
"MIT"
] | null | null | null |
time_sequence_models/stackabuse/time_series_prediction_lstm.py
|
franpena-kth/learning-deep-learning
|
9cd287b602dee1358672c4189445721a9c24f107
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import seaborn as sns
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
flight_data = sns.load_dataset("flights")
flight_data.head()
fig_size = plt.rcParams["figure.figsize"]
fig_size[0] = 15
fig_size[1] = 5
plt.rcParams["figure.figsize"] = fig_size
plt.title('Month vs Passenger')
plt.ylabel('Total Passengers')
plt.xlabel('Months')
plt.grid(True)
plt.autoscale(axis='x',tight=True)
plt.plot(flight_data['passengers'])
all_data = flight_data['passengers'].values.astype(float)
test_data_size = 12
train_data = all_data[:-test_data_size]
test_data = all_data[-test_data_size:]
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler(feature_range=(-1, 1))
train_data_normalized = scaler.fit_transform(train_data.reshape(-1, 1))
train_data_normalized = torch.FloatTensor(train_data_normalized).view(-1)
train_window = 12
def create_inout_sequences(input_data, tw):
inout_seq = []
L = len(input_data)
for i in range(L-tw):
train_seq = input_data[i:i+tw]
train_label = input_data[i+tw:i+tw+1]
inout_seq.append((train_seq ,train_label))
return inout_seq
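# For example (added illustration, shown with plain numbers although the script
# passes a normalised FloatTensor): with tw = 3 and input [10, 20, 30, 40, 50]
# the function yields ([10, 20, 30], [40]) and ([20, 30, 40], [50]); each
# sample is a window of tw values labelled by the value that follows it.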
train_inout_seq = create_inout_sequences(train_data_normalized, train_window)
class LSTM(nn.Module):
def __init__(self, input_size=1, hidden_layer_size=100, output_size=1):
super().__init__()
self.hidden_layer_size = hidden_layer_size
self.lstm = nn.LSTM(input_size, hidden_layer_size)
self.linear = nn.Linear(hidden_layer_size, output_size)
self.hidden_cell = (torch.zeros(1,1,self.hidden_layer_size),
torch.zeros(1,1,self.hidden_layer_size))
def forward(self, input_seq):
# print('input_seq', input_seq.shape)
lstm_out, self.hidden_cell = self.lstm(input_seq.view(len(input_seq) ,1, -1), self.hidden_cell)
predictions = self.linear(lstm_out.view(len(input_seq), -1))
return predictions[-1]
model = LSTM()
loss_function = nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
epochs = 150
for i in range(epochs):
for seq, labels in train_inout_seq:
optimizer.zero_grad()
model.hidden_cell = (torch.zeros(1, 1, model.hidden_layer_size),
torch.zeros(1, 1, model.hidden_layer_size))
y_pred = model(seq)
single_loss = loss_function(y_pred, labels)
single_loss.backward()
optimizer.step()
if i%25 == 1:
print(f'epoch: {i:3} loss: {single_loss.item():10.8f}')
print(f'epoch: {i:3} loss: {single_loss.item():10.10f}')
fut_pred = 12
test_inputs = train_data_normalized[-train_window:].tolist()
model.eval()
for i in range(fut_pred):
seq = torch.FloatTensor(test_inputs[-train_window:])
with torch.no_grad():
        # reset the LSTM state for each prediction window; forward() reads self.hidden_cell
        model.hidden_cell = (torch.zeros(1, 1, model.hidden_layer_size),
                             torch.zeros(1, 1, model.hidden_layer_size))
test_inputs.append(model(seq).item())
actual_predictions = scaler.inverse_transform(np.array(test_inputs[train_window:]).reshape(-1, 1))
print(actual_predictions)
x = np.arange(132, 144, 1)
print(x)
plt.title('Month vs Passenger')
plt.ylabel('Total Passengers')
plt.grid(True)
plt.autoscale(axis='x', tight=True)
plt.plot(flight_data['passengers'])
plt.plot(x,actual_predictions)
plt.show()
plt.title('Month vs Passenger')
plt.ylabel('Total Passengers')
plt.grid(True)
plt.autoscale(axis='x', tight=True)
plt.plot(flight_data['passengers'][-train_window:])
plt.plot(x,actual_predictions)
plt.show()
| 27.21374
| 103
| 0.702384
|
6799e5f52c48c801bcc1b710dcf055c275a993b6
| 1,989
|
py
|
Python
|
308-Range-Sum-Query-2D---Mutable/solution.py
|
Tanych/CodeTracking
|
86f1cb98de801f58c39d9a48ce9de12df7303d20
|
[
"MIT"
] | null | null | null |
308-Range-Sum-Query-2D---Mutable/solution.py
|
Tanych/CodeTracking
|
86f1cb98de801f58c39d9a48ce9de12df7303d20
|
[
"MIT"
] | null | null | null |
308-Range-Sum-Query-2D---Mutable/solution.py
|
Tanych/CodeTracking
|
86f1cb98de801f58c39d9a48ce9de12df7303d20
|
[
"MIT"
] | null | null | null |
class NumMatrix(object):
def __init__(self, matrix):
"""
initialize your data structure here.
:type matrix: List[List[int]]
"""
self.row=len(matrix)
if self.row==0: return
self.col=len(matrix[0])
self.bintree2d=[[0 for _ in xrange(self.col+1)] for _ in xrange(self.row+1)]
self.nums=[[0 for _ in xrange(self.col)] for _ in xrange(self.row)]
for i in xrange(self.row):
for j in xrange(self.col):
self.update(i,j,matrix[i][j])
def update(self, row, col, val):
"""
update the element at matrix[row,col] to val.
:type row: int
:type col: int
:type val: int
:rtype: void
"""
if self.row==0 or self.col==0:
return
diff=val-self.nums[row][col]
self.nums[row][col]=val
i=row+1
while i<self.row+1:
j=col+1
while j<self.col+1:
self.bintree2d[i][j]+=diff
j+=j&(-j)
i+=i&(-i)
def sumrange(self,row,col):
sumres=0
i=row
while i>0:
j=col
while j>0:
sumres+=self.bintree2d[i][j]
j-=j&(-j)
i-=i&(-i)
return sumres
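    # Note on the index arithmetic (added comment): i & (-i) isolates the lowest
    # set bit of i, so update() climbs the 2D Fenwick (binary indexed) tree with
    # i += i & (-i) while sumrange() descends with i -= i & (-i); update and
    # sumRegion therefore both cost O(log(rows) * log(cols)).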
def sumRegion(self, row1, col1, row2, col2):
"""
sum of elements matrix[(row1,col1)..(row2,col2)], inclusive.
:type row1: int
:type col1: int
:type row2: int
:type col2: int
:rtype: int
"""
if self.row==0 or self.col==0:
return 0
return self.sumrange(row2+1,col2+1)+self.sumrange(row1,col1)-\
self.sumrange(row1,col2+1)-self.sumrange(row2+1,col1)
# Your NumMatrix object will be instantiated and called as such:
# numMatrix = NumMatrix(matrix)
# numMatrix.sumRegion(0, 1, 2, 3)
# numMatrix.update(1, 1, 10)
# numMatrix.sumRegion(1, 2, 3, 4)
| 29.686567
| 84
| 0.501257
|
fc1cae9b62361384f80235ed39539aadfa8000db
| 16,857
|
py
|
Python
|
tests/validators.py
|
BabisK/microk8s
|
c43ce5c961a3b577aca6f8cf76fada964c2334c9
|
[
"Apache-2.0"
] | 6,286
|
2018-07-20T15:55:22.000Z
|
2022-02-22T06:59:26.000Z
|
tests/validators.py
|
BabisK/microk8s
|
c43ce5c961a3b577aca6f8cf76fada964c2334c9
|
[
"Apache-2.0"
] | 2,230
|
2018-07-20T14:14:33.000Z
|
2022-02-22T08:22:21.000Z
|
tests/validators.py
|
BabisK/microk8s
|
c43ce5c961a3b577aca6f8cf76fada964c2334c9
|
[
"Apache-2.0"
] | 706
|
2018-07-22T16:35:32.000Z
|
2022-02-17T06:32:10.000Z
|
import time
import os
import re
import requests
import platform
import yaml
import subprocess
from utils import (
kubectl,
wait_for_pod_state,
kubectl_get,
wait_for_installation,
docker,
update_yaml_with_arch,
run_until_success,
)
def validate_dns_dashboard():
"""
Validate the dashboard addon by trying to access the kubernetes dashboard.
The dashboard will return an HTML indicating that it is up and running.
"""
wait_for_pod_state("", "kube-system", "running", label="k8s-app=kubernetes-dashboard")
wait_for_pod_state("", "kube-system", "running", label="k8s-app=dashboard-metrics-scraper")
attempt = 30
while attempt > 0:
try:
output = kubectl(
"get "
"--raw "
"/api/v1/namespaces/kube-system/services/https:kubernetes-dashboard:/proxy/"
)
if "Kubernetes Dashboard" in output:
break
except subprocess.CalledProcessError:
pass
time.sleep(10)
attempt -= 1
assert attempt > 0
def validate_storage():
"""
Validate storage by creating a PVC.
"""
wait_for_pod_state("", "kube-system", "running", label="k8s-app=hostpath-provisioner")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "pvc.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("hostpath-test-pod", "default", "running")
attempt = 50
while attempt >= 0:
output = kubectl("get pvc")
if "Bound" in output:
break
time.sleep(2)
attempt -= 1
    # Make sure the test pod writes data to the storage
found = False
for root, dirs, files in os.walk("/var/snap/microk8s/common/default-storage"):
for file in files:
if file == "dates":
found = True
assert found
assert "myclaim" in output
assert "Bound" in output
kubectl("delete -f {}".format(manifest))
def common_ingress():
"""
Perform the Ingress validations that are common for all
the Ingress controllers.
"""
attempt = 50
while attempt >= 0:
output = kubectl("get ing")
if "microbot.127.0.0.1.nip.io" in output:
break
time.sleep(5)
attempt -= 1
assert "microbot.127.0.0.1.nip.io" in output
service_ok = False
attempt = 50
while attempt >= 0:
try:
resp = requests.get("http://microbot.127.0.0.1.nip.io/")
if resp.status_code == 200 and "microbot.png" in resp.content.decode("utf-8"):
service_ok = True
break
except requests.RequestException:
time.sleep(5)
attempt -= 1
assert service_ok
def validate_ingress():
"""
Validate ingress by creating a ingress rule.
"""
daemonset = kubectl("get ds")
if "nginx-ingress-microk8s-controller" in daemonset:
wait_for_pod_state("", "default", "running", label="app=default-http-backend")
wait_for_pod_state("", "default", "running", label="name=nginx-ingress-microk8s")
else:
wait_for_pod_state("", "ingress", "running", label="name=nginx-ingress-microk8s")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "ingress.yaml")
update_yaml_with_arch(manifest)
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "default", "running", label="app=microbot")
common_ingress()
kubectl("delete -f {}".format(manifest))
def validate_ambassador():
"""
Validate the Ambassador API Gateway by creating a ingress rule.
"""
if platform.machine() != "x86_64":
print("Ambassador tests are only relevant in x86 architectures")
return
wait_for_pod_state("", "ambassador", "running", label="product=aes")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "ingress.yaml")
update_yaml_with_arch(manifest)
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "default", "running", label="app=microbot")
    # `Ingress`es must be annotated for being recognized by Ambassador
kubectl("annotate ingress microbot-ingress-nip kubernetes.io/ingress.class=ambassador")
common_ingress()
kubectl("delete -f {}".format(manifest))
def validate_gpu():
"""
Validate gpu by trying a cuda-add.
"""
if platform.machine() != "x86_64":
print("GPU tests are only relevant in x86 architectures")
return
wait_for_pod_state("", "kube-system", "running", label="name=nvidia-device-plugin-ds")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "cuda-add.yaml")
get_pod = kubectl_get("po")
if "cuda-vector-add" in str(get_pod):
# Cleanup
kubectl("delete -f {}".format(manifest))
time.sleep(10)
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("cuda-vector-add", "default", "terminated")
result = kubectl("logs pod/cuda-vector-add")
assert "PASSED" in result
def validate_inaccel():
"""
Validate inaccel
"""
if platform.machine() != "x86_64":
print("FPGA tests are only relevant in x86 architectures")
return
wait_for_pod_state("", "kube-system", "running", label="app.kubernetes.io/name=fpga-operator")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "inaccel.yaml")
get_pod = kubectl_get("po")
if "inaccel-vadd" in str(get_pod):
# Cleanup
kubectl("delete -f {}".format(manifest))
time.sleep(10)
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("inaccel-vadd", "default", "terminated")
result = kubectl("logs pod/inaccel-vadd")
assert "PASSED" in result
def validate_istio():
"""
Validate istio by deploying the bookinfo app.
"""
if platform.machine() != "x86_64":
print("Istio tests are only relevant in x86 architectures")
return
wait_for_installation()
istio_services = [
"pilot",
"egressgateway",
"ingressgateway",
]
for service in istio_services:
wait_for_pod_state("", "istio-system", "running", label="istio={}".format(service))
cmd = "/snap/bin/microk8s.istioctl verify-install"
return run_until_success(cmd, timeout_insec=900, err_out="no")
def validate_knative():
"""
Validate Knative by deploying the helloworld-go app.
"""
if platform.machine() != "x86_64":
print("Knative tests are only relevant in x86 architectures")
return
wait_for_installation()
knative_services = [
"activator",
"autoscaler",
"controller",
]
for service in knative_services:
wait_for_pod_state("", "knative-serving", "running", label="app={}".format(service))
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "knative-helloworld.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "default", "running", label="serving.knative.dev/service=helloworld-go")
kubectl("delete -f {}".format(manifest))
def validate_registry():
"""
Validate the private registry.
"""
wait_for_pod_state("", "container-registry", "running", label="app=registry")
pvc_stdout = kubectl("get pvc registry-claim -n container-registry -o yaml")
pvc_yaml = yaml.safe_load(pvc_stdout)
storage = pvc_yaml["spec"]["resources"]["requests"]["storage"]
assert re.match("(^[2-9][0-9]{1,}|^[1-9][0-9]{2,})(Gi$)", storage)
docker("pull busybox")
docker("tag busybox localhost:32000/my-busybox")
docker("push localhost:32000/my-busybox")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "bbox-local.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("busybox", "default", "running")
output = kubectl("describe po busybox")
assert "localhost:32000/my-busybox" in output
kubectl("delete -f {}".format(manifest))
def validate_forward():
"""
Validate ports are forwarded
"""
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "nginx-pod.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "default", "running", label="app=nginx")
os.system("killall kubectl")
os.system("/snap/bin/microk8s.kubectl port-forward pod/nginx 5123:80 &")
attempt = 10
while attempt >= 0:
try:
resp = requests.get("http://localhost:5123")
if resp.status_code == 200:
break
except requests.RequestException:
pass
attempt -= 1
time.sleep(2)
assert resp.status_code == 200
def validate_metrics_server():
"""
Validate the metrics server works
"""
wait_for_pod_state("", "kube-system", "running", label="k8s-app=metrics-server")
attempt = 30
while attempt > 0:
try:
output = kubectl("get --raw /apis/metrics.k8s.io/v1beta1/pods")
if "PodMetricsList" in output:
break
except subprocess.CalledProcessError:
pass
time.sleep(10)
attempt -= 1
assert attempt > 0
def validate_prometheus():
"""
Validate the prometheus operator
"""
if platform.machine() != "x86_64":
print("Prometheus tests are only relevant in x86 architectures")
return
wait_for_pod_state("prometheus-k8s-0", "monitoring", "running", timeout_insec=1200)
wait_for_pod_state("alertmanager-main-0", "monitoring", "running", timeout_insec=1200)
def validate_fluentd():
"""
Validate fluentd
"""
if platform.machine() != "x86_64":
print("Fluentd tests are only relevant in x86 architectures")
return
wait_for_pod_state("elasticsearch-logging-0", "kube-system", "running")
wait_for_pod_state("", "kube-system", "running", label="k8s-app=fluentd-es")
wait_for_pod_state("", "kube-system", "running", label="k8s-app=kibana-logging")
def validate_jaeger():
"""
Validate the jaeger operator
"""
if platform.machine() != "x86_64":
print("Jaeger tests are only relevant in x86 architectures")
return
wait_for_pod_state("", "default", "running", label="name=jaeger-operator")
attempt = 30
while attempt > 0:
try:
output = kubectl("get ingress")
if "simplest-query" in output:
break
except subprocess.CalledProcessError:
pass
time.sleep(2)
attempt -= 1
assert attempt > 0
def validate_linkerd():
"""
Validate Linkerd by deploying emojivoto.
"""
if platform.machine() != "x86_64":
print("Linkerd tests are only relevant in x86 architectures")
return
wait_for_installation()
wait_for_pod_state(
"",
"linkerd",
"running",
label="linkerd.io/control-plane-component=controller",
timeout_insec=300,
)
print("Linkerd controller up and running.")
wait_for_pod_state(
"",
"linkerd",
"running",
label="linkerd.io/control-plane-component=proxy-injector",
timeout_insec=300,
)
print("Linkerd proxy injector up and running.")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "emojivoto.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "emojivoto", "running", label="app=emoji-svc", timeout_insec=600)
kubectl("delete -f {}".format(manifest))
def validate_rbac():
"""
Validate RBAC is actually on
"""
output = kubectl("auth can-i --as=system:serviceaccount:default:default view pod", err_out="no")
assert "no" in output
output = kubectl("auth can-i --as=admin --as-group=system:masters view pod")
assert "yes" in output
def cilium(cmd, timeout_insec=300, err_out=None):
"""
Do a cilium <cmd>
Args:
cmd: left part of cilium <left_part> command
timeout_insec: timeout for this job
err_out: If command fails and this is the output, return.
Returns: the cilium response in a string
"""
cmd = "/snap/bin/microk8s.cilium " + cmd
return run_until_success(cmd, timeout_insec, err_out)
def validate_cilium():
"""
Validate cilium by deploying the bookinfo app.
"""
if platform.machine() != "x86_64":
print("Cilium tests are only relevant in x86 architectures")
return
wait_for_installation()
wait_for_pod_state("", "kube-system", "running", label="k8s-app=cilium")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "nginx-pod.yaml")
    # Try up to ten times to get nginx running under cilium
for attempt in range(0, 10):
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "default", "running", label="app=nginx")
output = cilium("endpoint list -o json", timeout_insec=20)
if "nginx" in output:
kubectl("delete -f {}".format(manifest))
break
else:
print("Cilium not ready will retry testing.")
kubectl("delete -f {}".format(manifest))
time.sleep(20)
else:
print("Cilium testing failed.")
assert False
def validate_multus():
"""
Validate multus by making sure the multus pod is running.
"""
wait_for_installation()
wait_for_pod_state("", "kube-system", "running", label="app=multus")
def validate_kubeflow():
"""
Validate kubeflow
"""
if platform.machine() != "x86_64":
print("Kubeflow tests are only relevant in x86 architectures")
return
wait_for_pod_state("ambassador-operator-0", "kubeflow", "running")
def validate_metallb_config(ip_ranges="192.168.0.105"):
"""
Validate Metallb
"""
if platform.machine() != "x86_64":
print("Metallb tests are only relevant in x86 architectures")
return
out = kubectl("get configmap config -n metallb-system -o jsonpath='{.data.config}'")
for ip_range in ip_ranges.split(","):
assert ip_range in out
def validate_coredns_config(ip_ranges="8.8.8.8,1.1.1.1"):
"""
Validate dns
"""
out = kubectl("get configmap coredns -n kube-system -o jsonpath='{.data.Corefile}'")
expected_forward_val = "forward ."
for ip_range in ip_ranges.split(","):
expected_forward_val = expected_forward_val + " " + ip_range
assert expected_forward_val in out
def validate_keda():
"""
Validate keda
"""
wait_for_installation()
wait_for_pod_state("", "keda", "running", label="app=keda-operator")
print("KEDA operator up and running.")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "keda-scaledobject.yaml")
kubectl("apply -f {}".format(manifest))
scaledObject = kubectl("-n gonuts get scaledobject.keda.sh")
assert "stan-scaledobject" in scaledObject
kubectl("delete -f {}".format(manifest))
def validate_traefik():
"""
Validate traefik
"""
wait_for_pod_state("", "traefik", "running", label="name=traefik-ingress-lb")
def validate_portainer():
"""
Validate portainer
"""
wait_for_pod_state("", "portainer", "running", label="app.kubernetes.io/name=portainer")
def validate_openfaas():
"""
Validate openfaas
"""
wait_for_pod_state("", "openfaas", "running", label="app=gateway")
def validate_openebs():
"""
Validate OpenEBS
"""
wait_for_installation()
wait_for_pod_state(
"",
"openebs",
"running",
label="openebs.io/component-name=ndm",
timeout_insec=900,
)
print("OpenEBS is up and running.")
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "openebs-test.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state(
"", "default", "running", label="app=openebs-test-busybox", timeout_insec=900
)
output = kubectl("exec openebs-test-busybox -- ls /", timeout_insec=900, err_out="no")
assert "my-data" in output
kubectl("delete -f {}".format(manifest))
def validate_kata():
"""
Validate Kata
"""
wait_for_installation()
here = os.path.dirname(os.path.abspath(__file__))
manifest = os.path.join(here, "templates", "nginx-kata.yaml")
kubectl("apply -f {}".format(manifest))
wait_for_pod_state("", "default", "running", label="app=kata")
kubectl("delete -f {}".format(manifest))
| 30.209677
| 100
| 0.63137
|
9e94e78acfe56507014f2d730e88c83acef8b6eb
| 362
|
py
|
Python
|
analysis/templatetags/karyomapping_tags.py
|
SACGF/variantgrid
|
515195e2f03a0da3a3e5f2919d8e0431babfd9c9
|
[
"RSA-MD"
] | 5
|
2021-01-14T03:34:42.000Z
|
2022-03-07T15:34:18.000Z
|
analysis/templatetags/karyomapping_tags.py
|
SACGF/variantgrid
|
515195e2f03a0da3a3e5f2919d8e0431babfd9c9
|
[
"RSA-MD"
] | 551
|
2020-10-19T00:02:38.000Z
|
2022-03-30T02:18:22.000Z
|
analysis/templatetags/karyomapping_tags.py
|
SACGF/variantgrid
|
515195e2f03a0da3a3e5f2919d8e0431babfd9c9
|
[
"RSA-MD"
] | null | null | null |
from django.template import Library
register = Library()
@register.inclusion_tag("analysis/tags/genome_karyomapping_counts_summary.html")
def genome_karyomapping_counts_summary(genome_karyomapping_counts):
return {'genome_karyomapping_counts': genome_karyomapping_counts,
'collapsed_counts': genome_karyomapping_counts.get_collapsed_counts()}
| 36.2
| 82
| 0.828729
|
d0ebe9965ff7454ce51ae4d792033c911b34debe
| 4,151
|
py
|
Python
|
Simple Media Player using python/Tools/DisplayPanel.py
|
nexsct/simple-media-player
|
7e6d1df3024d6b9d594468ee31fee1515b52d2ce
|
[
"Apache-2.0"
] | 54
|
2017-01-14T07:18:23.000Z
|
2022-03-03T05:58:10.000Z
|
Simple Media Player using python/Tools/DisplayPanel.py
|
nexsct/simple-media-player
|
7e6d1df3024d6b9d594468ee31fee1515b52d2ce
|
[
"Apache-2.0"
] | 1
|
2018-05-14T01:26:32.000Z
|
2018-05-14T01:26:32.000Z
|
Simple Media Player using python/Tools/DisplayPanel.py
|
nexsct/simple-media-player
|
7e6d1df3024d6b9d594468ee31fee1515b52d2ce
|
[
"Apache-2.0"
] | 56
|
2017-04-18T18:25:58.000Z
|
2022-03-20T18:54:53.000Z
|
#!/usr/bin/python
# ---------------- READ ME ---------------------------------------------
# This Script is Created Only For Practise And Educational Purpose Only
# This is an Example Of Tkinter Canvas Graphics
# This Script Is Created For http://bitforestinfo.blogspot.in
# This Script is Written By
#
#
##################################################
######## Please Don't Remove Author Name #########
############### Thanks ###########################
##################################################
#
#
__author__='''
######################################################
By S.S.B Group
######################################################
Suraj Singh
Admin
S.S.B Group
surajsinghbisht054@gmail.com
http://bitforestinfo.blogspot.in/
Note: We Feel Proud To Be Indian
######################################################
'''
# ====================================================
# Usages:
# Player(root, var1, var2, var3)
# root=Frame or widget for place display
# var1=String Variable For Song Path
# var2=String Variable For Song Playing Time
# var3=String Variable For Song Durations
#=====================================================
# Here Importing Module
try:
import Tkinter, ttk
except:
import tkinter as Tkinter
import tkinter.ttk as ttk
import os.path
from Configuration_base import *
# Creating Class
class Player:
def __init__(self, root, song, time, duration):
self.root=Tkinter.Frame(root)
self.root.pack(side='top')
self.song=song
self.time=time
self.duration=duration
self.create_console()
self.auto_bind()
def auto_bind(self):
self.song.trace('w',self.update_song_title)
self.time.trace('w',self.update_time)
self.duration.trace('w', self.update_duration)
return
def create_console(self):
self.back_time_label=Tkinter.PhotoImage(file="../Icons/background.gif")
# consoleframe=Tkinter.LabelFrame(self.root, text='Display Panel', bg='aqua')
# consoleframe.pack(side='top', expand='yes', fill='x')
self.canvas=Tkinter.Canvas(self.root, width=400, height=100, bg='skyblue')
self.canvas.pack()
self.canvas.image=self.back_time_label
self.canvas.create_image(0, 0, anchor="nw", image=self.back_time_label)
self.time_display=self.canvas.create_text(10, 25, anchor="nw", fill='cornsilk', font=Digital_Clock_Font_Setting, text='0:00:00')
self.song_display=self.canvas.create_text(220,40, anchor="nw", fill='cornsilk', font=Songs_playing_Font_Setting, text='Nothing For Playing')
self.song_duration=self.canvas.create_text(220,65, anchor="nw", fill='cornsilk', font=duration_time_Font_Setting, text='[0:00:00]')
return
def song_title_filter(self, text):
if len(os.path.basename(text))>22:
name=os.path.basename(text)[0:20]+'...'
pass
else:
name=os.path.basename(text)
pass
return name
def update_duration(self, *args,**kwargs):
raw_text=self.duration.get()
text="[{}]".format(raw_text)
self.canvas.itemconfig(self.song_duration, text=text)
return
def update_time(self, *args, **kwargs):
text=self.time.get()
self.canvas.itemconfig(self.time_display, text=text)
return
def update_song_title(self, *args, **kwargs):
text=self.song.get()
text=self.song_title_filter(text)
self.canvas.itemconfig(self.song_display, text=text)
return
if __name__=='__main__':
root=Tkinter.Tk()
Var=Tkinter.IntVar()
root.title('Player Module')
var1=Tkinter.StringVar()
Tkinter.Entry(root, textvariable=var1).pack(side='top')
var2=Tkinter.StringVar()
Tkinter.Entry(root, textvariable=var2).pack(side='top')
var3=Tkinter.StringVar()
Tkinter.Entry(root, textvariable=var3).pack(side='top')
Player(root, var1, var2, var3)
while True:
root.update()
        root.update_idletasks()
| 34.591667
| 148
| 0.569742
|
93be7d1470bbc481053e354ab71e0b8175fcd4d0
| 1,922
|
py
|
Python
|
bot.py
|
Tharun1828/Sk-Droplink-bot
|
a046330d9a4daec106da822e686b9ec33dddb680
|
[
"MIT"
] | 1
|
2021-12-05T15:55:27.000Z
|
2021-12-05T15:55:27.000Z
|
bot.py
|
Tharun1828/Sk-Droplink-bot
|
a046330d9a4daec106da822e686b9ec33dddb680
|
[
"MIT"
] | null | null | null |
bot.py
|
Tharun1828/Sk-Droplink-bot
|
a046330d9a4daec106da822e686b9ec33dddb680
|
[
"MIT"
] | 5
|
2021-12-06T03:19:00.000Z
|
2022-03-16T06:44:56.000Z
|
from os import environ
import aiohttp
from pyrogram import Client, filters
API_ID = environ.get('API_ID')
API_HASH = environ.get('API_HASH')
BOT_TOKEN = environ.get('BOT_TOKEN')
API_KEY = environ.get('API_KEY', 'deca8552d6bfa7f9e86bc34290214c116036d5de')
bot = Client('pdiskshortner bot',
api_id=API_ID,
api_hash=API_HASH,
bot_token=BOT_TOKEN,
workers=50,
sleep_threshold=10)
@bot.on_message(filters.command('start') & filters.private)
async def start(bot, message):
await message.reply(
f"**Hi {message.chat.first_name}!**\n\n"
" one and Only a personal Bot to short links from droplink website Made with ♥️ by @NP_technology")
@bot.on_message(filters.command('help') & filters.private)
async def help(bot, message):
await message.reply(
f"**Hi {message.chat.first_name}!**\n\n"
"hey, bro I can convert big/long link to a short link of droplink Made with ♥️ by @NP_technology")
@bot.on_message(filters.regex(r'https?://[^\s]+') & filters.private)
async def link_handler(bot, message):
link = message.matches[0].group(0)
try:
short_link = await get_shortlink(link)
await message.reply(f'''<code>{short_link}</code>.
({short_link})
This link has been shortened by @np_technology for free. Subscribe to our YouTube channel to get more awesome things like this: https://youtube.com/channel/UCJ58-uPHipMiP4-uVmp0iRw [Short Link]({short_link})''', quote=True)
except Exception as e:
await message.reply(f'Error: {e}', quote=True)
async def get_shortlink(link):
url = 'http://droplink.co/api'
params = {'api': API_KEY, 'url': link}
async with aiohttp.ClientSession() as session:
async with session.get(url, params=params, raise_for_status=True) as response:
data = await response.json()
return data["shortenedUrl"]
bot.run()
| 36.264151
| 220
| 0.675338
|
53a1ea4c94c697d6ce341a3366b48ba9b14e0361
| 9,356
|
py
|
Python
|
nova/virt/fake.py
|
russellb/nova
|
99c2e02b44a1012c8e26fc7658dc40ec4620a1ee
|
[
"Apache-2.0"
] | null | null | null |
nova/virt/fake.py
|
russellb/nova
|
99c2e02b44a1012c8e26fc7658dc40ec4620a1ee
|
[
"Apache-2.0"
] | null | null | null |
nova/virt/fake.py
|
russellb/nova
|
99c2e02b44a1012c8e26fc7658dc40ec4620a1ee
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
A fake (in-memory) hypervisor+api.
Allows nova testing w/o a hypervisor. This module also documents the
semantics of real hypervisor connections.
"""
from nova.compute import power_state
from nova import db
from nova import exception
from nova import log as logging
from nova import utils
from nova.virt import driver
LOG = logging.getLogger(__name__)
def get_connection(_=None):
# The read_only parameter is ignored.
return FakeConnection.instance()
class FakeInstance(object):
def __init__(self, name, state):
self.name = name
self.state = state
class FakeConnection(driver.ComputeDriver):
"""Fake hypervisor driver"""
def __init__(self):
self.instances = {}
self.host_status = {
'host_name-description': 'Fake Host',
'host_hostname': 'fake-mini',
'host_memory_total': 8000000000,
'host_memory_overhead': 10000000,
'host_memory_free': 7900000000,
'host_memory_free_computed': 7900000000,
'host_other_config': {},
'host_ip_address': '192.168.1.109',
'host_cpu_info': {},
'disk_available': 500000000000,
'disk_total': 600000000000,
'disk_used': 100000000000,
'host_uuid': 'cedb9b39-9388-41df-8891-c5c9a0c0fe5f',
'host_name_label': 'fake-mini'}
self._mounts = {}
@classmethod
def instance(cls):
if not hasattr(cls, '_instance'):
cls._instance = cls()
return cls._instance
def init_host(self, host):
return
def list_instances(self):
return self.instances.keys()
def _map_to_instance_info(self, instance):
instance = utils.check_isinstance(instance, FakeInstance)
info = driver.InstanceInfo(instance.name, instance.state)
return info
def list_instances_detail(self):
info_list = []
for instance in self.instances.values():
info_list.append(self._map_to_instance_info(instance))
return info_list
def plug_vifs(self, instance, network_info):
"""Plug VIFs into networks."""
pass
def unplug_vifs(self, instance, network_info):
"""Unplug VIFs from networks."""
pass
def spawn(self, context, instance, image_meta,
network_info=None, block_device_info=None):
name = instance.name
state = power_state.RUNNING
fake_instance = FakeInstance(name, state)
self.instances[name] = fake_instance
def snapshot(self, context, instance, name):
if not instance['name'] in self.instances:
raise exception.InstanceNotRunning()
def reboot(self, instance, network_info, reboot_type):
pass
@staticmethod
def get_host_ip_addr():
return '192.168.0.1'
def resize(self, instance, flavor):
pass
def set_admin_password(self, instance, new_pass):
pass
def inject_file(self, instance, b64_path, b64_contents):
pass
def agent_update(self, instance, url, md5hash):
pass
def rescue(self, context, instance, network_info, image_meta):
pass
def unrescue(self, instance, network_info):
pass
def poll_rebooting_instances(self, timeout):
pass
def poll_rescued_instances(self, timeout):
pass
def migrate_disk_and_power_off(self, context, instance, dest,
instance_type, network_info):
pass
def finish_revert_migration(self, instance, network_info):
pass
def poll_unconfirmed_resizes(self, resize_confirm_window):
pass
def pause(self, instance):
pass
def unpause(self, instance):
pass
def suspend(self, instance):
pass
def resume(self, instance):
pass
def destroy(self, instance, network_info, block_device_info=None):
key = instance['name']
if key in self.instances:
del self.instances[key]
else:
LOG.warning("Key '%s' not in instances '%s'" %
(key, self.instances))
def attach_volume(self, connection_info, instance_name, mountpoint):
"""Attach the disk to the instance at mountpoint using info"""
if not instance_name in self._mounts:
self._mounts[instance_name] = {}
self._mounts[instance_name][mountpoint] = connection_info
return True
def detach_volume(self, connection_info, instance_name, mountpoint):
"""Detach the disk attached to the instance"""
try:
del self._mounts[instance_name][mountpoint]
except KeyError:
pass
return True
def get_info(self, instance_name):
if instance_name not in self.instances:
raise exception.InstanceNotFound(instance_id=instance_name)
i = self.instances[instance_name]
return {'state': i.state,
'max_mem': 0,
'mem': 0,
'num_cpu': 2,
'cpu_time': 0}
def get_diagnostics(self, instance_name):
return 'FAKE_DIAGNOSTICS'
def get_all_bw_usage(self, start_time, stop_time=None):
"""Return bandwidth usage info for each interface on each
running VM"""
bwusage = []
return bwusage
def list_disks(self, instance_name):
return ['A_DISK']
def list_interfaces(self, instance_name):
return ['A_VIF']
def block_stats(self, instance_name, disk_id):
return [0L, 0L, 0L, 0L, None]
def interface_stats(self, instance_name, iface_id):
return [0L, 0L, 0L, 0L, 0L, 0L, 0L, 0L]
def get_console_output(self, instance):
return 'FAKE CONSOLE OUTPUT\nANOTHER\nLAST LINE'
def get_vnc_console(self, instance):
return {'internal_access_path': 'FAKE',
'host': 'fakevncconsole.com',
'port': 6969}
def get_console_pool_info(self, console_type):
return {'address': '127.0.0.1',
'username': 'fakeuser',
'password': 'fakepassword'}
def refresh_security_group_rules(self, security_group_id):
return True
def refresh_security_group_members(self, security_group_id):
return True
def refresh_provider_fw_rules(self):
pass
def update_available_resource(self, ctxt, host):
pass
def compare_cpu(self, xml):
"""This method is supported only by libvirt."""
raise NotImplementedError('This method is supported only by libvirt.')
def ensure_filtering_rules_for_instance(self, instance_ref, network_info):
"""This method is supported only by libvirt."""
raise NotImplementedError('This method is supported only by libvirt.')
def get_instance_disk_info(self, instance_name):
"""This method is supported only by libvirt."""
return
def live_migration(self, context, instance_ref, dest,
post_method, recover_method, block_migration=False):
"""This method is supported only by libvirt."""
return
def finish_migration(self, context, migration, instance, disk_info,
network_info, image_meta, resize_instance):
return
def confirm_migration(self, migration, instance, network_info):
return
def pre_live_migration(self, block_device_info):
"""This method is supported only by libvirt."""
return
def unfilter_instance(self, instance_ref, network_info):
"""This method is supported only by libvirt."""
raise NotImplementedError('This method is supported only by libvirt.')
def test_remove_vm(self, instance_name):
""" Removes the named VM, as if it crashed. For testing"""
self.instances.pop(instance_name)
def update_host_status(self):
"""Return fake Host Status of ram, disk, network."""
return self.host_status
def get_host_stats(self, refresh=False):
"""Return fake Host Status of ram, disk, network."""
return self.host_status
def host_power_action(self, host, action):
"""Reboots, shuts down or powers up the host."""
pass
def set_host_enabled(self, host, enabled):
"""Sets the specified host's ability to accept new instances."""
pass
def get_disk_available_least(self):
""" """
pass
def get_volume_connector(self, instance):
return {'ip': '127.0.0.1', 'initiator': 'fake'}
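# --- Usage sketch (illustrative only, not part of nova). The helper class and
# instance name below are assumptions; they exist only to show the driver
# lifecycle: spawn() registers an in-memory instance, get_info() reports its
# state, and destroy() removes it again.
def _fake_driver_example():
    class _Inst(dict):
        """Tiny stand-in for a nova instance record (attribute + item access)."""
        def __getattr__(self, key):
            return self[key]
    conn = get_connection()
    inst = _Inst(name='instance-0001')
    conn.spawn(context=None, instance=inst, image_meta={})
    assert 'instance-0001' in conn.list_instances()
    info = conn.get_info('instance-0001')
    conn.destroy(inst, network_info=None)
    return info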
| 30.67541
| 78
| 0.644506
|
776f796b655397efe6c9a768e1ad81d2fe19a84d
| 3,207
|
py
|
Python
|
server/utils/Signature_no_timestamp.py
|
y4096/flask-apiSign-demo
|
2138fb93e88f0f2b51f9bbe89f20f7c00acabc78
|
[
"MIT"
] | null | null | null |
server/utils/Signature_no_timestamp.py
|
y4096/flask-apiSign-demo
|
2138fb93e88f0f2b51f9bbe89f20f7c00acabc78
|
[
"MIT"
] | null | null | null |
server/utils/Signature_no_timestamp.py
|
y4096/flask-apiSign-demo
|
2138fb93e88f0f2b51f9bbe89f20f7c00acabc78
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from hashlib import sha256
from functools import wraps
from flask import request, jsonify
class Signature(object):
""" 接口签名认证 """
def __init__(self):
self._accessKeys = [
{"accesskey_id": "demo_id", "accesskey_secret": "demo_secret"}
]
def _check_req_accesskey_id(self, req_accesskey_id):
""" 校验accesskey_id
@pram req_accesskey_id str: 请求参数中的用户标识id
"""
if req_accesskey_id in [i['accesskey_id'] for i in self._accessKeys if "accesskey_id" in i]:
return True
return False
def _get_accesskey_secret(self, accesskey_id):
""" 根据accesskey_id获取对应的accesskey_secret
@pram accesskey_id str: 用户标识id
"""
return [i['accesskey_secret'] for i in self._accessKeys if i.get('accesskey_id') == accesskey_id][0]
def _sign(self, parameters):
""" 签名
@param parameters dict: 除signature外请求的所有查询参数(公共参数和私有参数)
"""
if "signature" in parameters:
parameters.pop("signature")
accesskey_id = parameters["accesskey_id"]
sortedParameters = sorted(parameters.items(), key=lambda parameters: parameters[0])
canonicalizedQueryString = ''
for (k, v) in sortedParameters:
canonicalizedQueryString += '{}={}&'.format(k, v)
canonicalizedQueryString += self._get_accesskey_secret(accesskey_id)
signature = sha256(canonicalizedQueryString.encode("utf-8")).hexdigest().upper()
return signature
def _verification(self, req_params):
""" 校验请求是否有效
@param req_params dict: 请求的所有查询参数(公共参数和私有参数)
"""
res = dict(msg='', success=False)
try:
req_accesskey_id = req_params["accesskey_id"]
req_signature = req_params["signature"]
except KeyError:
res.update(msg="Invalid public params")
except Exception:
res.update(msg="Unknown server error")
else:
            # Step 1: validate the accesskey_id
if self._check_req_accesskey_id(req_accesskey_id):
                # Step 2: verify the signature
if req_signature == self._sign(req_params):
res.update(msg="Verification pass", success=True)
else:
res.update(msg="Invalid query string")
else:
res.update(msg="Invalid accesskey_id")
return res
def signature_required(self, f):
@wraps(f)
def decorated_function(*args, **kwargs):
if request.method == "GET":
params = request.args.to_dict()
elif request.method == "POST":
params = request.json
print(params)
else:
return jsonify(dict(msg='only GET,POST allowed', success=False))
headers = request.headers
signature_headers = {'accesskey_id': headers['Accesskey-Id'], 'signature': headers['Signature']}
res = self._verification({**params, **signature_headers})
if res["success"] is True:
return f(*args, **kwargs)
else:
return jsonify(res)
return decorated_function
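# --- Usage sketch (illustrative; the Flask app and route below are assumptions,
# not part of this module). A view wrapped with signature_required only runs
# when the Accesskey-Id / Signature headers verify against the query parameters.
if __name__ == "__main__":
    from flask import Flask
    app = Flask(__name__)
    sig = Signature()
    @app.route("/api/v1/ping", methods=["GET"])
    @sig.signature_required
    def ping():
        return jsonify(dict(msg="pong", success=True))
    app.run(debug=True)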
| 36.443182
| 108
| 0.590271
|
8e01e7c54856ee9c9939f76fa66d7528f355821f
| 799
|
py
|
Python
|
tests/test_timestamper.py
|
kierdavis/structlog-overtime
|
bfc7698ae0ef5b6321a21479af72f52e14b047b5
|
[
"Unlicense"
] | 1
|
2021-02-19T16:04:51.000Z
|
2021-02-19T16:04:51.000Z
|
tests/test_timestamper.py
|
kierdavis/structlog-overtime
|
bfc7698ae0ef5b6321a21479af72f52e14b047b5
|
[
"Unlicense"
] | null | null | null |
tests/test_timestamper.py
|
kierdavis/structlog-overtime
|
bfc7698ae0ef5b6321a21479af72f52e14b047b5
|
[
"Unlicense"
] | null | null | null |
import datetime
import structlog
from testfixtures import compare # type: ignore
import structlog_overtime
def mock_now() -> datetime.datetime:
return datetime.datetime(
2020, 11, 9, 12, 34, 56, tzinfo=datetime.timezone(datetime.timedelta(hours=3))
)
def test_timestamper() -> None:
dest = structlog_overtime.MockLoggerFactory()
structlog.configure(
processors=[structlog_overtime.TimezoneAwareTimeStamper(now=mock_now)],
logger_factory=dest,
)
structlog.get_logger().info("hello world")
compare(
dest.records,
expected=[
structlog_overtime.MockLogRecord(
method_name="info",
event={"event": "hello world", "timestamp": "2020-11-09T12:34:56+0300"},
)
],
)
| 25.774194
| 88
| 0.642053
|
38ca791f0416c794ab93cce76ea133a4e862a988
| 305
|
py
|
Python
|
setup.py
|
mdn/webalyzer
|
0b897d5df4a2c8d6881b5acefba707369b2f2b8c
|
[
"BSD-3-Clause"
] | 10
|
2015-04-28T17:27:19.000Z
|
2019-08-16T08:30:30.000Z
|
setup.py
|
mdn/webalyzer
|
0b897d5df4a2c8d6881b5acefba707369b2f2b8c
|
[
"BSD-3-Clause"
] | 27
|
2015-04-03T15:59:34.000Z
|
2017-07-25T16:43:58.000Z
|
setup.py
|
mdn/webalyzer
|
0b897d5df4a2c8d6881b5acefba707369b2f2b8c
|
[
"BSD-3-Clause"
] | 8
|
2015-04-06T17:37:15.000Z
|
2022-02-19T01:33:42.000Z
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='webalyzer',
version='0.1',
license='BSD',
description='This is https://github.com/mozilla/webalyzer',
author='Peter Bengtsson',
author_email='mail@peterbe.com',
url='https://github.com/mozilla/webalyzer',
)
| 21.785714
| 63
| 0.67541
|
3a91b794e2cb62b1faca31c62032139ffa0d4c55
| 443
|
py
|
Python
|
backend/items/models.py
|
ferizoozoo/todo-app
|
fee2eb4a4d9fde36e9060a4bac7f971a58ef9a21
|
[
"MIT"
] | null | null | null |
backend/items/models.py
|
ferizoozoo/todo-app
|
fee2eb4a4d9fde36e9060a4bac7f971a58ef9a21
|
[
"MIT"
] | 3
|
2022-02-13T07:59:24.000Z
|
2022-02-27T00:58:57.000Z
|
backend/items/models.py
|
ferizoozoo/todo-app
|
fee2eb4a4d9fde36e9060a4bac7f971a58ef9a21
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class TodoItem(models.Model):
user = models.ForeignKey(User, on_delete=models.CASCADE)
task = models.CharField(max_length=500)
created_at = models.DateTimeField(auto_now_add=True)
deadline_at = models.DateTimeField()
active = models.BooleanField()
completed = models.BooleanField()
priority = models.BooleanField()
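# --- Usage sketch (illustrative; 'some_user' is an assumed argument, not part of
# this app). A typical query: all still-active, uncompleted items of one user,
# ordered by the nearest deadline.
def active_items_for(some_user):
    return TodoItem.objects.filter(
        user=some_user, active=True, completed=False
    ).order_by('deadline_at')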
| 36.916667
| 60
| 0.756208
|
5d602c153100ec251f7af9d802c6355b1550daf5
| 3,404
|
py
|
Python
|
tests/handlers/test_directory.py
|
mweinelt/synapse
|
42a9ea37e4c6ff9d91b530c40d366446b9fc2234
|
[
"Apache-2.0"
] | 1
|
2017-02-03T18:58:29.000Z
|
2017-02-03T18:58:29.000Z
|
tests/handlers/test_directory.py
|
mweinelt/synapse
|
42a9ea37e4c6ff9d91b530c40d366446b9fc2234
|
[
"Apache-2.0"
] | null | null | null |
tests/handlers/test_directory.py
|
mweinelt/synapse
|
42a9ea37e4c6ff9d91b530c40d366446b9fc2234
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests import unittest
from twisted.internet import defer
from mock import Mock
from synapse.handlers.directory import DirectoryHandler
from synapse.types import RoomAlias
from tests.utils import setup_test_homeserver
class DirectoryHandlers(object):
def __init__(self, hs):
self.directory_handler = DirectoryHandler(hs)
class DirectoryTestCase(unittest.TestCase):
""" Tests the directory service. """
@defer.inlineCallbacks
def setUp(self):
self.mock_federation = Mock(spec=[
"make_query",
])
self.query_handlers = {}
def register_query_handler(query_type, handler):
self.query_handlers[query_type] = handler
self.mock_federation.register_query_handler = register_query_handler
hs = yield setup_test_homeserver(
http_client=None,
resource_for_federation=Mock(),
replication_layer=self.mock_federation,
)
hs.handlers = DirectoryHandlers(hs)
self.handler = hs.get_handlers().directory_handler
self.store = hs.get_datastore()
self.my_room = RoomAlias.from_string("#my-room:test")
self.your_room = RoomAlias.from_string("#your-room:test")
self.remote_room = RoomAlias.from_string("#another:remote")
@defer.inlineCallbacks
def test_get_local_association(self):
yield self.store.create_room_alias_association(
self.my_room, "!8765qwer:test", ["test"]
)
result = yield self.handler.get_association(self.my_room)
self.assertEquals({
"room_id": "!8765qwer:test",
"servers": ["test"],
}, result)
@defer.inlineCallbacks
def test_get_remote_association(self):
self.mock_federation.make_query.return_value = defer.succeed(
{"room_id": "!8765qwer:test", "servers": ["test", "remote"]}
)
result = yield self.handler.get_association(self.remote_room)
self.assertEquals({
"room_id": "!8765qwer:test",
"servers": ["test", "remote"],
}, result)
self.mock_federation.make_query.assert_called_with(
destination="remote",
query_type="directory",
args={
"room_alias": "#another:remote",
},
retry_on_dns_fail=False,
)
@defer.inlineCallbacks
def test_incoming_fed_query(self):
yield self.store.create_room_alias_association(
self.your_room, "!8765asdf:test", ["test"]
)
response = yield self.query_handlers["directory"](
{"room_alias": "#your-room:test"}
)
self.assertEquals({
"room_id": "!8765asdf:test",
"servers": ["test"],
}, response)
| 30.666667
| 76
| 0.64718
|
41453c88d540dc3a6250ded095a81c3f8d8393b8
| 1,553
|
py
|
Python
|
tests/test_auth.py
|
ISBITX/isbit-client-python
|
6943b69cb15d12a8c524461467e3806ab5b1ba19
|
[
"MIT"
] | 1
|
2018-04-22T01:30:21.000Z
|
2018-04-22T01:30:21.000Z
|
tests/test_auth.py
|
ISBITX/isbit-client-python
|
6943b69cb15d12a8c524461467e3806ab5b1ba19
|
[
"MIT"
] | 1
|
2018-04-26T05:30:41.000Z
|
2018-04-26T05:30:41.000Z
|
tests/test_auth.py
|
ISBITX/isbit-client-python
|
6943b69cb15d12a8c524461467e3806ab5b1ba19
|
[
"MIT"
] | 1
|
2018-04-26T01:54:42.000Z
|
2018-04-26T01:54:42.000Z
|
# coding:utf-8
import unittest2
from isbit_client import auth
class AuthTestCase(unittest2.TestCase):
def setUp(self):
self.auth = auth.Auth(
access_key = "accesskey",
secret_key = "secretkey")
def test_signed_challenge(self):
access_key = "abc"
challenge = "def"
secret_key = "ghi"
a = auth.Auth(
access_key = access_key,
secret_key = secret_key)
signature = a.signed_challenge(challenge)
self.assertEqual(signature, {
"auth": {
"access_key": access_key,
"answer": "52ca0e5beab532532c62155e78d81c7dc8ad6d6f744cf3797668cf52dd2f9a41",}})
def test_signed_params(self):
params = self.auth.signed_params(
"GET", "/api/v2/orders", params={
"tonce": 1234567})
self.assertEqual(params, {
"tonce": 1234567,
"access_key": "accesskey",
"signature": "1b89e3a984c25eacb7439ae644be253b55975e35529ee665966e3b9d8e3dcb2f",})
def test_sign(self):
sign = self.auth.sign(
verb = "get",
path = "/api/v2/orders",
params = {
"tonce": 1234567,
"access_key": "accesskey",})
self.assertEqual(sign, "1b89e3a984c25eacb7439ae644be253b55975e35529ee665966e3b9d8e3dcb2f")
def test_payload(self):
payload = auth.payload(
verb = "get",
path = "/api/v2/markets",
params = {
"tonce": 123456789,
"access_key": "xxx",
"foo": "bar",})
self.assertEqual(payload, "GET|/api/v2/markets|access_key=xxx&foo=bar&tonce=123456789")
if __name__ == "__main__":
unittest2.main()
| 28.759259
| 94
| 0.645203
|
eb55aa86d76cf44206ec70f2c6a65f0d9ffc1b6a
| 7,657
|
py
|
Python
|
model.py
|
devsisters/TCML-tensorflow
|
7c8057431d06a28276a7f4a04782a1f857006fbe
|
[
"MIT"
] | 115
|
2017-10-16T05:47:12.000Z
|
2021-11-09T22:52:21.000Z
|
model.py
|
devsisters/TCML-tensorflow
|
7c8057431d06a28276a7f4a04782a1f857006fbe
|
[
"MIT"
] | 1
|
2017-11-30T07:05:44.000Z
|
2017-12-01T05:14:37.000Z
|
model.py
|
devsisters/TCML-tensorflow
|
7c8057431d06a28276a7f4a04782a1f857006fbe
|
[
"MIT"
] | 22
|
2017-10-16T06:41:21.000Z
|
2021-11-09T22:52:24.000Z
|
import tensorflow as tf
import numpy as np
class TCML:
def __init__(self, hparams, input_tensor, label_tensor, is_train):
assert hparams.dilation is not None
self.num_classes = hparams.n
self.batch_size = hparams.batch_size
self.seq_len = hparams.seq_len
self.input_dim = hparams.input_dim
self.num_dense_filter = hparams.num_dense_filter
self.dilation = hparams.dilation
self.attention_value_dim = hparams.attention_value_dim
self.lr = hparams.lr
self.reg_coeff = hparams.reg_coeff
self.l2_loss = 0
self.filter_width = 2
self.input_placeholder = tf.cast(input_tensor, tf.float32)
self.label_placeholder = label_tensor
self.is_train = is_train
if self.is_train:
self.global_step = tf.get_variable("global_step", initializer=0, trainable=False)
else:
self.global_step = None
self.dense_blocks = []
feed_label, target_label = tf.split(self.label_placeholder, [self.seq_len - 1, 1],
axis=1)
self.target_label = target_label
feed_label_one_hot = tf.one_hot(feed_label,
depth=self.num_classes,
dtype=tf.float32)
feed_label_one_hot = tf.concat([feed_label_one_hot, tf.zeros((self.batch_size, 1, self.num_classes))], axis=1)
concated_input = tf.concat([self.input_placeholder, feed_label_one_hot], axis=2)
last_output = concated_input
d = self.input_dim + self.num_classes
for i, dilation in enumerate(self.dilation):
name = f"dilation{i}_{dilation}"
with tf.variable_scope(name):
last_output = output = self.generate_dense_block(last_output, d, dilation)
self.dense_blocks.append((name, output))
d += self.num_dense_filter
# last_output : [B, T, D + 128 * i]
with tf.variable_scope("attention"):
kernel_size = [1, d, self.attention_value_dim] # width, in_channel, out_channel
conv_kernel = tf.get_variable("1x1_conv", kernel_size,
dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer_conv2d())
self.l2_loss += tf.nn.l2_loss(conv_kernel)
key, query = tf.split(last_output, [self.seq_len - 1, 1], axis=1)
attention_value = tf.nn.conv1d(key, conv_kernel, 1, "SAME")
attention_outputs = self.attention_layer(key, attention_value, query, float(d))
# attention_output : [B, 1, d']
# channel-wise softmax
with tf.variable_scope("softmax"):
kernel_size = [1, self.attention_value_dim, self.num_classes]
conv_kernel = tf.get_variable("1x1_conv", kernel_size,
dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer_conv2d())
self.l2_loss += tf.nn.l2_loss(conv_kernel)
self.last_vector = softmax_vector = tf.nn.conv1d(attention_outputs, conv_kernel, 1, "SAME")
ce_loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(labels=target_label,
logits=softmax_vector))
self.loss = ce_loss + self.reg_coeff * self.l2_loss
self.train_step = tf.train.AdamOptimizer(self.lr).minimize(self.loss, global_step=self.global_step)
self.accuracy = self._calc_accuracy()
def _causal_conv(self, x, dilation, in_channel, out_channel):
with tf.variable_scope("causal_conv"):
# input shape : [B, T, D]
# filter_shape : spatial_filter_shape + [in_channels, out_channels]
filter_shape = [self.filter_width, in_channel, out_channel]
initializer = tf.contrib.layers.xavier_initializer_conv2d()
tanh_filter = tf.get_variable("tanh_filter", shape=filter_shape, dtype=tf.float32,
initializer=initializer)
sigmoid_filter = tf.get_variable("sigmoid_filter", shape=filter_shape, dtype=tf.float32,
initializer=initializer)
x_reverse = tf.pad(x, [[0, 0], [dilation, 0], [0, 0]])
tanh_output = tf.tanh(tf.nn.convolution(x_reverse, tanh_filter,
padding="VALID",
dilation_rate=(dilation,)))
sigmoid_output = tf.sigmoid(tf.nn.convolution(x_reverse, sigmoid_filter,
padding="VALID",
dilation_rate=(dilation,)))
return tf.multiply(tanh_output, sigmoid_output)
def _residual_block(self, x, dilation, num_filter):
# input shape : [B, T, D]
# [filter_height, filter_width, in_channels, out_channels]
conv_output = self._causal_conv(x, dilation, num_filter, num_filter)
return x + conv_output
def generate_dense_block(self, x, input_dim, dilation):
# input shape : [B, T, D]
conv = self._causal_conv(x, dilation, input_dim, self.num_dense_filter)
with tf.variable_scope("residual_block_1"):
residual1 = self._residual_block(conv, dilation, self.num_dense_filter)
with tf.variable_scope("residual_block_2"):
residual2 = self._residual_block(residual1, dilation, self.num_dense_filter)
return tf.concat([x, residual2], axis=2)
def attention_layer(self, key, value, query, d):
# key : B x T-1 x d
# value : B x T-1 x d'
# query : B x 1 x d
attention = tf.nn.softmax(tf.divide(tf.matmul(query, key, transpose_b=True), tf.sqrt(d))) # 1 x (t-1)
return tf.matmul(attention, value) # B x d'
def _calc_accuracy(self):
with tf.name_scope("accuracy"):
predictions = tf.argmax(self.last_vector, 2, name="predictions", output_type=tf.int32)
labels = self.target_label
correct_predictions = tf.equal(predictions, labels)
accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
# self.confusion_matrix = tf.confusion_matrix(labels, predictions, num_classes=self.num_classes)
return accuracy
def _make_dummy_data():
# 4 x 20 x 10 input data (float32)
# 4 x 20 label data (int, [0, 4])
input_data = np.random.randn(4, 20, 10)
label_data = np.random.randint(5, size=(4, 20))
return input_data, label_data
def _TCML_test():
class Dummy: pass
hparams = Dummy()
hparams.n = 5
hparams.input_dim = 10
hparams.num_dense_filter = 16
hparams.batch_size = 4
hparams.seq_len = 20
hparams.attention_value_dim = 16
hparams.dilation = [1, 2, 1, 2]
hparams.lr = 1e-3
with tf.Graph().as_default():
dummy_input, dummy_label = _make_dummy_data()
model = TCML(hparams, tf.stack(dummy_input), tf.cast(tf.stack(dummy_label), tf.int32), True)
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
sess = tf.Session(config=config)
with sess.as_default():
init = tf.initialize_all_variables()
sess.run(init)
_, loss, acc = sess.run([model.train_step, model.loss, model.accuracy])
print(loss, acc)
if __name__ == "__main__":
_TCML_test()
| 44.005747
| 118
| 0.599451
|
c91772b7dc1c4c07630efb84eae664dfe4a91cbf
| 570
|
py
|
Python
|
proxylist/proxies/proxydb.py
|
phenobarbital/proxylist
|
b412c71d47f848dd297fed5d2c2609d05d433692
|
[
"Apache-2.0"
] | null | null | null |
proxylist/proxies/proxydb.py
|
phenobarbital/proxylist
|
b412c71d47f848dd297fed5d2c2609d05d433692
|
[
"Apache-2.0"
] | null | null | null |
proxylist/proxies/proxydb.py
|
phenobarbital/proxylist
|
b412c71d47f848dd297fed5d2c2609d05d433692
|
[
"Apache-2.0"
] | 2
|
2021-05-17T23:01:43.000Z
|
2021-10-20T22:32:16.000Z
|
from .server import ProxyServer
import logging
class ProxyDB(ProxyServer):
url = "http://proxydb.net/?protocol=https&country="
async def get_proxies(self):
proxies = []
try:
table = self.parser.xpath("//table")[0]
except Exception as err:
logging.exception(err)
return []
for i in table.xpath("//tbody/tr")[:10]:
if i.xpath('.//td[5][contains(text(),"HTTPS")]'):
proxy = i.xpath(".//td[1]/a/text()")[0]
proxies.append(proxy)
return proxies
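# --- Standalone sketch (illustrative; it uses lxml directly on a tiny inline HTML
# table instead of the ProxyServer parser). It demonstrates the same XPath
# filtering used above: keep only rows whose fifth cell says HTTPS.
if __name__ == "__main__":
    from lxml import html
    sample = """
    <table><tbody>
      <tr><td><a>1.2.3.4:8080</a></td><td/><td/><td/><td>HTTPS</td></tr>
      <tr><td><a>5.6.7.8:3128</a></td><td/><td/><td/><td>HTTP</td></tr>
    </tbody></table>"""
    rows = html.fromstring(sample).xpath("//tbody/tr")
    https_only = [r.xpath(".//td[1]/a/text()")[0]
                  for r in rows
                  if r.xpath('.//td[5][contains(text(),"HTTPS")]')]
    print(https_only)  # expected: ['1.2.3.4:8080']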
| 30
| 61
| 0.540351
|
b4ec7b8544e3fd6e0c9eb6ed65075e7e7d932e29
| 30
|
py
|
Python
|
Aulas de Python Mundo 3/Curso Python - #17 - Tuplas - Part 1.py
|
ErikDMCosta/CEV-Praticas_Python-Mundo_3
|
9669fe1647ec5effd0b1c4a576aa670107657a77
|
[
"MIT"
] | null | null | null |
Aulas de Python Mundo 3/Curso Python - #17 - Tuplas - Part 1.py
|
ErikDMCosta/CEV-Praticas_Python-Mundo_3
|
9669fe1647ec5effd0b1c4a576aa670107657a77
|
[
"MIT"
] | null | null | null |
Aulas de Python Mundo 3/Curso Python - #17 - Tuplas - Part 1.py
|
ErikDMCosta/CEV-Praticas_Python-Mundo_3
|
9669fe1647ec5effd0b1c4a576aa670107657a77
|
[
"MIT"
] | null | null | null |
num = (2, 5, 9, 7)
print(num)
| 10
| 18
| 0.5
|
c37f18257fde2b655491c6ed53c440c3e4f025da
| 18,276
|
py
|
Python
|
resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/mglutil/TestUtil/tester.py
|
J-E-J-S/aaRS-Pipeline
|
43f59f28ab06e4b16328c3bc405cdddc6e69ac44
|
[
"MIT"
] | null | null | null |
resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/mglutil/TestUtil/tester.py
|
J-E-J-S/aaRS-Pipeline
|
43f59f28ab06e4b16328c3bc405cdddc6e69ac44
|
[
"MIT"
] | 21
|
2021-10-02T00:07:05.000Z
|
2022-03-30T00:02:10.000Z
|
resources/mgltools_x86_64Linux2_1.5.6/MGLToolsPckgs/mglutil/TestUtil/tester.py
|
J-E-J-S/aaRS-Pipeline
|
43f59f28ab06e4b16328c3bc405cdddc6e69ac44
|
[
"MIT"
] | null | null | null |
#
#
# $Id: tester.py,v 1.9 2004/08/27 17:48:00 sophiec Exp $
#
#########################################################################
#
# Date: July 2003 Author: Sophie Coon, William Lindstrom
#
# sophiec@scripps.edu
# lindy@scripps.edu
#
# Copyright: Michel Sanner, Sophie Coon, William Lindstrom and TSRI
#
#########################################################################
import unittest, sys
import types, os, glob, string
class TestSuite(unittest.TestSuite):
def __init__(self, tests=(), setUpSuite=None, tearDownSuite=None):
# Need to know what my tests contain.
self.setUpSuite = setUpSuite
self.tearDownSuite = tearDownSuite
unittest.TestSuite.__init__(self, tests=tests)
def __call__(self, result=None):
if not self.setUpSuite is None:
## if type(self.setUpSuite) is types.MethodType and len(self._tests):
## self._tests[1].setUpSuite()
## else:
self.setUpSuite()
for test in self._tests:
if result.shouldStop:
break
test(result)
if not self.tearDownSuite is None:
## if type(self.tearDownSuite) is types.MethodType and len(self._tests):
## self._tests[0].tearDownSuite()
## else:
self.tearDownSuite()
return result
def __exc_info(self):
"""Return a version of sys.exc_info() with the traceback frame
minimised; usually the top level of the traceback frame is not
needed.
"""
exctype, excvalue, tb = sys.exc_info()
if sys.platform[:4] == 'java': ## tracebacks look different in Jython
return (exctype, excvalue, tb)
newtb = tb.tb_next
if newtb is None:
return (exctype, excvalue, tb)
return (exctype, excvalue, newtb)
class TestLoader(unittest.TestLoader):
"""
"""
testMethodPrefix = 'test_'
ignore = {}
ignoredChecked = False
suiteClass = TestSuite
def loadTestsFromFunctions(self, functions, setUp=None, tearDown=None):
"""
        The functions need to be from the same module.
        Creates a FunctionTestCase for each function in the sequence and
        returns a TestSuite.
"""
ftc = []
if not type(functions) in [types.TupleType, types.ListType] and \
type(functions) is types.FunctionType:
functions = [functions,]
m = functions[0].__module__
modName = m.split('.')[-1]
parts = m.split(".")[:-1]
import string
p = string.join(parts, '/')
modPath = os.path.abspath(p)
for func in functions:
if not type(func) is types.FunctionType:continue
ftc.append(unittest.FunctionTestCase(func, setUp=setUp,
tearDown=tearDown))
return (modPath, self.suiteClass(ftc))
def loadTestsFromModule(self, module, funcPrefix=None):
modName = module.__name__.split('.')[-1]
tests = []
modPath = os.path.split(module.__file__)[0]
modPath = os.path.abspath(modPath)
ignoring = []
if self.ignore.has_key(modName):
# ignore the whole testModule
ignoring = self.ignore[modName]
if len(ignoring)==0:
return (modPath, self.suiteClass(tests))
if not funcPrefix is None:
self.testMethodPrefix = funcPrefix
# Look in the module if a setUp or setUpSuite is defined and a tearDown
# and tearDownSuite
inModule = dir(module)
if 'setUp' in inModule:
setUp = getattr(module, 'setUp')
if not type(setUp) is types.FunctionType:
setUp=None
inModule.remove('setUp')
else:
setUp = None
if 'tearDown' in inModule:
tearDown = getattr(module, 'tearDown')
if not type(tearDown) is types.FunctionType:
tearDown=None
inModule.remove('tearDown')
else:
tearDown = None
if 'setUpSuite' in inModule:
setUpSuite = getattr(module, 'setUpSuite')
if not type(setUpSuite) is types.FunctionType:
setUpSuite=None
inModule.remove('setUpSuite')
else:
setUpSuite = None
if 'tearDownSuite' in inModule:
tearDownSuite = getattr(module, 'tearDownSuite')
if not type(tearDownSuite) is types.FunctionType:
tearDownSuite=None
inModule.remove('tearDownSuite')
else:
tearDownSuite = None
testsFunc = []
for name in dir(module):
if name in ignoring: continue
obj = getattr(module, name)
if (isinstance(obj, (type, types.ClassType)) and
issubclass(obj, unittest.TestCase)):
## inClass = dir(obj)
# Look if a setUpSuite and a tearDownSuite have been implemented
# for a testCase.else if one was implemented for the whole
# module it will be used.
## if 'setUpSuite' in inClass:
## print 'in setUpSuite'
## setUpSuite = getattr(obj, 'setUpSuite')
## if not type(setUpSuite) is types.MethodType:
## setUpSuite=None
## if 'tearDownSuite' in inClass:
## tearDownSuite = getattr(obj, 'tearDownSuite')
## if not type(tearDownSuite) is types.MethodType:
## tearDownSuite=None
ts = self.suiteClass(tests = map(obj,
self.getTestCaseNames(obj)),
setUpSuite=setUpSuite, tearDownSuite=tearDownSuite)
tests.append(ts)
elif type(obj) is types.FunctionType :
p = len(self.testMethodPrefix)
if name[:p]==self.testMethodPrefix:
testsFunc.append(unittest.FunctionTestCase(obj, setUp=setUp,
tearDown=tearDown))
if len(testsFunc):
ts = self.suiteClass(tests = testsFunc, setUpSuite=setUpSuite,
tearDownSuite=tearDownSuite)
tests.append(ts)
return (modPath, self.suiteClass(tests=tests))
def loadTestsFromPackageRecur(self, package, testDirName='Tests',
modPrefix=None, funcPrefix=None):
# Need to make sure that package is the proper thing
pathPack = package.__path__[0]
tests = []
pName = package.__name__
for root, dirs, files in os.walk(pathPack):
if testDirName in dirs:
#packName = root.replace("/", ".")
packName = ""
dir, name = os.path.split(root)
while name != pName:
if packName: packName = name+"."+packName
else: packName = name
dir, name = os.path.split(dir)
if packName:
packName = pName + "." + packName
else:
packName = pName
tests.append(self.loadTestsFromName(packName,
testDirName=testDirName,
modPrefix=modPrefix,
funcPrefix=funcPrefix))
return tests
def loadTestsFromPackage(self, package, testDirName="Tests",
modPrefix=None, funcPrefix=None):
"""
        Import all the modules from the given package's test directory and
        parse the __init__.py of the Tests directory to get the information
        on which tests not to run. If __init__.py is empty, every Python
        module matching the test module name pattern is taken.
"""
# package : package
# testDirName : string representing the name of the test directory
# 1- Needs to get the test directory
pathPack = package.__path__[0]
pathTests = os.path.join(pathPack, testDirName)
if not os.path.exists(pathTests):
return
testPackName = package.__name__ + "." + testDirName
testPack = __import__(testPackName)
components = testPackName.split('.')
for comp in components[1:]:
testPack = getattr(testPack, comp)
if testPack.__dict__.has_key('ignore') :
ignore = getattr(testPack, 'ignore')
if modPrefix is None and testPack.__dict__.has_key('modPrefix'):
modPrefix = getattr(testPack, "modPrefix")
if funcPrefix is None and testPack.__dict__.has_key('funcPrefix'):
funcPrefix = getattr(testPack,'funcPrefix')
# Then need to go in the given directory and get all the python files
# starting with the proper testMethodPrefix.
# 2- get the __init__.py and parse the file.
# Either use glob or walk.
if modPrefix is None:
modName = "/*.py"
else:
modName = "/%s*.py"%modPrefix
testModules = glob.glob(pathTests+modName)
ts = []
for testMod in testModules:
dir, file = os.path.split(testMod)
if file in ["__init__.py", "mvAll.log.py"]: continue
modName = os.path.splitext(file)[0]
ts.append(self.loadTestsFromName(testPackName+"."+modName,
funcPrefix=funcPrefix)[1])
# 3- Create a suite of all the tests in this module.
packSuite = self.suiteClass(ts)
return (pathTests, packSuite)
def getTestsModulesFromPackRecur(self, pack, testDirName='Tests', modPrefix=None):
pathPack = pack.__path__[0]
testModules = []
pName = pack.__name__
for root, dirs, files in os.walk(pathPack):
if testDirName in dirs:
packName = ""
dir, name = os.path.split(root)
while name != pName:
if packName: packName = name+"."+packName
else: packName = name
dir, name = os.path.split(dir)
if packName != "":
packName = pName + "." + packName
else:
packName = pName
pack = self.getObjFromName(packName, testDirName=testDirName)
testModules = testModules + self.getTestsModulesFromPack(pack, testDirName=testDirName, modPrefix=modPrefix)
return testModules
def getTestsModulesFromPack(self, pack, testDirName='Tests', modPrefix=None):
pathPack = pack.__path__[0]
pathTests = os.path.join(pathPack, testDirName)
if modPrefix is None:
modName = "/*.py"
else:
modName = "/%s*.py"%modPrefix
tm = glob.glob(pathTests+modName)
testModules = []
for testMod in tm:
dir, file = os.path.split(testMod)
if file in ["__init__.py", "mvAll.log.py"]: continue
modName = os.path.splitext(file)[0]
pName = pack.__name__+"."+testDirName+"."+modName
testModules.append(pName)
return testModules
def getTestsModules(self, name, recursive=False, module=None, testDirName='Tests',
modPrefix=None, funcPrefix=None):
if funcPrefix:
self.testMethodPrefix=funcPrefix
obj = self.getObjFromName(name, module=module, testDirName=testDirName)
import unittest
if type(obj) == types.ModuleType:
# Can either be a python module or a python package.
if hasattr(obj,'__path__') and os.path.isdir(obj.__path__[0]):
if recursive:
testModules = self.getTestsModulesFromPackRecur(obj,
testDirName=testDirName,
modPrefix=modPrefix)
return testModules
else:
testModules = self.getTestsModulesFromPack(obj,
testDirName=testDirName,
modPrefix=modPrefix)
return testModules
else:
return [obj.__name__,]
elif (isinstance(obj, (type, types.ClassType)) and
issubclass(obj, unittest.TestCase)):
return [obj.__module__+'.'+obj.__name__,]
elif type(obj) == types.FunctionType:
return [obj.__module__+'.'+obj.__name__,]
def getObjFromName(self, name, module=None, testDirName='Tests'):
if name[-3:] == '.py':
name = name[:-3]
if '/' in name:
parts = name.split('/')
parts = filter(lambda x: x, parts)
else:
parts = name.split('.')
parts = filter(lambda x: x, parts)
if module is None:
if not parts:
raise ValueError, "incomplete test name: %s" % name
else:
parts_copy = parts[:]
while parts_copy:
try:
module = __import__(string.join(parts_copy,'.'))
break
except ImportError:
del parts_copy[-1]
if not parts_copy: raise
parts = parts[1:]
obj = module
for part in parts:
obj = getattr(obj, part)
if part==testDirName:
if obj.__dict__.has_key('ignore'):
self.ignore = getattr(obj,'ignore')
return obj
def loadTestsFromName(self, name, recursive=False, module=None,
testDirName='Tests', modPrefix=None,
funcPrefix=None):
"""
        Returns a suite of all test cases given a string specifier.
        The name may resolve to either a package, a module, a test case class,
        a test method within a test case class, a test function or a callable
        object which returns a TestCase or TestSuite instance.
        The method optionally resolves the names relative to a given module.
"""
if funcPrefix:
self.testMethodPrefix=funcPrefix
## if name[-3:] == '.py':
## name = name[:-3]
## if '/' in name:
## parts = name.split('/')
## else:
## parts = name.split('.')
obj = self.getObjFromName(name, module=module, testDirName=testDirName)
import unittest
if type(obj) == types.ModuleType:
# Can either be a python module or a python package.
if hasattr(obj,'__path__') and os.path.isdir(obj.__path__[0]):
if recursive:
return self.loadTestsFromPackageRecur(obj,
testDirName=testDirName,
modPrefix=modPrefix
)
else:
return self.loadTestsFromPackage(obj,
testDirName=testDirName,
modPrefix=modPrefix)
else:
return self.loadTestsFromModule(obj)
elif (isinstance(obj, (type, types.ClassType)) and
issubclass(obj, unittest.TestCase)):
m = obj.__module__
parts = m.split(".")[:-1]
p = string.join(parts, "/")
return (p, self.loadTestsFromTestCase(obj))
elif type(obj) == types.FunctionType:
# need to get the setUp and tearDown method.
m = obj.__module__
module = __import__(m)
parts = m.split('.')
p = string.join(parts[:-1], '/')
modPath = os.path.abspath(p)
for part in parts[1:]:
module = getattr(module , part)
setUp = None
tearDown = None
setUpSuite = None
tearDownSuite = None
if module .__dict__.has_key('setUp'):
setUp = getattr(module , 'setUp')
if module .__dict__.has_key('tearDown'):
tearDown = getattr(module , 'tearDown')
if module .__dict__.has_key('setUpSuite'):
setUpSuite = getattr(module, 'setUpSuite')
if not type(setUpSuite) is types.FunctionType:
setUpSuite=None
if module .__dict__.has_key('tearDownSuite'):
tearDownSuite = getattr(module, 'tearDownSuite')
if not type(tearDownSuite) is types.FunctionType:
tearDownSuite=None
tfc = unittest.FunctionTestCase(obj, setUp=setUp,
tearDown=tearDown)
ts = self.suiteClass(tests = [tfc,], setUpSuite=setUpSuite,
tearDownSuite=tearDownSuite)
return (modPath, ts)
elif type(obj) == types.UnboundMethodType:
newobj = obj.im_class(obj.__name__)
m = newobj.__module__
parts = m.split(".")[:-1]
p = string.join(parts, "/")
return (p, obj.im_class(obj.__name__))
elif callable(obj):
test = obj()
if not isinstance(test, unittest.TestCase) and \
not isinstance(test, unittest.TestSuite):
raise ValueError, \
"calling %s returned %s, not a test" % (obj,test)
return (None,test)
else:
raise ValueError, "don't know how to make test from: %s" % obj
| 40.255507
| 124
| 0.518494
|
cacb5b3deb573c655a68adf26060c970b7cea280
| 5,266
|
py
|
Python
|
utils.py
|
Henjoyy/-
|
4fe5bb1c505f792226b2f99c0ab0615d74827aca
|
[
"MIT"
] | 46
|
2021-12-01T02:05:06.000Z
|
2022-03-17T07:24:40.000Z
|
utils.py
|
Henjoyy/-
|
4fe5bb1c505f792226b2f99c0ab0615d74827aca
|
[
"MIT"
] | 14
|
2021-12-07T08:27:13.000Z
|
2022-03-31T08:43:13.000Z
|
utils.py
|
Henjoyy/-
|
4fe5bb1c505f792226b2f99c0ab0615d74827aca
|
[
"MIT"
] | 20
|
2021-12-07T08:14:33.000Z
|
2022-03-28T10:52:06.000Z
|
import os
import torch
from torch.utils.data import DataLoader
from torchvision import datasets, transforms
from torchvision.transforms.functional import rotate
import config as c
from multi_transform_loader import ImageFolderMultiTransform
def get_random_transforms():
augmentative_transforms = []
if c.transf_rotations:
augmentative_transforms += [transforms.RandomRotation(180)]
if c.transf_brightness > 0.0 or c.transf_contrast > 0.0 or c.transf_saturation > 0.0:
augmentative_transforms += [transforms.ColorJitter(brightness=c.transf_brightness, contrast=c.transf_contrast,
saturation=c.transf_saturation)]
tfs = [transforms.Resize(c.img_size)] + augmentative_transforms + [transforms.ToTensor(),
transforms.Normalize(c.norm_mean, c.norm_std)]
transform_train = transforms.Compose(tfs)
return transform_train
def get_fixed_transforms(degrees):
cust_rot = lambda x: rotate(x, degrees, False, False, None)
augmentative_transforms = [cust_rot]
if c.transf_brightness > 0.0 or c.transf_contrast > 0.0 or c.transf_saturation > 0.0:
augmentative_transforms += [
transforms.ColorJitter(brightness=c.transf_brightness, contrast=c.transf_contrast,
saturation=c.transf_saturation)]
tfs = [transforms.Resize(c.img_size)] + augmentative_transforms + [transforms.ToTensor(),
transforms.Normalize(c.norm_mean,
c.norm_std)]
return transforms.Compose(tfs)
#tensor to numpy
def t2np(tensor):
'''pytorch tensor -> numpy array'''
return tensor.cpu().data.numpy() if tensor is not None else None
def get_loss(z, jac):
    '''See equation 4 of the paper for why this makes sense - and just ignore the scaling here.'''
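    # Negative log-likelihood of a normalizing flow (up to constant terms):
    # 0.5 * ||z||^2 minus the log|det Jacobian| term, averaged over the batch
    # and normalized by the number of channels (z.shape[1]).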
return torch.mean(0.5 * torch.sum(z ** 2, dim=(1,2,3)) - jac) / z.shape[1]
def load_datasets(dataset_path, class_name):
'''
Expected folder/file format to find anomalies of class <class_name> from dataset location <dataset_path>:
train data:
dataset_path/class_name/train/good/any_filename.png
dataset_path/class_name/train/good/another_filename.tif
dataset_path/class_name/train/good/xyz.png
[...]
test data:
'normal data' = non-anomalies
dataset_path/class_name/test/good/name_the_file_as_you_like_as_long_as_there_is_an_image_extension.webp
dataset_path/class_name/test/good/did_you_know_the_image_extension_webp?.png
dataset_path/class_name/test/good/did_you_know_that_filenames_may_contain_question_marks????.png
dataset_path/class_name/test/good/dont_know_how_it_is_with_windows.png
dataset_path/class_name/test/good/just_dont_use_windows_for_this.png
[...]
anomalies - assume there are anomaly classes 'crack' and 'curved'
dataset_path/class_name/test/crack/dat_crack_damn.png
dataset_path/class_name/test/crack/let_it_crack.png
dataset_path/class_name/test/crack/writing_docs_is_fun.png
[...]
dataset_path/class_name/test/curved/wont_make_a_difference_if_you_put_all_anomalies_in_one_class.png
dataset_path/class_name/test/curved/but_this_code_is_practicable_for_the_mvtec_dataset.png
[...]
'''
def target_transform(target):
return class_perm[target]
data_dir_train = os.path.join(dataset_path, class_name, 'train')
data_dir_test = os.path.join(dataset_path, class_name, 'test')
classes = os.listdir(data_dir_test)
if 'good' not in classes:
print('There should exist a subdirectory "good". Read the doc of this function for further information.')
exit()
classes.sort()
class_perm = list()
class_idx = 1
for cl in classes:
if cl == 'good':
class_perm.append(0)
else:
class_perm.append(class_idx)
class_idx += 1
transform_train = get_random_transforms()
trainset = ImageFolderMultiTransform(data_dir_train, transform=transform_train, n_transforms=c.n_transforms)
testset = ImageFolderMultiTransform(data_dir_test, transform=transform_train, target_transform=target_transform,
n_transforms=c.n_transforms_test)
return trainset, testset
def make_dataloaders(trainset, testset):
trainloader = torch.utils.data.DataLoader(trainset, pin_memory=True, batch_size=c.batch_size, shuffle=True,
drop_last=False)
testloader = torch.utils.data.DataLoader(testset, pin_memory=True, batch_size=c.batch_size_test, shuffle=True,
drop_last=False)
return trainloader, testloader
def preprocess_batch(data):
'''move data to device and reshape image'''
inputs, labels = data
inputs, labels = inputs.to(c.device), labels.to(c.device)
inputs = inputs.view(-1, *inputs.shape[-3:])
return inputs, labels
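# --- Usage sketch (illustrative; './data' and 'bottle' are assumed example values,
# not defaults of this module). It shows the intended call order: build the
# datasets, wrap them in loaders, then preprocess each batch before it is fed
# to the model.
if __name__ == '__main__':
    trainset, testset = load_datasets('./data', 'bottle')
    trainloader, testloader = make_dataloaders(trainset, testset)
    for batch in trainloader:
        inputs, labels = preprocess_batch(batch)
        print(inputs.shape, labels.shape)
        break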
| 42.467742
| 118
| 0.663502
|
ae02ff6ce2bcdf03a70dbe26ea3101f36f09f9a3
| 256
|
py
|
Python
|
mlrose_hiive/algorithms/decay/__init__.py
|
sridatta/mlrose
|
d9fe33df157bd801a33b13a803d737c4f8e48ebb
|
[
"BSD-3-Clause"
] | 63
|
2019-09-24T14:09:51.000Z
|
2022-03-09T02:36:25.000Z
|
mlrose_hiive/algorithms/decay/__init__.py
|
kunla/mlrose
|
306c96b156182a847f5c9e6ffb62eb805d266fef
|
[
"BSD-3-Clause"
] | 6
|
2019-10-04T01:04:21.000Z
|
2021-08-31T19:06:13.000Z
|
mlrose_hiive/algorithms/decay/__init__.py
|
kunla/mlrose
|
306c96b156182a847f5c9e6ffb62eb805d266fef
|
[
"BSD-3-Clause"
] | 104
|
2019-09-23T22:44:43.000Z
|
2022-03-13T18:50:53.000Z
|
""" Classes for defining optimization problem objects."""
# Author: Genevieve Hayes
# License: BSD 3 clause
from .arith_decay import ArithDecay
from .geom_decay import GeomDecay
from .exp_decay import ExpDecay
from .custom_schedule import CustomSchedule
| 25.6
| 57
| 0.808594
|
b5d518a9f50305ad6de3444bfc54bc53d25fbb6a
| 8,660
|
py
|
Python
|
ghoma2mqtt.py
|
poldy79/ghoma2mqtt
|
fa0ed8efb1010b595800f630d56662b67e0787c9
|
[
"MIT"
] | null | null | null |
ghoma2mqtt.py
|
poldy79/ghoma2mqtt
|
fa0ed8efb1010b595800f630d56662b67e0787c9
|
[
"MIT"
] | 1
|
2019-09-25T10:00:44.000Z
|
2019-10-03T15:08:21.000Z
|
ghoma2mqtt.py
|
poldy79/ghoma2mqtt
|
fa0ed8efb1010b595800f630d56662b67e0787c9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import SocketServer
import time
import socket
import mosquitto
def printHex(my_hex):
if type(my_hex)==str:
print " ".join(hex(ord(n)) for n in my_hex)
if type(my_hex)==list:
result = []
for i in my_hex:
result.append("0x%1x"%(i))
print ",".join(result)
class InvalidMsg():
def __init__(self,err):
self.err = err
pass
class GhomaMsgEncode():
def __init__(self,cmd,payload,mode=0):
self.msg = "\x5a\xa5"
self.msg+=chr(mode)
self.msg+=chr(len(payload)+1)
self.msg+=chr(cmd)
checksum = 0xff-cmd
for i in payload:
self.msg+=chr(i)
checksum-=i
if checksum <0:
checksum+=256
self.msg+=chr(checksum)
self.msg+="\x5b\xb5"
class GhomaMsgDecode():
def __init__(self,msg):
if not msg.startswith("\x5a\xa5"):
raise InvalidMsg("Invialid prefix")
self.mode = ord(msg[2])
self.length = ord(msg[3])-1
self.cmd = ord(msg[4])
self.payload = []
checksum = 0xff-self.cmd
for i in range(self.length):
self.payload.append(ord(msg[5+i]))
checksum-=ord(msg[5+i])
if checksum < 0:
checksum+=256
#self.payload = msg[5:-3]
if not checksum == ord(msg[5+self.length]):
raise InvalidMsg("Invalid checksum")
if not msg[6+self.length:].startswith("\x5b\xb5"):
printHex(msg)
raise InvalidMsg("Invalid postfix")
self.next = msg[8+self.length:]
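# --- Round-trip sketch (illustrative helper; not called anywhere in this script).
# Encoding cmd 0x05 with payload [0x01] and decoding the resulting frame recovers
# the same command and payload, which shows how the prefix/length/checksum/postfix
# framing above fits together.
def _framing_example():
    frame = GhomaMsgEncode(cmd=0x05, payload=[0x01]).msg
    decoded = GhomaMsgDecode(frame)
    assert decoded.cmd == 0x05 and decoded.payload == [0x01]
    return decoded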
class ThreadedEchoRequestHandler(
SocketServer.BaseRequestHandler,
):
"""
The request handler class for our server.
It is instantiated once per connection to the server, and must
override the handle() method to implement communication to the
client.
"""
def publishState(self):
t = "ghoma/%s/state"%(self.mac)
print "Publish %s:%s"%(t,self.state)
self.client.publish(t, self.state)
self.client.publish("ghoma",self.mac)
def handle(self):
def on_message(client, userdata, msg):
if msg.payload == "1":
self.request.sendall(GhomaMsgEncode(cmd=0x10,payload=[0x01,01,0x0a,0xe0,0x32,0x23,0xd7,0x8a,0x6c,0xff,0xfe,0x00,0x00,0x10,0x11,0x00,0x00,0x01,0x00,0x00,0x00,0xff]).msg)
elif msg.payload == "0":
self.request.sendall(GhomaMsgEncode(cmd=0x10,payload=[0x01,01,0x0a,0xe0,0x32,0x23,0xd7,0x8a,0x6c,0xff,0xfe,0x00,0x00,0x10,0x11,0x00,0x00,0x01,0x00,0x00,0x00,0x00]).msg)
def on_connect(client, userdata, flags):
#client.subscribe("ghoma/ac:cf:23:d7:8a:6c/set")
print "Connected to broker"
self.mac = "00:00:00:00:00:00"
self.state = "unknown"
self.client = mosquitto.Mosquitto()
self.client.connect(host="localhost",port=1883)
self.client.on_message = on_message
self.client.on_connect = on_connect
self.client.loop_start()
print "Sending Init 1 Part 1"
self.request.sendall(GhomaMsgEncode(cmd=2,payload=[0x0a,0x11,0x01,0x03,0x0f,0x0c]).msg)
print "Sending Init 1 Part 2"
self.request.sendall(GhomaMsgEncode(cmd=2,payload=[]).msg)
print "Sending Init 2"
self.request.sendall(GhomaMsgEncode(cmd=5,payload=[0x01]).msg)
alive = time.time()
while True:
try:
self.data = self.request.recv(1024)
except:
return
if len(self.data) == 0:
time.sleep(.1)
if time.time() - alive > 30:
print "Timeout exceeded"
return
continue
while not self.data == "":
msg = GhomaMsgDecode(self.data)
if msg.cmd == 0x03 and msg.payload==[0x01,0x0a,0xc0,0x32,0x23,0xd7,0x8a,0x6c,0x01,0x00]:
print "Received Init 1 reply"
print "Sending Init 2"
self.request.sendall(GhomaMsgEncode(cmd=0x05,payload=[0x01]).msg)
elif msg.cmd == 0x04 and msg.payload==[0x01,0x0a,0xc0,0x32,0x23,0xd7,0x8a,0x6c]:
print "Received Alive"
self.request.sendall(GhomaMsgEncode(cmd=0x06,mode=1,payload=[]).msg)
self.publishState()
elif msg.cmd==0x07 and msg.payload==[0x01,0x0a,0xc0,0x32,0x23,0xd7,0x8a,0x6c,0x00,0x01,0x06,0xac,0xcf,0x23,0xd7,0x8a,0x6c]:
#last 6 bytes are the MAC [0xac,0xcf,0x23,0xd7,0x8a,0x6c]
self.mac= ":".join("%02x"%(n) for n in msg.payload[-6:])
print "Recieved Init 2 part 1 reply"
self.publishState()
self.client.subscribe("ghoma/%s/set"%(self.mac))
elif msg.cmd==0x07 and msg.payload==[0x01,0x0a,0xc0,0x32,0x23,0xd7,0x8a,0x6c,0x00,0x02,0x05,0x00,0x01,0x01,0x08,0x1a,0xe0,0x5b,0xb5,0x5a,0xa5,0x0,0x15,0x90,0x1,0xa,0xe0,0x32,0x23,0xd7,0x8a,0x6c,0xff,0xfe,0x1,0x81,0x11,0x0,0x0,0x1,0x0,0x0,0x0,0x0]:
print "Received known sequence with cmd id 7 during initialize"
printHex(msg.payload)
elif msg.cmd==0x07 and msg.payload==[0x01,0x0a,0xc0,0x32,0x23,0xd7,0x8a,0x6c,0x00,0x02,0x05,0x00,0x01,0x08,0x1a]:
print "Received Init 2 part 2 reply"
elif msg.cmd==0x07 and msg.payload==[0x01,0x0a,0xc0,0x8,0x32,0x13,0xd7,0x8a,0x6c,0x00,0x00,0x00,0x1f]:
print "Received known sequence with cmd id 7 during initialize"
printHex(msg.payload)
elif msg.cmd==0x07 and msg.payload==[0x1,0xa,0xc0,0x32,0x23,0xd7,0x8a,0x6c,0x0,0x2,0x5,0x0,0x1,0x1,0x8,0x1a]:
print "Received known sequence with cmd id 7 during initialize"
printHex(msg.payload)
elif msg.cmd==0x90:
if msg.payload==[0x01,0x0a,0xe0,0x32,0x23,0xd7,0x8a,0x6c,0xff,0xfe,0x01,0x81,0x11,0x00,0x00,0x01,0x00,0x00,0x00,0x00]:
print "Someone pressed the switch from on->off"
self.state = "0"
self.publishState()
elif msg.payload==[0x01,0x0a,0xe0,0x32,0x23,0xd7,0x8a,0x6c,0xff,0xfe,0x01,0x81,0x11,0x00,0x00,0x01,0x00,0x00,0x00,0xff]:
print "Someone pressed the switch from off->on"
self.state = "1"
self.publishState()
elif msg.payload==[0x01,0x0a,0xe0,0x32,0x23,0xd7,0x8a,0x6c,0xff,0xfe,0x01,0x11,0x11,0x00,0x00,0x01,0x00,0x00,0x00,0x00]:
print "Switch AUS confirmed"
self.state = "0"
self.publishState()
elif msg.payload==[0x01,0x0a,0xe0,0x32,0x23,0xd7,0x8a,0x6c,0xff,0xfe,0x01,0x11,0x11,0x00,0x00,0x01,0x00,0x00,0x00,0xff]:
print "Switch EIN confirmed"
self.state = "1"
self.publishState()
else:
print "Unknown Payload with cmd 0x90"
elif msg.cmd==0xfe and msg.payload==[0x01,0xa,0xc0,0x32,0x23,0xd7,0x8a,0x6c,0x00,0x00,0x00,0x1f]:
print "Received cmd 254 - propably something went wrong"
printHex(msg.payload)
else:
print "Received unknown data with cmd id %i"%(msg.cmd)
printHex(msg.payload)
#printHex(self.data[2:-3])
self.data = msg.next
alive = time.time()
return
class ThreadedEchoServer(SocketServer.ThreadingMixIn,
SocketServer.TCPServer,
):
def server_bind(self):
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(self.server_address)
pass
if __name__ == "__main__":
HOST, PORT = "", 4196
while True:
try:
server = ThreadedEchoServer((HOST, PORT),ThreadedEchoRequestHandler)
print "Server started!"
break
except:
print "Port still busy..."
time.sleep(1)
pass
# Activate the server; this will keep running until you
# interrupt the program with Ctrl-C
server.serve_forever()
| 42.038835
| 263
| 0.55127
|
d5e5c98bb4da6aaaa7a2890e7f62db39fd383898
| 17,631
|
py
|
Python
|
clients/common/rocsparse_gentest.py
|
cgmb/rocSPARSE
|
9a56ca84a4d0091875e25c5c1e1c0b15f3147c83
|
[
"MIT"
] | null | null | null |
clients/common/rocsparse_gentest.py
|
cgmb/rocSPARSE
|
9a56ca84a4d0091875e25c5c1e1c0b15f3147c83
|
[
"MIT"
] | null | null | null |
clients/common/rocsparse_gentest.py
|
cgmb/rocSPARSE
|
9a56ca84a4d0091875e25c5c1e1c0b15f3147c83
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
# ########################################################################
# Copyright (c) 2019-2020 Advanced Micro Devices, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# ########################################################################
"""Expand rocSPARSE YAML test data file into binary Arguments records"""
import re
import sys
import os
import argparse
import ctypes
import glob
from fnmatch import fnmatchcase
try: # Import either the C or pure-Python YAML parser
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
import yaml
# Regex for type names in the YAML file. Optional *nnn indicates array.
TYPE_RE = re.compile(r'[a-z_A-Z]\w*(:?\s*\*\s*\d+)?$')
# Regex for integer ranges A..B[..C]
INT_RANGE_RE = re.compile(r'\s*(-?\d+)\s*\.\.\s*(-?\d+)\s*(?:\.\.\s*(-?\d+)\s*)?$')
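# e.g. '4..16' expands to 4,5,...,16 and '0..64..16' to 0,16,32,48,64
# (see the range() expansion in generate() below)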
# Regex for include: YAML extension
INCLUDE_RE = re.compile(r'include\s*:\s*(.*)')
# Regex for complex types
COMPLEX_RE = re.compile(r'f\d+_c$')
args = {}
testcases = set()
datatypes = {}
param = {}
def main():
args.update(parse_args().__dict__)
for doc in get_yaml_docs():
process_doc(doc)
def process_doc(doc):
"""Process one document in the YAML file"""
# Ignore empty documents
if not doc or not doc.get('Tests'):
return
# Clear datatypes and params from previous documents
datatypes.clear()
param.clear()
# Return dictionary of all known datatypes
datatypes.update(get_datatypes(doc))
# Arguments structure corresponding to C/C++ structure
param['Arguments'] = type('Arguments', (ctypes.Structure,),
{'_fields_': get_arguments(doc)})
# Special names which get expanded as lists of arguments
param['dict_lists_to_expand'] = doc.get('Dictionary lists to expand') or ()
# Lists which are not expanded
param['lists_to_not_expand'] = doc.get('Lists to not expand') or ()
# Defaults
defaults = doc.get('Defaults') or {}
# Known Bugs
param['known_bugs'] = doc.get('Known bugs') or []
# Functions
param['Functions'] = doc.get('Functions') or {}
# Instantiate all of the tests, starting with defaults
for test in doc['Tests']:
case = defaults.copy()
case.update(test)
generate(case, instantiate)
def parse_args():
"""Parse command-line arguments, returning input and output files"""
parser = argparse.ArgumentParser(description="""
Expand rocSPARSE YAML test data file into binary Arguments records
""")
parser.add_argument('infile',
nargs='?',
type=argparse.FileType('r'),
default=sys.stdin)
parser.add_argument('-o', '--out',
dest='outfile',
type=argparse.FileType('wb'),
default=sys.stdout)
parser.add_argument('-I',
help="Add include path",
action='append',
dest='includes',
default=[])
parser.add_argument('-t', '--template',
type=argparse.FileType('r'))
return parser.parse_args()
def read_yaml_file(file):
"""Read the YAML file, processing include: lines as an extension"""
file_dir = os.path.dirname(file.name) or os.getcwd()
source = []
for line_no, line in enumerate(file, start=1):
# Keep track of file names and line numbers for each line of YAML
match = line.startswith('include') and INCLUDE_RE.match(line)
if not match:
source.append([line, file.name, line_no])
else:
include_file = match.group(1)
include_dirs = [file_dir] + args['includes']
for path in include_dirs:
path = os.path.join(path, include_file)
if os.path.exists(path):
source.extend(read_yaml_file(open(path, 'r')))
break
else:
sys.exit("In file " + file.name + ", line " +
str(line_no) + ", column " + str(match.start(1)+1) +
":\n" + line.rstrip() + "\n" + " " * match.start(1) +
"^\nCannot open " + include_file +
"\n\nInclude paths:\n" + "\n".join(include_dirs))
file.close()
return source
def get_yaml_docs():
"""Parse the YAML file"""
source = read_yaml_file(args['infile'])
if args.get('template'):
source = read_yaml_file(args['template']) + source
source_str = ''.join([line[0] for line in source])
def mark_str(mark):
line = source[mark.line]
return("In file " + line[1] + ", line " + str(line[2]) + ", column " +
str(mark.column + 1) + ":\n" + line[0].rstrip() + "\n" +
' ' * mark.column + "^\n")
# We iterate through all of the documents to properly diagnose errors,
# because the load_all generator does not handle exceptions correctly.
docs = []
load = Loader(source_str)
while load.check_data():
try:
doc = load.get_data()
except yaml.YAMLError as err:
sys.exit((mark_str(err.problem_mark) if err.problem_mark else "") +
(err.problem + "\n" if err.problem else "") +
(err.note + "\n" if err.note else ""))
else:
docs.append(doc)
return docs
def get_datatypes(doc):
""" Get datatypes from YAML doc"""
dt = ctypes.__dict__.copy()
for declaration in doc.get('Datatypes') or ():
for name, decl in declaration.items():
if isinstance(decl, dict):
# Create derived class type based on bases and attr entries
dt[name] = type(name,
tuple([eval(t, dt)
for t in decl.get('bases') or ()
if TYPE_RE.match(t)]
), decl.get('attr') or {})
# Import class' attributes into the datatype namespace
for subtype in decl.get('attr') or {}:
if TYPE_RE.match(subtype):
dt[subtype] = eval(name+'.'+subtype, dt)
elif isinstance(decl, str) and TYPE_RE.match(decl):
dt[name] = dt[decl]
else:
sys.exit("Unrecognized data type "+name+": "+repr(decl))
return dt
def get_arguments(doc):
"""The kernel argument list, with argument names and types"""
return [(var, eval(decl[var], datatypes))
for decl in doc.get('Arguments') or ()
if len(decl) == 1
for var in decl
if TYPE_RE.match(decl[var])]
def setdefaults(test):
"""Set default values for parameters"""
# Do not put constant defaults here -- use rocsparse_common.yaml for that.
# These are only for dynamic defaults
    # TODO: Ideally this should be moved to the YAML file, with eval'd expressions.
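    # The literal 111 below presumably corresponds to the "no transpose"
    # operation value declared in the YAML Datatypes section.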
if test['transA'] == 111 or test['transB'] == 111:
test.setdefault('lda', 0)
test.setdefault('ldb', 0)
test.setdefault('ldc', 0)
test.setdefault('ldd', 0)
else:
test.setdefault('lda', test['M'] if test['transA'] == 111 else
test['K'])
test.setdefault('ldb', test['K'] if test['transB'] == 111 else
test['N'])
test.setdefault('ldc', test['M'])
test.setdefault('ldd', test['M'])
def write_signature(out):
"""Write the signature used to verify binary file compatibility"""
if 'signature_written' not in args:
sig = 0
byt = bytearray("rocSPARSE", 'utf_8')
byt.append(0)
last_ofs = 0
for (name, ctype) in param['Arguments']._fields_:
member = getattr(param['Arguments'], name)
for i in range(0, member.offset - last_ofs):
byt.append(0)
for i in range(0, member.size):
byt.append(sig ^ i)
sig = (sig + 89) % 256
last_ofs = member.offset + member.size
for i in range(0, ctypes.sizeof(param['Arguments']) - last_ofs):
byt.append(0)
byt.extend(bytes("ROCsparse", 'utf_8'))
byt.append(0)
out.write(byt)
args['signature_written'] = True
def write_test(test):
"""Write the test case out to the binary file if not seen already"""
# For each argument declared in arguments, we generate a positional
# argument in the Arguments constructor. For strings, we pass the
# value of the string directly. For arrays, we unpack their contents
# into the ctype array constructor and pass the ctype array. For
# scalars, we coerce the string/numeric value into ctype.
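    # For example, a scalar field declared as ('M', c_int) becomes c_int(test['M']),
    # while a char-array field such as ('function', c_char*64) receives the UTF-8
    # bytes of the string (field names here are only illustrative).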
arg = []
for name, ctype in param['Arguments']._fields_:
try:
if issubclass(ctype, ctypes.Array):
if issubclass(ctype._type_, ctypes.c_char):
arg.append(bytes(test[name], 'utf_8'))
else:
arg.append(ctype(*test[name]))
elif issubclass(ctype, ctypes.c_char):
arg.append(bytes(test[name], 'utf_8'))
else:
arg.append(ctype(test[name]))
except TypeError as err:
sys.exit("TypeError: " + str(err) + " for " + name +
", which has type " + str(type(test[name])) + "\n")
byt = bytes(param['Arguments'](*arg))
if byt not in testcases:
testcases.add(byt)
write_signature(args['outfile'])
args['outfile'].write(byt)
def instantiate(test):
"""Instantiate a given test case"""
test = test.copy()
# Any Arguments fields declared as enums
enum_args = [decl[0] for decl in param['Arguments']._fields_
if decl[1].__module__ == '__main__']
try:
setdefaults(test)
# If no enum arguments are complex, clear alphai and betai
for typename in enum_args:
if COMPLEX_RE.match(test[typename]):
break
else:
for name in ('alphai', 'betai', 'boostvali'):
if name in test:
test[name] = 0.0
# For enum arguments, replace name with value
for typename in enum_args:
test[typename] = datatypes[test[typename]]
# Match known bugs
if test['category'] not in ('known_bug', 'disabled'):
for bug in param['known_bugs']:
for key, value in bug.items():
if key not in test:
break
if key == 'function':
if not fnmatchcase(test[key], value):
break
# For keys declared as enums, compare resulting values
elif test[key] != (datatypes.get(value) if key in enum_args
else value):
break
else: # All values specified in known bug match test case
test['category'] = 'known_bug'
break
write_test(test)
except KeyError as err:
sys.exit("Undefined value " + str(err) + "\n" + str(test))
def generate(test, function):
"""Generate test combinations by iterating across lists recursively"""
test = test.copy()
# For specially named lists, they are expanded and merged into the test
# argument list. When the list name is a dictionary of length 1, its pairs
# indicate that the argument named by its key takes on values paired with
# the argument named by its value, which is another dictionary list. We
# process the value dictionaries' keys in alphabetic order, to ensure
# deterministic test ordering.
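    # e.g. if this list contains { a: b } and a test defines
    #   a: { x1: y1, x2: y2 }
    # then the test is expanded into one case with a=x1, b=y1 and another with
    # a=x2, b=y2 (the names here are purely illustrative).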
for argname in param['dict_lists_to_expand']:
if type(argname) == dict:
if len(argname) == 1:
arg, target = list(argname.items())[0]
if arg in test and type(test[arg]) == dict:
pairs = sorted(list(test[arg].items()), key=lambda x: x[0])
for test[arg], test[target] in pairs:
generate(test, function)
return
elif argname in test and type(test[argname]) in (tuple, list, dict):
# Pop the list and iterate across it
ilist = test.pop(argname)
# For a bare dictionary, wrap it in a list and apply it once
for item in [ilist] if type(ilist) == dict else ilist:
try:
case = test.copy()
case.update(item) # original test merged with each item
generate(case, function)
except TypeError as err:
sys.exit("TypeError: " + str(err) + " for " + argname +
", which has type " + str(type(item)) +
"\nA name listed in \"Dictionary lists to expand\" "
"must be a defined as a dictionary.\n")
return
for key in sorted(list(test)):
# Integer arguments which are ranges (A..B[..C]) are expanded
if type(test[key]) == str:
match = INT_RANGE_RE.match(str(test[key]))
if match:
for test[key] in range(int(match.group(1)),
int(match.group(2))+1,
int(match.group(3) or 1)):
generate(test, function)
return
# For sequence arguments, they are expanded into scalars
elif (type(test[key]) in (tuple, list) and
key not in param['lists_to_not_expand']):
if key == "filename" and test[key] != "*":
for test[key] in test[key]:
#
# Get the root path.
#
out_path = os.path.dirname(args['outfile'].name) + "/../matrices/"
#
# Get argument.
#
filename_arg = out_path + str(test[key])
#
# It is a directory.
#
if os.path.isdir(filename_arg):
#
# List the files.
#
names = glob.glob(filename_arg + "/*")
for name in names:
subpath=os.path.splitext(name.replace(out_path,""))[0]
test[key]=[subpath]
generate(test,function)
return
else:
#
# Might be a regular expression
#
names = glob.glob(filename_arg)
if not names:
names = glob.glob(filename_arg + ".csr")
if not names:
names = glob.glob(filename_arg + ".bsr")
if not names:
names = glob.glob(filename_arg + ".bsr")
if not names:
print("skip unrecognized filename expression: '" + test[key] + "'")
else:
generate(test,function)
else:
generate(test,function)
else:
generate(test,function)
else:
for name in names:
subpath=os.path.splitext(name.replace(out_path,""))[0]
test[key]=[subpath]
generate(test,function)
return
else:
for test[key] in test[key]:
generate(test,function)
return
# Replace typed function names with generic functions and types
if 'rocsparse_function' in test:
func = test.pop('rocsparse_function')
if func in param['Functions']:
test.update(param['Functions'][func])
else:
test['function'] = func
generate(test, function)
return
function(test)
if __name__ == '__main__':
main()
| 38.162338
| 107
| 0.528217
|
756a93b65a33e9d880e0c8c63a6bc5e5b79f873d
| 936
|
py
|
Python
|
bookmeister/__main__.py
|
ethru/bookmejster
|
b0708c8cd50450a978270c24eee2d4f980cefea6
|
[
"MIT"
] | 1
|
2021-09-22T22:58:33.000Z
|
2021-09-22T22:58:33.000Z
|
bookmeister/__main__.py
|
ethru/bookmejster
|
b0708c8cd50450a978270c24eee2d4f980cefea6
|
[
"MIT"
] | 4
|
2021-06-08T21:05:07.000Z
|
2022-03-12T00:19:25.000Z
|
bookmeister/__main__.py
|
ethru/bookmejster
|
b0708c8cd50450a978270c24eee2d4f980cefea6
|
[
"MIT"
] | null | null | null |
"""#### Main
Contain `main` function which create application GUI and set its size. Module
`sys` is needed to specify which platform is used and adjust application width.
#### License
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from sys import platform
from bookmeister.gui import Gui
def main():
"""Set GUI width according to used platform and run it."""
width = '450' if platform == 'win32' else '600'
Gui('Bookstore Manager', f'{width}x470').mainloop()
if __name__ == '__main__':
main()
| 31.2
| 79
| 0.74359
|
d5624551b2797fe719d7af5273f22221cbc485ba
| 12,207
|
py
|
Python
|
homeassistant/components/zwave_js/__init__.py
|
AdmiralStipe/core
|
e9334347eb8354795cdb17f1401a80ef3abfb269
|
[
"Apache-2.0"
] | 4
|
2016-06-22T12:00:41.000Z
|
2018-06-11T20:31:25.000Z
|
homeassistant/components/zwave_js/__init__.py
|
AdmiralStipe/core
|
e9334347eb8354795cdb17f1401a80ef3abfb269
|
[
"Apache-2.0"
] | 54
|
2016-06-15T04:52:32.000Z
|
2022-03-22T06:02:16.000Z
|
homeassistant/components/zwave_js/__init__.py
|
AdmiralStipe/core
|
e9334347eb8354795cdb17f1401a80ef3abfb269
|
[
"Apache-2.0"
] | 6
|
2019-07-06T00:43:13.000Z
|
2021-01-16T13:27:06.000Z
|
"""The Z-Wave JS integration."""
import asyncio
import logging
from typing import Callable, List
from async_timeout import timeout
from zwave_js_server.client import Client as ZwaveClient
from zwave_js_server.exceptions import BaseZwaveJSServerError
from zwave_js_server.model.node import Node as ZwaveNode
from zwave_js_server.model.notification import Notification
from zwave_js_server.model.value import ValueNotification
from homeassistant.components.hassio.handler import HassioAPIError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .api import async_register_api
from .const import (
ATTR_COMMAND_CLASS,
ATTR_COMMAND_CLASS_NAME,
ATTR_DEVICE_ID,
ATTR_DOMAIN,
ATTR_ENDPOINT,
ATTR_HOME_ID,
ATTR_LABEL,
ATTR_NODE_ID,
ATTR_PARAMETERS,
ATTR_PROPERTY_KEY_NAME,
ATTR_PROPERTY_NAME,
ATTR_TYPE,
ATTR_VALUE,
CONF_INTEGRATION_CREATED_ADDON,
DATA_CLIENT,
DATA_UNSUBSCRIBE,
DOMAIN,
EVENT_DEVICE_ADDED_TO_REGISTRY,
PLATFORMS,
ZWAVE_JS_EVENT,
)
from .discovery import async_discover_values
from .entity import get_device_id
LOGGER = logging.getLogger(__package__)
CONNECT_TIMEOUT = 10
DATA_CLIENT_LISTEN_TASK = "client_listen_task"
DATA_START_PLATFORM_TASK = "start_platform_task"
async def async_setup(hass: HomeAssistant, config: dict) -> bool:
"""Set up the Z-Wave JS component."""
hass.data[DOMAIN] = {}
return True
@callback
def register_node_in_dev_reg(
hass: HomeAssistant,
entry: ConfigEntry,
dev_reg: device_registry.DeviceRegistry,
client: ZwaveClient,
node: ZwaveNode,
) -> None:
"""Register node in dev reg."""
device = dev_reg.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={get_device_id(client, node)},
sw_version=node.firmware_version,
name=node.name or node.device_config.description or f"Node {node.node_id}",
model=node.device_config.label,
manufacturer=node.device_config.manufacturer,
)
async_dispatcher_send(hass, EVENT_DEVICE_ADDED_TO_REGISTRY, device)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Z-Wave JS from a config entry."""
client = ZwaveClient(entry.data[CONF_URL], async_get_clientsession(hass))
dev_reg = await device_registry.async_get_registry(hass)
@callback
def async_on_node_ready(node: ZwaveNode) -> None:
"""Handle node ready event."""
LOGGER.debug("Processing node %s", node)
# register (or update) node in device registry
register_node_in_dev_reg(hass, entry, dev_reg, client, node)
# run discovery on all node values and create/update entities
for disc_info in async_discover_values(node):
LOGGER.debug("Discovered entity: %s", disc_info)
async_dispatcher_send(
hass, f"{DOMAIN}_{entry.entry_id}_add_{disc_info.platform}", disc_info
)
# add listener for stateless node value notification events
node.on(
"value notification",
lambda event: async_on_value_notification(event["value_notification"]),
)
# add listener for stateless node notification events
node.on(
"notification", lambda event: async_on_notification(event["notification"])
)
@callback
def async_on_node_added(node: ZwaveNode) -> None:
"""Handle node added event."""
# we only want to run discovery when the node has reached ready state,
# otherwise we'll have all kinds of missing info issues.
if node.ready:
async_on_node_ready(node)
return
# if node is not yet ready, register one-time callback for ready state
LOGGER.debug("Node added: %s - waiting for it to become ready.", node.node_id)
node.once(
"ready",
lambda event: async_on_node_ready(event["node"]),
)
# we do submit the node to device registry so user has
# some visual feedback that something is (in the process of) being added
register_node_in_dev_reg(hass, entry, dev_reg, client, node)
@callback
def async_on_node_removed(node: ZwaveNode) -> None:
"""Handle node removed event."""
# grab device in device registry attached to this node
dev_id = get_device_id(client, node)
device = dev_reg.async_get_device({dev_id})
# note: removal of entity registry entry is handled by core
dev_reg.async_remove_device(device.id) # type: ignore
@callback
def async_on_value_notification(notification: ValueNotification) -> None:
"""Relay stateless value notification events from Z-Wave nodes to hass."""
device = dev_reg.async_get_device({get_device_id(client, notification.node)})
value = notification.value
if notification.metadata.states:
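            # e.g. map a raw value such as 0 to a human-readable label when the
            # command class metadata defines named states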
value = notification.metadata.states.get(str(value), value)
hass.bus.async_fire(
ZWAVE_JS_EVENT,
{
ATTR_TYPE: "value_notification",
ATTR_DOMAIN: DOMAIN,
ATTR_NODE_ID: notification.node.node_id,
ATTR_HOME_ID: client.driver.controller.home_id,
ATTR_ENDPOINT: notification.endpoint,
ATTR_DEVICE_ID: device.id, # type: ignore
ATTR_COMMAND_CLASS: notification.command_class,
ATTR_COMMAND_CLASS_NAME: notification.command_class_name,
ATTR_LABEL: notification.metadata.label,
ATTR_PROPERTY_NAME: notification.property_name,
ATTR_PROPERTY_KEY_NAME: notification.property_key_name,
ATTR_VALUE: value,
},
)
@callback
def async_on_notification(notification: Notification) -> None:
"""Relay stateless notification events from Z-Wave nodes to hass."""
device = dev_reg.async_get_device({get_device_id(client, notification.node)})
hass.bus.async_fire(
ZWAVE_JS_EVENT,
{
ATTR_TYPE: "notification",
ATTR_DOMAIN: DOMAIN,
ATTR_NODE_ID: notification.node.node_id,
ATTR_HOME_ID: client.driver.controller.home_id,
ATTR_DEVICE_ID: device.id, # type: ignore
ATTR_LABEL: notification.notification_label,
ATTR_PARAMETERS: notification.parameters,
},
)
# connect and throw error if connection failed
try:
async with timeout(CONNECT_TIMEOUT):
await client.connect()
except (asyncio.TimeoutError, BaseZwaveJSServerError) as err:
raise ConfigEntryNotReady from err
else:
LOGGER.info("Connected to Zwave JS Server")
unsubscribe_callbacks: List[Callable] = []
hass.data[DOMAIN][entry.entry_id] = {
DATA_CLIENT: client,
DATA_UNSUBSCRIBE: unsubscribe_callbacks,
}
# Set up websocket API
async_register_api(hass)
async def start_platforms() -> None:
"""Start platforms and perform discovery."""
# wait until all required platforms are ready
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_setup(entry, component)
for component in PLATFORMS
]
)
driver_ready = asyncio.Event()
async def handle_ha_shutdown(event: Event) -> None:
"""Handle HA shutdown."""
await disconnect_client(hass, entry, client, listen_task, platform_task)
listen_task = asyncio.create_task(
client_listen(hass, entry, client, driver_ready)
)
hass.data[DOMAIN][entry.entry_id][DATA_CLIENT_LISTEN_TASK] = listen_task
unsubscribe_callbacks.append(
hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, handle_ha_shutdown)
)
await driver_ready.wait()
LOGGER.info("Connection to Zwave JS Server initialized")
# Check for nodes that no longer exist and remove them
stored_devices = device_registry.async_entries_for_config_entry(
dev_reg, entry.entry_id
)
known_devices = [
dev_reg.async_get_device({get_device_id(client, node)})
for node in client.driver.controller.nodes.values()
]
# Devices that are in the device registry that are not known by the controller can be removed
for device in stored_devices:
if device not in known_devices:
dev_reg.async_remove_device(device.id)
# run discovery on all ready nodes
for node in client.driver.controller.nodes.values():
async_on_node_added(node)
# listen for new nodes being added to the mesh
client.driver.controller.on(
"node added", lambda event: async_on_node_added(event["node"])
)
# listen for nodes being removed from the mesh
# NOTE: This will not remove nodes that were removed when HA was not running
client.driver.controller.on(
"node removed", lambda event: async_on_node_removed(event["node"])
)
platform_task = hass.async_create_task(start_platforms())
hass.data[DOMAIN][entry.entry_id][DATA_START_PLATFORM_TASK] = platform_task
return True
async def client_listen(
hass: HomeAssistant,
entry: ConfigEntry,
client: ZwaveClient,
driver_ready: asyncio.Event,
) -> None:
"""Listen with the client."""
should_reload = True
try:
await client.listen(driver_ready)
except asyncio.CancelledError:
should_reload = False
except BaseZwaveJSServerError:
pass
# The entry needs to be reloaded since a new driver state
# will be acquired on reconnect.
# All model instances will be replaced when the new state is acquired.
if should_reload:
LOGGER.info("Disconnected from server. Reloading integration")
asyncio.create_task(hass.config_entries.async_reload(entry.entry_id))
async def disconnect_client(
hass: HomeAssistant,
entry: ConfigEntry,
client: ZwaveClient,
listen_task: asyncio.Task,
platform_task: asyncio.Task,
) -> None:
"""Disconnect client."""
listen_task.cancel()
platform_task.cancel()
await asyncio.gather(listen_task, platform_task)
if client.connected:
await client.disconnect()
LOGGER.info("Disconnected from Zwave JS Server")
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(entry, component)
for component in PLATFORMS
]
)
)
if not unload_ok:
return False
info = hass.data[DOMAIN].pop(entry.entry_id)
for unsub in info[DATA_UNSUBSCRIBE]:
unsub()
if DATA_CLIENT_LISTEN_TASK in info:
await disconnect_client(
hass,
entry,
info[DATA_CLIENT],
info[DATA_CLIENT_LISTEN_TASK],
platform_task=info[DATA_START_PLATFORM_TASK],
)
return True
async def async_remove_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Remove a config entry."""
if not entry.data.get(CONF_INTEGRATION_CREATED_ADDON):
return
try:
await hass.components.hassio.async_stop_addon("core_zwave_js")
except HassioAPIError as err:
LOGGER.error("Failed to stop the Z-Wave JS add-on: %s", err)
return
try:
await hass.components.hassio.async_uninstall_addon("core_zwave_js")
except HassioAPIError as err:
LOGGER.error("Failed to uninstall the Z-Wave JS add-on: %s", err)
| 35.485465
| 101
| 0.673958
|
4db2e3fd03d40bd3f29ef705d3d60d13fede18ec
| 1,771
|
py
|
Python
|
game/hangman.py
|
MayankShrivastava17/mini-project
|
c6fa2c95caba792ec4641eaeafaa5ab18f9e7abd
|
[
"MIT"
] | 1
|
2021-05-25T13:30:05.000Z
|
2021-05-25T13:30:05.000Z
|
game/hangman.py
|
MayankShrivastava17/mini-project
|
c6fa2c95caba792ec4641eaeafaa5ab18f9e7abd
|
[
"MIT"
] | null | null | null |
game/hangman.py
|
MayankShrivastava17/mini-project
|
c6fa2c95caba792ec4641eaeafaa5ab18f9e7abd
|
[
"MIT"
] | null | null | null |
#importing the time module
import time
#welcoming the user
name = input("What is your name? ")
print ("Hello, " + name, "Time to play hangman!")
#wait for 1 second
time.sleep(1)
print ("Start guessing...")
time.sleep(0.5)
#here we set the secret
word = "secret"
#creates an variable with an empty value
guesses = ''
#determine the number of turns
turns = 10
# Create a while loop
#check if the turns are more than zero
while turns > 0:
# make a counter that starts with zero
failed = 0
# for every character in secret_word
for char in word:
        # see if the character is in the player's guess
if char in guesses:
            # then print out the character
print (char)
else:
# if not found, print a dash
print ("_")
            # and increase the failed counter by one
failed += 1
# if failed is equal to zero
# print You Won
if failed == 0:
print ("You won" )
# exit the script
break
    print()
    # ask the user to guess a character
guess = input("guess a character:")
# set the players guess to guesses
guesses += guess
# if the guess is not found in the secret word
if guess not in word:
        # decrease the turns counter by 1
turns -= 1
# print wrong
print ("Wrong")
# how many turns are left
print ("You have {}, more guesses".format(turns))
# if the turns are equal to zero
if turns == 0:
# print "You Lose"
print ("You Lose")
| 21.083333
| 58
| 0.524562
|
922bfa84515e9079548c4f7fd0e8d704405f4a64
| 3,196
|
py
|
Python
|
homeassistant/components/surepetcare/sensor.py
|
kauzu/core
|
6eadc0c3033473381cb70babf9f7c813e183d2df
|
[
"Apache-2.0"
] | 2
|
2021-01-29T02:52:01.000Z
|
2021-05-15T04:23:18.000Z
|
homeassistant/components/surepetcare/sensor.py
|
kauzu/core
|
6eadc0c3033473381cb70babf9f7c813e183d2df
|
[
"Apache-2.0"
] | 68
|
2020-07-23T07:13:53.000Z
|
2022-03-31T06:01:48.000Z
|
homeassistant/components/surepetcare/sensor.py
|
kauzu/core
|
6eadc0c3033473381cb70babf9f7c813e183d2df
|
[
"Apache-2.0"
] | 7
|
2021-03-20T12:34:01.000Z
|
2021-12-02T10:13:52.000Z
|
"""Support for Sure PetCare Flaps/Pets sensors."""
from __future__ import annotations
import logging
from surepy.entities import SurepyEntity
from surepy.enums import EntityType
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import ATTR_VOLTAGE, DEVICE_CLASS_BATTERY, PERCENTAGE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from . import SurePetcareAPI
from .const import (
DOMAIN,
SPC,
SURE_BATT_VOLTAGE_DIFF,
SURE_BATT_VOLTAGE_LOW,
TOPIC_UPDATE,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up Sure PetCare Flaps sensors."""
if discovery_info is None:
return
entities: list[SurepyEntity] = []
spc: SurePetcareAPI = hass.data[DOMAIN][SPC]
for surepy_entity in spc.states.values():
if surepy_entity.type in [
EntityType.CAT_FLAP,
EntityType.PET_FLAP,
EntityType.FEEDER,
EntityType.FELAQUA,
]:
entities.append(SureBattery(surepy_entity.id, spc))
async_add_entities(entities)
class SureBattery(SensorEntity):
"""A sensor implementation for Sure Petcare Entities."""
_attr_should_poll = False
def __init__(self, _id: int, spc: SurePetcareAPI) -> None:
"""Initialize a Sure Petcare sensor."""
self._id = _id
self._spc: SurePetcareAPI = spc
surepy_entity: SurepyEntity = self._spc.states[_id]
self._attr_device_class = DEVICE_CLASS_BATTERY
self._attr_name = f"{surepy_entity.type.name.capitalize()} {surepy_entity.name.capitalize()} Battery Level"
self._attr_native_unit_of_measurement = PERCENTAGE
self._attr_unique_id = (
f"{surepy_entity.household_id}-{surepy_entity.id}-battery"
)
@callback
def _async_update(self) -> None:
"""Get the latest data and update the state."""
surepy_entity = self._spc.states[self._id]
state = surepy_entity.raw_data()["status"]
self._attr_available = bool(state)
try:
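            # The device reports the combined voltage of its four batteries;
            # dividing by 4 gives the per-cell voltage, which is mapped onto
            # 0-100 % using the low-voltage threshold and usable voltage range
            # defined in const.py.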
per_battery_voltage = state["battery"] / 4
voltage_diff = per_battery_voltage - SURE_BATT_VOLTAGE_LOW
self._attr_native_value = min(
int(voltage_diff / SURE_BATT_VOLTAGE_DIFF * 100), 100
)
except (KeyError, TypeError):
self._attr_native_value = None
if state:
voltage_per_battery = float(state["battery"]) / 4
self._attr_extra_state_attributes = {
ATTR_VOLTAGE: f"{float(state['battery']):.2f}",
f"{ATTR_VOLTAGE}_per_battery": f"{voltage_per_battery:.2f}",
}
else:
self._attr_extra_state_attributes = None
self.async_write_ha_state()
_LOGGER.debug("%s -> state: %s", self.name, state)
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
self.async_on_remove(
async_dispatcher_connect(self.hass, TOPIC_UPDATE, self._async_update)
)
self._async_update()
| 31.643564
| 115
| 0.664894
|
3a99eada8bc892c61a3e79525966450957cdd4bb
| 18,321
|
py
|
Python
|
src/ralph/data_center/models/physical.py
|
angry-tony/cmdb-ralph
|
eb2ad2212a133025b698eb48e379c0bfe14cace0
|
[
"Apache-2.0"
] | null | null | null |
src/ralph/data_center/models/physical.py
|
angry-tony/cmdb-ralph
|
eb2ad2212a133025b698eb48e379c0bfe14cace0
|
[
"Apache-2.0"
] | null | null | null |
src/ralph/data_center/models/physical.py
|
angry-tony/cmdb-ralph
|
eb2ad2212a133025b698eb48e379c0bfe14cace0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import re
from collections import namedtuple
from itertools import chain
from django import forms
from django.core.exceptions import ValidationError
from django.core.validators import RegexValidator
from django.db import models, transaction
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from ralph.accounts.models import Region
from ralph.admin.autocomplete import AutocompleteTooltipMixin
from ralph.admin.sites import ralph_site
from ralph.admin.widgets import AutocompleteWidget
from ralph.assets.models.assets import Asset, NamedMixin
from ralph.assets.models.choices import AssetSource
from ralph.assets.utils import move_parents_models
from ralph.back_office.models import BackOfficeAsset, Warehouse
from ralph.data_center.models.choices import (
ConnectionType,
DataCenterAssetStatus,
Orientation,
RackOrientation
)
from ralph.lib.mixins.fields import (
NullableCharField,
NullableGenericIPAddressField
)
from ralph.lib.mixins.models import AdminAbsoluteUrlMixin
from ralph.lib.transitions.decorators import transition_action
from ralph.lib.transitions.fields import TransitionField
# i.e. number in range 1-16 and optional postfix 'A' or 'B'
VALID_SLOT_NUMBER_FORMAT = re.compile('^([1-9][A,B]?|1[0-6][A,B]?)$')
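# e.g. '7', '7A' and '16B' match; '0', '17' and '7C' do not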
ACCESSORY_DATA = [
'brush', 'patch_panel_fc', 'patch_panel_utp', 'organizer', 'power_socket'
]
class Gap(object):
"""A placeholder that represents a gap in a blade chassis"""
id = 0
barcode = '-'
sn = '-'
service = namedtuple('Service', ['name'])('-')
model = namedtuple('Model', ['name'])('-')
linked_device = None
def __init__(self, slot_no, orientation):
self.slot_no = slot_no
self.orientation = orientation
def get_orientation_desc(self):
return self.orientation
def get_absolute_url(self):
return ''
@classmethod
def generate_gaps(cls, items):
def get_number(slot_no):
"""Returns the integer part of slot number"""
m = re.match(r'(\d+)', slot_no)
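            # e.g. '12A' -> 12; a slot number without leading digits -> 0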
return (m and int(m.group(0))) or 0
if not items:
return []
max_slot_no = max([
get_number(asset.slot_no)
for asset in items
])
first_asset_slot_no = items[0].slot_no
ab = first_asset_slot_no and first_asset_slot_no[-1] in {'A', 'B'}
slot_nos = {asset.slot_no for asset in items}
def handle_missing(slot_no):
if slot_no not in slot_nos:
items.append(Gap(slot_no, items[0].get_orientation_desc()))
for slot_no in range(1, max_slot_no + 1):
if ab:
for letter in ['A', 'B']:
handle_missing(str(slot_no) + letter)
else:
handle_missing(str(slot_no))
return items
class DataCenter(AdminAbsoluteUrlMixin, NamedMixin, models.Model):
_allow_in_dashboard = True
show_on_dashboard = models.BooleanField(default=True)
@property
def rack_set(self):
return Rack.objects.select_related(
'server_room'
).filter(server_room__data_center=self)
@property
def server_rooms(self):
return ServerRoom.objects.filter(data_center=self)
def __str__(self):
return self.name
class ServerRoom(NamedMixin.NonUnique, models.Model):
_allow_in_dashboard = True
data_center = models.ForeignKey(DataCenter, verbose_name=_("data center"))
data_center._autocomplete = False
data_center._filter_title = _('data center')
visualization_cols_num = models.PositiveIntegerField(
verbose_name=_('visualization grid columns number'),
default=20,
)
visualization_rows_num = models.PositiveIntegerField(
verbose_name=_('visualization grid rows number'),
default=20,
)
def __str__(self):
return '{} ({})'.format(self.name, self.data_center.name)
class Accessory(NamedMixin):
class Meta:
verbose_name = _('accessory')
verbose_name_plural = _('accessories')
class RackAccessory(AdminAbsoluteUrlMixin, models.Model):
accessory = models.ForeignKey(Accessory)
rack = models.ForeignKey('Rack')
orientation = models.PositiveIntegerField(
choices=Orientation(),
default=Orientation.front.id,
)
position = models.IntegerField(null=True, blank=False)
remarks = models.CharField(
verbose_name='Additional remarks',
max_length=1024,
blank=True,
)
class Meta:
verbose_name_plural = _('rack accessories')
def get_orientation_desc(self):
return Orientation.name_from_id(self.orientation)
def __str__(self):
rack_name = self.rack.name if self.rack else ''
accessory_name = self.accessory.name if self.accessory else ''
return '{rack_name} - {accessory_name}'.format(
rack_name=rack_name, accessory_name=accessory_name,
)
class Rack(AdminAbsoluteUrlMixin, NamedMixin.NonUnique, models.Model):
_allow_in_dashboard = True
server_room = models.ForeignKey(
ServerRoom, verbose_name=_('server room'),
null=True,
blank=True,
)
server_room._autocomplete = False
server_room._filter_title = _('server room')
description = models.CharField(
_('description'), max_length=250, blank=True
)
orientation = models.PositiveIntegerField(
choices=RackOrientation(),
default=RackOrientation.top.id,
)
max_u_height = models.IntegerField(default=48)
visualization_col = models.PositiveIntegerField(
verbose_name=_('column number on visualization grid'),
default=0,
)
visualization_row = models.PositiveIntegerField(
verbose_name=_('row number on visualization grid'),
default=0,
)
accessories = models.ManyToManyField(Accessory, through='RackAccessory')
require_position = models.BooleanField(
default=True,
help_text=_(
'Uncheck if position is optional for this rack (ex. when rack '
            'has a warehouse-kind role)'
)
)
class Meta:
unique_together = ('name', 'server_room')
def __str__(self):
if self.server_room:
return "{} ({}/{})".format(
self.name,
self.server_room.data_center,
self.server_room.name,
)
return self.name
def get_orientation_desc(self):
return RackOrientation.name_from_id(self.orientation)
def get_root_assets(self, side=None):
filter_kwargs = {
'rack': self,
}
if side:
filter_kwargs['orientation'] = side
else:
filter_kwargs['orientation__in'] = [
Orientation.front, Orientation.back
]
return DataCenterAsset.objects.select_related(
'model', 'model__category'
).filter(
Q(slot_no='') | Q(slot_no=None), **filter_kwargs
).exclude(model__has_parent=True)
def get_free_u(self):
u_list = [True] * self.max_u_height
accessories = RackAccessory.objects.values_list(
'position').filter(rack=self)
dc_assets = self.get_root_assets().values_list(
'position', 'model__height_of_device'
)
def fill_u_list(objects, height_of_device=lambda obj: 1):
for obj in objects:
                # A position of None means the object simply has no assigned
                # position; position 0 is used for some accessories (PDUs) with
                # left/right orientation. Neither should be counted in the
                # free/occupied U space.
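                # e.g. a device at position 3 spanning 2 U marks u_list[2:4]
                # as occupied, i.e. rack units 3 and 4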
if obj[0] == 0 or obj[0] is None:
continue
start = obj[0] - 1
end = min(
self.max_u_height, obj[0] + int(height_of_device(obj)) - 1
)
height = end - start
if height:
u_list[start:end] = [False] * height
fill_u_list(accessories)
fill_u_list(dc_assets, lambda obj: obj[1])
return sum(u_list)
def get_pdus(self):
return DataCenterAsset.objects.select_related('model').filter(
rack=self,
orientation__in=(Orientation.left, Orientation.right),
position=0,
)
class DataCenterAsset(AutocompleteTooltipMixin, Asset):
_allow_in_dashboard = True
rack = models.ForeignKey(Rack, null=True, blank=True)
status = TransitionField(
default=DataCenterAssetStatus.new.id,
choices=DataCenterAssetStatus(),
)
position = models.IntegerField(null=True, blank=True)
orientation = models.PositiveIntegerField(
choices=Orientation(),
default=Orientation.front.id,
)
slot_no = models.CharField(
blank=True,
help_text=_('Fill it if asset is blade server'),
max_length=3,
null=True,
validators=[
RegexValidator(
regex=VALID_SLOT_NUMBER_FORMAT,
message=_(
"Slot number should be a number from range 1-16 with "
"an optional postfix 'A' or 'B' (e.g. '16A')"
),
code='invalid_slot_no'
)
],
verbose_name=_('slot number'),
)
connections = models.ManyToManyField(
'self',
through='Connection',
symmetrical=False,
)
source = models.PositiveIntegerField(
blank=True,
choices=AssetSource(),
db_index=True,
null=True,
verbose_name=_("source"),
)
delivery_date = models.DateField(null=True, blank=True)
production_year = models.PositiveSmallIntegerField(null=True, blank=True)
production_use_date = models.DateField(null=True, blank=True)
    # Temporary solution until the core functionality is fully migrated to NG
management_ip = NullableGenericIPAddressField(
verbose_name=_('Management IP address'),
help_text=_('Presented as string.'),
unique=True,
blank=True,
null=True,
default=None,
)
management_hostname = NullableCharField(
max_length=100, unique=True, null=True, blank=True
)
# @property
# def management_ip(self):
# """A property that gets management IP of a asset."""
# management_ip = self.ipaddress_set.filter(
# is_management=True
# ).order_by('-address').first()
# return management_ip.address if management_ip else ''
autocomplete_tooltip_fields = [
'rack',
'barcode',
'sn',
]
class Meta:
verbose_name = _('data center asset')
verbose_name_plural = _('data center assets')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Saved current rack value to check if changed.
self._rack_id = self.rack_id
def __str__(self):
return '{} (BC: {} / SN: {})'.format(
self.hostname or '-', self.barcode or '-', self.sn or '-'
)
def __repr__(self):
return '<DataCenterAsset: {}>'.format(self.id)
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
# When changing rack we search and save all descendants
if self.pk and self._rack_id != self.rack_id:
DataCenterAsset.objects.filter(parent=self).update(rack=self.rack)
def get_orientation_desc(self):
return Orientation.name_from_id(self.orientation)
@property
def is_blade(self):
if self.model_id and self.model.has_parent:
return True
return False
@property
def cores_count(self):
"""Returns cores count assigned to device in Ralph"""
asset_cores_count = self.model.cores_count if self.model else 0
return asset_cores_count
def _validate_orientation(self):
"""
Validate if orientation is valid for given position.
"""
if self.position is None:
return
if self.position == 0 and not Orientation.is_width(self.orientation):
msg = 'Valid orientations for picked position are: {}'.format(
', '.join(
choice.desc for choice in Orientation.WIDTH.choices
)
)
raise ValidationError({'orientation': [msg]})
if self.position > 0 and not Orientation.is_depth(self.orientation):
msg = 'Valid orientations for picked position are: {}'.format(
', '.join(
choice.desc for choice in Orientation.DEPTH.choices
)
)
raise ValidationError({'orientation': [msg]})
def _validate_position(self):
"""
Validate if position not empty when rack requires it.
"""
if (
self.rack and
self.position is None and
self.rack.require_position
):
msg = 'Position is required for this rack'
raise ValidationError({'position': [msg]})
def _validate_position_in_rack(self):
"""
Validate if position is in rack height range.
"""
if (
self.rack and
self.position is not None and
self.position > self.rack.max_u_height
):
msg = 'Position is higher than "max u height" = {}'.format(
self.rack.max_u_height,
)
raise ValidationError({'position': [msg]})
if self.position is not None and self.position < 0:
msg = 'Position should be 0 or greater'
raise ValidationError({'position': msg})
def _validate_slot_no(self):
if self.model_id:
if self.model.has_parent and not self.slot_no:
raise ValidationError({
'slot_no': 'Slot number is required when asset is blade'
})
if not self.model.has_parent and self.slot_no:
raise ValidationError({
'slot_no': (
'Slot number cannot be filled when asset is not blade'
)
})
def clean(self):
        # TODO: this should be the default logic of the clean method;
        # we could somehow register validators (or take every method with a
        # _validate prefix) and call them here
errors = {}
for validator in [
super().clean,
self._validate_orientation,
self._validate_position,
self._validate_position_in_rack,
self._validate_slot_no
]:
try:
validator()
except ValidationError as e:
e.update_error_dict(errors)
if errors:
raise ValidationError(errors)
def get_related_assets(self):
"""Returns the children of a blade chassis"""
orientations = [Orientation.front, Orientation.back]
assets_by_orientation = []
for orientation in orientations:
assets_by_orientation.append(list(
DataCenterAsset.objects.select_related('model').filter(
parent=self,
orientation=orientation,
model__has_parent=True,
).exclude(id=self.id)
))
assets = [
Gap.generate_gaps(assets) for assets in assets_by_orientation
]
return chain(*assets)
@classmethod
def get_autocomplete_queryset(cls):
return cls._default_manager.exclude(
status=DataCenterAssetStatus.liquidated.id
)
@classmethod
@transition_action(
verbose_name=_('Change rack'),
form_fields={
'rack': {
'field': forms.CharField(widget=AutocompleteWidget(
field=rack, admin_site=ralph_site
)),
}
}
)
def change_rack(cls, instances, request, **kwargs):
rack = Rack.objects.get(pk=kwargs['rack'])
for instance in instances:
instance.rack = rack
@classmethod
@transition_action(
verbose_name=_('Convert to BackOffice Asset'),
disable_save_object=True,
only_one_action=True,
form_fields={
'warehouse': {
'field': forms.CharField(label=_('Warehouse')),
'autocomplete_field': 'warehouse',
'autocomplete_model': 'back_office.BackOfficeAsset'
},
'region': {
'field': forms.CharField(label=_('Region')),
'autocomplete_field': 'region',
'autocomplete_model': 'back_office.BackOfficeAsset'
}
}
)
def convert_to_backoffice_asset(cls, instances, request, **kwargs):
with transaction.atomic():
for i, instance in enumerate(instances):
back_office_asset = BackOfficeAsset()
back_office_asset.region = Region.objects.get(
pk=kwargs['region']
)
back_office_asset.warehouse = Warehouse.objects.get(
pk=kwargs['warehouse']
)
move_parents_models(instance, back_office_asset)
# Save new asset to list, required to redirect url.
# RunTransitionView.get_success_url()
instances[i] = back_office_asset
class Connection(models.Model):
outbound = models.ForeignKey(
'DataCenterAsset',
verbose_name=_('connected to device'),
on_delete=models.PROTECT,
related_name='outbound_connections',
)
inbound = models.ForeignKey(
'DataCenterAsset',
verbose_name=_('connected device'),
on_delete=models.PROTECT,
related_name='inbound_connections',
)
# TODO: discuss
connection_type = models.PositiveIntegerField(
verbose_name=_('connection type'),
choices=ConnectionType()
)
def __str__(self):
return '%s -> %s (%s)' % (
self.outbound,
self.inbound,
self.connection_type
)
| 32.657754
| 79
| 0.603843
|
4edb8120c31792ef0be7b5a7140410caa61e44e0
| 12,653
|
py
|
Python
|
irlco/routing/train_kool.py
|
joerosenberg/copt-irl
|
8e79dd8ace16b8aaff32bb13662be6f3185b568a
|
[
"MIT"
] | 2
|
2021-06-05T15:26:19.000Z
|
2022-02-23T21:10:24.000Z
|
irlco/routing/train_kool.py
|
joerosenberg/copt-irl
|
8e79dd8ace16b8aaff32bb13662be6f3185b568a
|
[
"MIT"
] | null | null | null |
irlco/routing/train_kool.py
|
joerosenberg/copt-irl
|
8e79dd8ace16b8aaff32bb13662be6f3185b568a
|
[
"MIT"
] | 1
|
2021-07-19T08:41:10.000Z
|
2021-07-19T08:41:10.000Z
|
import torch
import wandb
import irlco.pointer_transformer as pt
from irlco.routing.baselines import greedy_rollout_baselines
from irlco.routing.data import CircuitSolutionDataset
from irlco.routing.env import BatchCircuitRoutingEnv, measures_to_terminal_rewards
from irlco.routing.policy import sample_best_of_n_trajectories, trajectory_action_probabilities, greedy_decode
import pickle
import os
from multiprocessing import freeze_support
from irlco.routing.reward import compute_shaping_terms, shaping_terms_to_rewards
def load_pickled_data(data_config_path, data_pickle_path):
if os.path.isfile(data_pickle_path):
with open(data_pickle_path, 'rb') as pickled_data:
data = pickle.load(pickled_data)
else:
data = CircuitSolutionDataset(data_config_path)
data.config = None # Get rid of yaml object so we can pickle
with open(data_pickle_path, 'wb') as pickled_data:
pickle.dump(data, pickled_data)
return data
if __name__ == '__main__':
# For multiprocessing support on Windows
freeze_support()
# Transformer model parameters
EMBEDDING_DIM = 64
NB_HEADS = 8
FF_DIM = 512
DROPOUT = 0.0
NB_ENCODER_LAYERS = 3
NB_DECODER_LAYERS = 3
# Environment parameters
MIN_INSTANCE_SIZE = 6
MAX_INSTANCE_SIZE = 9
# Training parameters
NB_INSTANCES_PER_BATCH = 512 # Number of unique circuit routing problems to consider in each batch
NB_TRAJECTORIES_PER_INSTANCE = 1 # Number of trajectories to sample for each unique circuit routing problem
BATCH_SIZE = NB_TRAJECTORIES_PER_INSTANCE * NB_INSTANCES_PER_BATCH
NB_EPISODES = 20_000
LR = 1e-4 # Optimizer learning rate
EPS = 1e-8 # Add when computing log-probabilities from probabilities to avoid numerical instability
DEVICE = torch.device('cuda')
ENTROPY_REGULARISATION_WEIGHT = 0.1
# Qualitative training parameters
BASELINE_METHOD = 'none' # 'greedy' for greedy rollouts or 'none'
REWARD_SHAPING_METHOD = 'none' # 'ail' for adversarial imitation learning or 'none'
SHARED_AIL_ENCODER = True # Whether or not to share the transformer encoder between the policy and discriminator
# Adversarial imitation learning (reward shaping) parameters
NB_EXPERT_SAMPLES = BATCH_SIZE # Keep it equal to batch size for now, so that the discriminator sees an equal
# amount of expert and non-expert data
USE_ACTION_PROBS_FOR_DISCRIMINATOR = False
# PPO surrogate loss clipping parameter
PPO_EPS = 0.2
# Test parameters
TEST_INTERVAL = 25
TEST_BATCH_SIZE = 256
TEST_DECODING_METHOD = 'greedy' # or 'sampling'
NB_TEST_SAMPLES = 128 # Number of samples to take if decoding method is 'sampling'
# Model saving interval
SAVE_INTERVAL = TEST_INTERVAL
# Data file paths
TEST_DATA_PATH = './data/test_data_config.yaml'
TEST_DATA_PICKLE_PATH = './data/pickle/test_data.pkl'
EXPERT_DATA_PATH = './data/irl_data_config.yaml'
EXPERT_DATA_PICKLE_PATH = './data/pickle/irl_data.pkl'
wandb.init(project='routing', config={
'embedding_dim': EMBEDDING_DIM,
'nb_heads': NB_HEADS,
'ff_dim': FF_DIM,
'dropout': DROPOUT,
'nb_encoder_layers': NB_ENCODER_LAYERS,
'nb_decoder_layers': NB_DECODER_LAYERS,
'min_instance_size': MIN_INSTANCE_SIZE,
'max_instance_size': MAX_INSTANCE_SIZE,
'nb_instances_per_batch': NB_INSTANCES_PER_BATCH,
'nb_trajectories_per_instance': NB_TRAJECTORIES_PER_INSTANCE,
'batch_size': BATCH_SIZE,
'learning_rate': LR,
'entropy_regularisation_weight': ENTROPY_REGULARISATION_WEIGHT,
'baseline_method': BASELINE_METHOD,
'reward_shaping_method': REWARD_SHAPING_METHOD,
'shared_ail_encoder': SHARED_AIL_ENCODER,
'nb_expert_samples': NB_EXPERT_SAMPLES,
'ppo_clipping_parameter': PPO_EPS,
'use_actions_probs_for_discriminator': USE_ACTION_PROBS_FOR_DISCRIMINATOR
})
# Environments for sampling unique problems, stepping forward during training, and testing
dummy_env = BatchCircuitRoutingEnv(NB_INSTANCES_PER_BATCH, MIN_INSTANCE_SIZE, MAX_INSTANCE_SIZE)
env = BatchCircuitRoutingEnv(BATCH_SIZE, MIN_INSTANCE_SIZE, MAX_INSTANCE_SIZE)
test_env = BatchCircuitRoutingEnv(TEST_BATCH_SIZE, MIN_INSTANCE_SIZE, MAX_INSTANCE_SIZE)
# Shared net for policy + shaped rewards
net = pt.KoolModel().cuda()
optimizer = torch.optim.Adam(net.parameters(), lr=LR)
wandb.watch(net)
# Make directory for saving model
os.mkdir(f'./saved_models/{wandb.run.name}')
if BASELINE_METHOD == 'greedy':
# Create baseline net with same parameters as net
baseline_net = pt.KoolModel()
baseline_net.load_state_dict(net.state_dict())
baseline_net.eval()
elif BASELINE_METHOD == 'none':
baseline_net = None
else:
        raise ValueError(f"Unknown baseline method: {BASELINE_METHOD}")
# Variables for tracking baseline
best_success_rate = 0.0
# Load data files - if a pickled copy of the data exists, load that instead
test_data = load_pickled_data(TEST_DATA_PATH, TEST_DATA_PICKLE_PATH)
expert_data = load_pickled_data(EXPERT_DATA_PATH, EXPERT_DATA_PICKLE_PATH)
for i in range(NB_EPISODES):
''' Sample trajectories '''
# Sample NB_INSTANCES_PER_BATCH unique circuit routing problems
instances, _ = dummy_env.reset()
# Duplicate each problem NB_TRAJECTORIES_PER_INSTANCE times so we sample that many trajectories for each problem
states = env.reset(instances=instances.repeat(1, NB_TRAJECTORIES_PER_INSTANCE, 1))
base_pairs, _ = states
episode_length = base_pairs.shape[0]
# Sample trajectories according to policy given by net
if ENTROPY_REGULARISATION_WEIGHT > 0:
actions, action_probs, measures, successes, all_action_probs = net.sample_decode(states, env)
else:
actions, action_probs, measures, successes = sample_best_of_n_trajectories(env, states, net, 1)
''' Compute rewards and returns '''
# Compute terminal rewards for each solution
terminal_rewards = measures_to_terminal_rewards(episode_length, measures, successes=successes)
if REWARD_SHAPING_METHOD == 'ail':
# Get expert data for discriminator
expert_base_pairs, expert_actions, expert_measures = expert_data.get_batch(episode_length,
NB_EXPERT_SAMPLES, DEVICE)
expert_actions = expert_actions.T
# Get terminal rewards for expert solutions (they are guaranteed to be successful solutions, so we don't
            # need to pass successes)
expert_terminal_rewards = measures_to_terminal_rewards(episode_length, expert_measures)
# Concatenate policy data and expert data together so we can compute in a single batch
disc_base_pairs = torch.cat((base_pairs, expert_base_pairs), dim=1)
disc_actions = torch.cat((actions, expert_actions), dim=0)
disc_terminal_rewards = torch.cat((terminal_rewards, expert_terminal_rewards))
# trajectory_action_probabilities computes the probabilities that the current agent would take the expert's
# actions
expert_action_probs = trajectory_action_probabilities((expert_base_pairs, expert_actions), net).squeeze(2).T
disc_action_probs = torch.cat((action_probs, expert_action_probs), dim=0)
# Compute shaping terms for both agent and expert trajectories
disc_shaping_terms = compute_shaping_terms((disc_base_pairs, disc_actions), net)
# Compute rewards from shaping terms
disc_rewards = shaping_terms_to_rewards(disc_shaping_terms, disc_terminal_rewards).squeeze(2).T
# Calculate mean cross-entropy loss for the discriminator
if USE_ACTION_PROBS_FOR_DISCRIMINATOR:
is_expert_transition_probs = torch.exp(disc_rewards) / (
torch.exp(disc_rewards) + disc_action_probs.detach())
else:
is_expert_transition_probs = torch.exp(disc_rewards) / (1 + torch.exp(disc_rewards))
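            # Either branch yields a GAIL/AIRL-style discriminator output; the
            # cross-entropy loss below pushes it towards 1 on expert transitions
            # and towards 0 on the agent's own transitions.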
# Calculate misclassification rates for logging
false_positive_rate = (is_expert_transition_probs[:BATCH_SIZE, :] > 0.5).float().mean()
false_negative_rate = (is_expert_transition_probs[BATCH_SIZE:, :] < 0.5).float().mean()
wandb.log({'false_positive_rate': false_positive_rate, 'false_negative_rate': false_negative_rate},
commit=False)
discriminator_loss = - (
torch.sum(torch.log(1 - is_expert_transition_probs[:BATCH_SIZE, :])) +
torch.sum(torch.log(is_expert_transition_probs[BATCH_SIZE:, :]))
) / (BATCH_SIZE + NB_EXPERT_SAMPLES)
discriminator_loss.backward()
wandb.log({'discriminator_loss': discriminator_loss}, commit=False)
# Compute returns for agent
returns = torch.flip(torch.cumsum(torch.flip(disc_rewards[:BATCH_SIZE], [1]), 1), [1]).detach()
elif REWARD_SHAPING_METHOD == 'none':
returns = terminal_rewards.repeat(1, episode_length)
else:
            raise ValueError(f"Unknown reward shaping method: {REWARD_SHAPING_METHOD}")
# Compute baselines
if i > TEST_INTERVAL and BASELINE_METHOD == 'greedy':
baselines = greedy_rollout_baselines(base_pairs, actions, env, baseline_net, device=DEVICE)
else:
baselines = torch.zeros((BATCH_SIZE, episode_length), device=DEVICE)
''' Compute loss and update policy network '''
# Compute entropy penalty
if ENTROPY_REGULARISATION_WEIGHT > 0:
entropy_terms = torch.sum(all_action_probs * torch.log(all_action_probs + EPS), dim=2)
entropy_returns = torch.flip(torch.cumsum(torch.flip(entropy_terms, [1]), 1), [1])
returns = returns - ENTROPY_REGULARISATION_WEIGHT * entropy_returns
# Compute PPO loss
action_prob_ratios = action_probs / action_probs.detach()
ppo_terms = torch.min(action_prob_ratios * (returns - baselines),
torch.clamp(action_prob_ratios, 1 - PPO_EPS, 1 + PPO_EPS) * (returns - baselines))
policy_loss = - torch.sum(ppo_terms) / BATCH_SIZE
policy_loss.backward()
optimizer.step()
wandb.log({'policy_loss': policy_loss, 'mean_terminal_reward': terminal_rewards.mean(),
'success_rate': successes.float().mean()}, commit=False)
if i % TEST_INTERVAL == 0 and i != 0:
# For storing aggregate stats over all episode lengths:
overall_mean_optimality_gap = 0
overall_success_rate = 0
for test_episode_length in range(MIN_INSTANCE_SIZE, MAX_INSTANCE_SIZE + 1):
instances, solutions, test_measures = test_data.get_batch(test_episode_length, TEST_BATCH_SIZE, DEVICE)
test_states = test_env.reset(instances=instances)
with torch.no_grad():
_, _, measures, successes, _ = net.greedy_decode(test_states, test_env)
optimality_gaps = (1 - test_measures / measures).masked_fill(torch.logical_not(successes), 1)
mean_optimality_gap = optimality_gaps.mean() # For this instance size
success_rate = successes.float().mean() # For this instance size
wandb.log({f'mean_optimality_gap_{test_episode_length}': mean_optimality_gap,
f'success_rate_{test_episode_length}': success_rate}, commit=False)
overall_mean_optimality_gap += mean_optimality_gap
overall_success_rate += success_rate
overall_mean_optimality_gap = overall_mean_optimality_gap / (MAX_INSTANCE_SIZE + 1 - MIN_INSTANCE_SIZE)
overall_success_rate = overall_success_rate / (MAX_INSTANCE_SIZE + 1 - MIN_INSTANCE_SIZE)
wandb.log({f'overall_mean_optimality_gap': overall_mean_optimality_gap,
f'overall_success_rate': overall_success_rate}, commit=False)
if overall_success_rate > best_success_rate and BASELINE_METHOD != 'none':
best_success_rate = overall_success_rate
baseline_net.load_state_dict(net.state_dict())
if i % SAVE_INTERVAL == 0 and i != 0:
torch.save(net.state_dict(), f'./saved_models/{wandb.run.name}/{wandb.run.name}_step_{i}_model')
wandb.log({}) # Update log counter
| 48.853282
| 120
| 0.688295
|
5248e45a249101740e088181b73afc7711b3bdb5
| 5,135
|
py
|
Python
|
utils.py
|
singhb2020/sit-straight
|
73b28366269305b4b4ff91d216fbeb41d2190b8d
|
[
"MIT"
] | null | null | null |
utils.py
|
singhb2020/sit-straight
|
73b28366269305b4b4ff91d216fbeb41d2190b8d
|
[
"MIT"
] | null | null | null |
utils.py
|
singhb2020/sit-straight
|
73b28366269305b4b4ff91d216fbeb41d2190b8d
|
[
"MIT"
] | null | null | null |
# Utility Functions
# ------------------ Importing Libraries ------------------ #
import json
import numpy as np
import math
import tensorflow as tf
import os
import random
import playsound
# ------------------ Utility Functions ------------------ #
def open_thresholds():
"""
open_thresholds:
        Returns the calibrated thresholds to be used
"""
try:
threshold_file = open("thresholds.json", 'r')
        thresholds = json.load(threshold_file)
        threshold_file.close()
        return thresholds
except Exception as e:
print(e)
print("An error has occured. Ensure the theshold file is created by calibrating.")
return None
def get_points_dictionary():
"""
get_points_dictionary:
        Returns the mapping from each body part name to its keypoint index. Retrieved from TF Hub.
"""
return {
"nose" : 0,
"left_eye": 1,
"right_eye" : 2,
"left_ear" : 3,
"right_ear" : 4,
"left_shoulder" : 5,
"right_shoulder" : 6,
"left_elbow" : 7,
"right_elbow" : 8,
"left_wrist" : 9,
"right_wrist" : 10,
"left_hip" : 11,
"right_hip" : 12,
"left_knee" : 13,
"right_knee" : 14,
"left_ankle" : 15,
"right_ankle" : 16
}
def get_dist_between(frame, keypoints, p1, p2):
"""
get_dist_between:
Determines the distance between two input points
"""
y, x, c = frame.shape
shaped = np.squeeze(np.multiply(keypoints, [y,x,1]))
POINTS = get_points_dictionary()
p1 = keypoints[0][0][POINTS[p1]]
p2 = keypoints[0][0][POINTS[p2]]
p1 = np.array(p1[:2]*[y,x]).astype(int)
p2 = np.array(p2[:2]*[y,x]).astype(int)
dist = math.sqrt((p1[0]-p2[0])**2 + (p1[1]-p2[1])**2)
return dist
def get_dist_values(frame, keypoints):
"""
get_dist_values:
Returns a list of distances from different keypoints on the upper body
"""
#Shoulder to Ear
dists_right_ear_dist = get_dist_between(frame, keypoints, "right_shoulder", 'right_ear')
dists_left_ear_dist = get_dist_between(frame, keypoints, "left_shoulder", 'left_ear')
#Shoulder to Nose
dists_right_nose_dist = get_dist_between(frame, keypoints, "right_shoulder", "nose")
dists_left_nose_dist = get_dist_between(frame, keypoints, "left_shoulder", "nose")
#Shoulder to Eyes
dists_right_eyes_dist = get_dist_between(frame, keypoints, "right_shoulder", "right_eye")
dists_left_eyes_dist = get_dist_between(frame, keypoints, "left_shoulder", "left_eye")
return [dists_right_ear_dist, dists_left_ear_dist, dists_right_nose_dist, dists_left_nose_dist, dists_right_eyes_dist, dists_left_eyes_dist]
def reshape_image(frame, model):
"""
reshape_image:
Reshaping the camera input frame to fit the model
"""
image = frame.copy()
image = tf.image.resize_with_pad( np.expand_dims(image, axis=0), model.input_dim[0], model.input_dim[1] )
input_image = tf.cast(image, dtype=tf.float32)
return input_image
def input_output_details(interpreter):
"""
input_output_details:
        Used to get the input and output details from the interpreter
"""
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()
return input_details, output_details
def make_prediction(interpreter, input_details, output_details, input_image):
"""
make_prediction:
Used to get the keypoints output from the provided image
"""
interpreter.set_tensor(input_details[0]['index'], np.array(input_image))
interpreter.invoke()
keypoints_with_score = interpreter.get_tensor(output_details[0]['index'])
return keypoints_with_score
def get_audio_list(filepath):
"""
get_audio_list:
Used to create a list of filepaths for all available audio recordings
"""
return [file.path for file in os.scandir(filepath)]
def play_audio_recording(audio_list):
"""
play_audio_recording:
        An event trigger for when posture is bad for longer than the threshold. Plays pre-recorded audio files.
"""
audio_to_play = random.choice(audio_list)
print(audio_to_play)
playsound.playsound(audio_to_play)
def model_name_input():
"""
model_name_input:
        Recursively asks which model to use until a valid model is provided. Not optimal, will change in future.
"""
model_name = input("What is the model you want to use? lightning fast but bad, thunder slow but good:\n")
if model_name == "lightning" or model_name == "thunder":
return str(model_name)
else:
print("Try again, not a valid model\n")
return model_name_input()
def calibration_input():
"""
calibration_input:
Used to determine whether to run the calibration or not.
"""
if not os.path.exists('thresholds.json'):
print("No calibration file exsists, running calibration\n")
return True
run_calibration = input("Do you want to run the calibration? Type yes otherwise defaults to no:\n")
if run_calibration == "yes":
return True
else:
return False
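# --- Added usage sketch (not part of the original module) ---
# Wires the distance helpers together on synthetic data; a real run would use a
# camera frame and MoveNet keypoints instead, so the shapes below are assumptions.
if __name__ == "__main__":
    dummy_frame = np.zeros((480, 640, 3), dtype=np.uint8)
    # MoveNet-style output: [1, 1, 17, (y, x, score)]
    dummy_keypoints = np.random.rand(1, 1, 17, 3).astype(np.float32)
    print(get_dist_values(dummy_frame, dummy_keypoints))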
| 27.169312
| 144
| 0.66037
|
03cb69795335fd20c693abd8cf755776841c5b35
| 16,950
|
py
|
Python
|
code/client/munkilib/updatecheck/compare.py
|
dderusha/munki
|
ad3dd1673fc6544770e561b52000371113cd5294
|
[
"Apache-2.0"
] | 1
|
2019-01-13T22:36:59.000Z
|
2019-01-13T22:36:59.000Z
|
code/client/munkilib/updatecheck/compare.py
|
MarcelRaschke/munki
|
5ab55e81934cf081d369ab11df70d2ee215df33e
|
[
"Apache-2.0"
] | null | null | null |
code/client/munkilib/updatecheck/compare.py
|
MarcelRaschke/munki
|
5ab55e81934cf081d369ab11df70d2ee215df33e
|
[
"Apache-2.0"
] | null | null | null |
# encoding: utf-8
#
# Copyright 2009-2018 Greg Neagle.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
updatecheck.compare
Created by Greg Neagle on 2016-12-13.
Comparison/checking functions used by updatecheck
"""
import os
from operator import itemgetter
from .. import display
from .. import munkihash
from .. import info
from .. import pkgutils
from .. import utils
from .. import FoundationPlist
ITEM_DOES_NOT_MATCH = VERSION_IS_LOWER = -1
ITEM_NOT_PRESENT = 0
ITEM_MATCHES = VERSION_IS_THE_SAME = 1
VERSION_IS_HIGHER = 2
def compare_versions(thisvers, thatvers):
"""Compares two version numbers to one another.
Returns:
-1 if thisvers is older than thatvers
1 if thisvers is the same as thatvers
2 if thisvers is newer than thatvers
"""
if (pkgutils.MunkiLooseVersion(thisvers) <
pkgutils.MunkiLooseVersion(thatvers)):
return VERSION_IS_LOWER
elif (pkgutils.MunkiLooseVersion(thisvers) ==
pkgutils.MunkiLooseVersion(thatvers)):
return VERSION_IS_THE_SAME
else:
return VERSION_IS_HIGHER
def compare_application_version(app):
"""Checks the given path if it's available,
otherwise uses LaunchServices and/or Spotlight to look for the app
Args:
app: dict with application bundle info
Returns:
0 if the app isn't installed
or doesn't have valid Info.plist
-1 if it's older
1 if the version is the same
2 if the version is newer
Raises utils.Error if there's an error in the input
"""
if 'path' in app:
filepath = os.path.join(app['path'], 'Contents', 'Info.plist')
if os.path.exists(filepath):
return compare_bundle_version(app)
display.display_debug2('%s doesn\'t exist.', filepath)
return ITEM_NOT_PRESENT
# no 'path' in dict
display.display_debug2('No path given for application item.')
# let's search:
name = app.get('CFBundleName', '')
bundleid = app.get('CFBundleIdentifier', '')
version_comparison_key = app.get(
'version_comparison_key', 'CFBundleShortVersionString')
versionstring = app.get(version_comparison_key)
if name == '' and bundleid == '':
# no path, no name, no bundleid. Error!
raise utils.Error(
'No path, application name or bundleid was specified!')
display.display_debug1(
'Looking for application %s with bundleid: %s, version %s...' %
(name, bundleid, versionstring))
# find installed apps that match this item by name or bundleid
appdata = info.filtered_app_data()
appinfo = [item for item in appdata
if (item['path'] and
(item['bundleid'] == bundleid or
(name and item['name'] == name)))]
if not appinfo:
# No matching apps found
display.display_debug1(
'\tFound no matching applications on the startup disk.')
return ITEM_NOT_PRESENT
# sort highest version first
try:
appinfo.sort(key=itemgetter('version'), reverse=True)
except KeyError:
# some item did not have a version key
pass
# iterate through matching applications
end_result = ITEM_NOT_PRESENT
for item in appinfo:
if 'name' in item:
display.display_debug2('\tFound name: \t %s', item['name'])
display.display_debug2('\tFound path: \t %s', item['path'])
display.display_debug2(
'\tFound CFBundleIdentifier: \t %s', item['bundleid'])
# create a test_app item with our found path
test_app = {}
test_app.update(app)
test_app['path'] = item['path']
compare_result = compare_bundle_version(test_app)
if compare_result in (VERSION_IS_THE_SAME, VERSION_IS_HIGHER):
return compare_result
elif compare_result == VERSION_IS_LOWER:
end_result = VERSION_IS_LOWER
# didn't find an app with the same or higher version
if end_result == VERSION_IS_LOWER:
display.display_debug1(
'An older version of this application is present.')
return end_result
def compare_bundle_version(item):
"""Compares a bundle version passed item dict.
Returns 0 if the bundle isn't installed
or doesn't have valid Info.plist
-1 if it's older
1 if the version is the same
2 if the version is newer
Raises utils.Error if there's an error in the input
"""
# look for an Info.plist inside the bundle
filepath = os.path.join(item['path'], 'Contents', 'Info.plist')
if not os.path.exists(filepath):
display.display_debug1('\tNo Info.plist found at %s', filepath)
filepath = os.path.join(item['path'], 'Resources', 'Info.plist')
if not os.path.exists(filepath):
display.display_debug1('\tNo Info.plist found at %s', filepath)
return ITEM_NOT_PRESENT
display.display_debug1('\tFound Info.plist at %s', filepath)
# just let comparePlistVersion do the comparison
saved_path = item['path']
item['path'] = filepath
compare_result = compare_plist_version(item)
item['path'] = saved_path
return compare_result
def compare_plist_version(item):
"""Gets the version string from the plist at path and compares versions.
Returns 0 if the plist isn't installed
-1 if it's older
1 if the version is the same
2 if the version is newer
Raises utils.Error if there's an error in the input
"""
version_comparison_key = item.get(
'version_comparison_key', 'CFBundleShortVersionString')
if 'path' in item and version_comparison_key in item:
versionstring = item[version_comparison_key]
filepath = item['path']
minupvers = item.get('minimum_update_version')
else:
raise utils.Error('Missing plist path or version!')
display.display_debug1('\tChecking %s for %s %s...',
filepath, version_comparison_key, versionstring)
if not os.path.exists(filepath):
display.display_debug1('\tNo plist found at %s', filepath)
return ITEM_NOT_PRESENT
try:
plist = FoundationPlist.readPlist(filepath)
except FoundationPlist.NSPropertyListSerializationException:
display.display_debug1('\t%s may not be a plist!', filepath)
return ITEM_NOT_PRESENT
if not hasattr(plist, 'get'):
display.display_debug1(
'plist not parsed as NSCFDictionary: %s', filepath)
return ITEM_NOT_PRESENT
if 'version_comparison_key' in item:
# specific key has been supplied,
# so use this to determine installed version
display.display_debug1(
'\tUsing version_comparison_key %s', version_comparison_key)
installedvers = pkgutils.getVersionString(
plist, version_comparison_key)
else:
# default behavior
installedvers = pkgutils.getVersionString(plist)
if installedvers:
display.display_debug1(
'\tInstalled item has version %s', installedvers)
if minupvers:
if compare_versions(installedvers, minupvers) < 1:
display.display_debug1(
'\tVersion %s too old < %s', installedvers, minupvers)
return ITEM_NOT_PRESENT
compare_result = compare_versions(installedvers, versionstring)
results = ['older', 'not installed?!', 'the same', 'newer']
display.display_debug1(
'\tInstalled item is %s.', results[compare_result + 1])
return compare_result
else:
display.display_debug1('\tNo version info in %s.', filepath)
return ITEM_NOT_PRESENT
def filesystem_item_exists(item):
"""Checks to see if a filesystem item exists.
If item has md5checksum attribute, compares on disk file's checksum.
Returns 0 if the filesystem item does not exist on disk,
Returns 1 if the filesystem item exists and the checksum matches
(or there is no checksum)
Returns -1 if the filesystem item exists but the checksum does not match.
Broken symlinks are OK; we're testing for the existence of the symlink,
not the item it points to.
    Raises utils.Error if there's a problem with the input.
"""
if 'path' in item:
filepath = item['path']
display.display_debug1('Checking existence of %s...', filepath)
if os.path.lexists(filepath):
display.display_debug2('\tExists.')
if 'md5checksum' in item:
storedchecksum = item['md5checksum']
ondiskchecksum = munkihash.getmd5hash(filepath)
display.display_debug2('Comparing checksums...')
if storedchecksum == ondiskchecksum:
display.display_debug2('Checksums match.')
return ITEM_MATCHES
else:
display.display_debug2(
'Checksums differ: expected %s, got %s',
storedchecksum, ondiskchecksum)
return ITEM_DOES_NOT_MATCH
else:
return ITEM_MATCHES
else:
display.display_debug2('\tDoes not exist.')
return ITEM_NOT_PRESENT
else:
raise utils.Error('No path specified for filesystem item.')
def compare_item_version(item):
'''Compares an installs_item with what's on the startup disk.
    Wraps other comparison functions.
For applications, bundles, and plists:
Returns 0 if the item isn't installed
or doesn't have valid Info.plist
-1 if it's older
1 if the version is the same
2 if the version is newer
For other filesystem items:
Returns 0 if the filesystem item does not exist on disk,
1 if the filesystem item exists and the checksum matches
(or there is no checksum)
-1 if the filesystem item exists but the checksum does not match.
'''
    if 'VersionString' not in item and 'CFBundleShortVersionString' in item:
# Ensure that 'VersionString', if not present, is populated
# with the value of 'CFBundleShortVersionString' if present
item['VersionString'] = item['CFBundleShortVersionString']
itemtype = item.get('type')
if itemtype == 'application':
return compare_application_version(item)
if itemtype == 'bundle':
return compare_bundle_version(item)
if itemtype == 'plist':
return compare_plist_version(item)
if itemtype == 'file':
return filesystem_item_exists(item)
raise utils.Error('Unknown installs item type: %s', itemtype)
def compare_receipt_version(item):
"""Determines if the given package is already installed.
Args:
item: dict with packageid; a 'com.apple.pkg.ServerAdminTools' style id
Returns 0 if the receipt isn't present
-1 if it's older
1 if the version is the same
2 if the version is newer
Raises utils.Error if there's an error in the input
"""
if item.get('optional'):
# receipt has been marked as optional, so it doesn't matter
# if it's installed or not. Return 1
# only check receipts not marked as optional
display.display_debug1(
'Skipping %s because it is marked as optional',
item.get('packageid', item.get('name')))
return VERSION_IS_THE_SAME
installedpkgs = pkgutils.getInstalledPackages()
if 'packageid' in item and 'version' in item:
pkgid = item['packageid']
vers = item['version']
else:
raise utils.Error('Missing packageid or version info!')
display.display_debug1('Looking for package %s, version %s', pkgid, vers)
installedvers = installedpkgs.get(pkgid)
if installedvers:
return compare_versions(installedvers, vers)
else:
display.display_debug1(
'\tThis package is not currently installed.')
return ITEM_NOT_PRESENT
def get_installed_version(item_plist):
"""Attempts to determine the currently installed version of an item.
Args:
item_plist: pkginfo plist of an item to get the version for.
Returns:
String version of the item, or 'UNKNOWN' if unable to determine.
"""
for receipt in item_plist.get('receipts', []):
# look for a receipt whose version matches the pkginfo version
if compare_versions(receipt.get('version', 0),
item_plist['version']) == 1:
pkgid = receipt['packageid']
display.display_debug2(
'Using receipt %s to determine installed version of %s',
pkgid, item_plist['name'])
return pkgutils.getInstalledPackageVersion(pkgid)
# try using items in the installs array to determine version
install_items_with_versions = [item
for item in item_plist.get('installs', [])
if 'CFBundleShortVersionString' in item]
for install_item in install_items_with_versions:
# look for an installs item whose version matches the pkginfo version
if compare_versions(install_item['CFBundleShortVersionString'],
item_plist['version']) == 1:
if install_item['type'] == 'application':
name = install_item.get('CFBundleName')
bundleid = install_item.get('CFBundleIdentifier')
display.display_debug2(
'Looking for application %s, bundleid %s',
name, install_item.get('CFBundleIdentifier'))
try:
# check default location for app
filepath = os.path.join(install_item['path'],
'Contents', 'Info.plist')
plist = FoundationPlist.readPlist(filepath)
return plist.get('CFBundleShortVersionString', 'UNKNOWN')
except (KeyError,
FoundationPlist.NSPropertyListSerializationException):
# that didn't work, fall through to the slow way
appinfo = []
appdata = info.app_data()
if appdata:
for ad_item in appdata:
if bundleid and ad_item['bundleid'] == bundleid:
appinfo.append(ad_item)
elif name and ad_item['name'] == name:
appinfo.append(ad_item)
maxversion = '0.0.0.0.0'
for ai_item in appinfo:
if ('version' in ai_item and
compare_versions(
ai_item['version'], maxversion) == 2):
# version is higher
maxversion = ai_item['version']
return maxversion
elif install_item['type'] == 'bundle':
display.display_debug2(
'Using bundle %s to determine installed version of %s',
install_item['path'], item_plist['name'])
filepath = os.path.join(install_item['path'],
'Contents', 'Info.plist')
try:
plist = FoundationPlist.readPlist(filepath)
return plist.get('CFBundleShortVersionString', 'UNKNOWN')
except FoundationPlist.NSPropertyListSerializationException:
pass
elif install_item['type'] == 'plist':
display.display_debug2(
'Using plist %s to determine installed version of %s',
install_item['path'], item_plist['name'])
try:
plist = FoundationPlist.readPlist(install_item['path'])
return plist.get('CFBundleShortVersionString', 'UNKNOWN')
except FoundationPlist.NSPropertyListSerializationException:
pass
# if we fall through to here we have no idea what version we have
return 'UNKNOWN'
if __name__ == '__main__':
    print('This is a library of support tools for the Munki Suite.')
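# --- Added usage sketch (not part of the original module) ---
# The version comparison helper only needs two version strings, so it can be
# exercised without a managed Mac:
def _compare_versions_example():
    assert compare_versions('1.2.3', '1.10.0') == VERSION_IS_LOWER
    assert compare_versions('2.0', '2.0') == VERSION_IS_THE_SAME
    assert compare_versions('2.1', '2.0') == VERSION_IS_HIGHER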
| 38.965517
| 78
| 0.620059
|
1cdf9f361b05d1e7332400c288524328d77efbe5
| 3,477
|
py
|
Python
|
google_play_scraper/features/reviews.py
|
haideralipunjabi/google-play-scraper
|
fe98e0d029e35f1e09d720c78a999ee5aca4b7b0
|
[
"MIT"
] | null | null | null |
google_play_scraper/features/reviews.py
|
haideralipunjabi/google-play-scraper
|
fe98e0d029e35f1e09d720c78a999ee5aca4b7b0
|
[
"MIT"
] | null | null | null |
google_play_scraper/features/reviews.py
|
haideralipunjabi/google-play-scraper
|
fe98e0d029e35f1e09d720c78a999ee5aca4b7b0
|
[
"MIT"
] | null | null | null |
import json
from typing import Optional, Tuple
from google_play_scraper import Sort
from google_play_scraper.constants.element import ElementSpecs
from google_play_scraper.constants.regex import Regex
from google_play_scraper.constants.url import Formats
from google_play_scraper.utils.request import post
LANG_DEFAULT = "en"
COUNTRY_DEFAULT = "us"
SORT_DEFAULT = Sort.NEWEST
COUNT_DEFAULT = 100
class ContinuationToken:
__slots__ = "token", "lang", "country", "sort", "count", "filter_score_with"
def __init__(self, token, lang, country, sort, count, filter_score_with):
self.token = token
self.lang = lang
self.country = country
self.sort = sort
self.count = count
self.filter_score_with = filter_score_with
def unpack(self):
return (
self.token,
self.lang,
self.country,
self.sort,
self.count,
self.filter_score_with,
)
def _fetch_review_items(url, app_id, sort, count, filter_score_with, pagination_token):
dom = post(
url,
Formats.ReviewPayload.build(
app_id,
sort,
count,
"null" if filter_score_with is None else filter_score_with,
pagination_token,
),
{"content-type": "application/x-www-form-urlencoded"},
)
match = json.loads(Regex.REVIEWS.findall(dom)[0])
return json.loads(match[0][2])[0], json.loads(match[0][2])[-1][-1]
def reviews(
app_id,
lang=None,
country=None,
sort=None,
count=None,
filter_score_with=None,
continuation_token=None,
):
# type: (str, str, str, Sort, int, Optional[int], Optional[ContinuationToken]) -> Tuple[list, ContinuationToken]
if continuation_token is not None:
token = continuation_token.token
lang = continuation_token.lang if lang is None else lang
country = continuation_token.country if country is None else country
sort = continuation_token.sort if sort is None else sort
count = continuation_token.count if count is None else count
filter_score_with = (
continuation_token.filter_score_with
if filter_score_with is None
else filter_score_with
)
else:
token = None
if lang is None:
lang = LANG_DEFAULT
if country is None:
country = COUNTRY_DEFAULT
if sort is None:
sort = SORT_DEFAULT
if count is None:
count = COUNT_DEFAULT
if count < 200:
_count = count
else:
_count = 199
url = Formats.Reviews.build(lang=lang, country=country)
result = []
while True:
review_items, token = _fetch_review_items(
url, app_id, sort, _count, filter_score_with, token
)
for review in review_items:
review_dict = {}
for k, spec in ElementSpecs.Review.items():
review_dict[k] = spec.extract_content(review)
result.append(review_dict)
remaining_count_of_reviews_to_fetch = count - len(result)
if remaining_count_of_reviews_to_fetch == 0:
break
if isinstance(token, list):
break
if remaining_count_of_reviews_to_fetch < 200:
_count = remaining_count_of_reviews_to_fetch
return (
result,
ContinuationToken(token, lang, country, sort, count, filter_score_with),
)
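# --- Added usage sketch (not part of the original module) ---
# "com.example.app" is a hypothetical package name; the returned continuation
# token can be passed back in to fetch the next page of reviews.
def _reviews_example():
    result, token = reviews(
        "com.example.app",
        lang="en",
        country="us",
        sort=Sort.NEWEST,
        count=100,
    )
    more, token = reviews("com.example.app", continuation_token=token)
    return result + more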
| 26.340909
| 116
| 0.634167
|
ad46e09f89af5d91d30ec300a8df817ada36f93f
| 4,643
|
py
|
Python
|
ippon/group/group_permissions_test.py
|
morynicz/ippon_back
|
dce901bfc649c6f8efbbf0907654e0860606b3e3
|
[
"MIT"
] | null | null | null |
ippon/group/group_permissions_test.py
|
morynicz/ippon_back
|
dce901bfc649c6f8efbbf0907654e0860606b3e3
|
[
"MIT"
] | 13
|
2018-12-22T15:30:56.000Z
|
2022-03-12T00:22:31.000Z
|
ippon/group/group_permissions_test.py
|
morynicz/ippon_back
|
dce901bfc649c6f8efbbf0907654e0860606b3e3
|
[
"MIT"
] | 2
|
2019-06-01T11:28:23.000Z
|
2020-03-27T15:19:11.000Z
|
import datetime
import unittest
import django.test
from django.contrib.auth.models import User
import ippon.group.permissions as gp
import ippon.group.serializers as gs
import ippon.models.tournament as tm
class TestGroupPermissions(django.test.TestCase):
def setUp(self):
self.admin = User.objects.create(username='admin', password='password')
self.to = tm.Tournament.objects.create(name='T1', webpage='http://w1.co', description='d1', city='c1',
date=datetime.date(year=2021, month=1, day=1), address='a1',
team_size=1, group_match_length=3, ko_match_length=3,
final_match_length=3, finals_depth=0, age_constraint=5,
age_constraint_value=20, rank_constraint=5, rank_constraint_value=7,
sex_constraint=1)
self.group_phase = self.to.group_phases.create(fight_length=3)
self.group = self.group_phase.groups.create(name='G1')
self.request = unittest.mock.Mock()
self.view = unittest.mock.Mock()
self.view.kwargs = dict()
self.request.user = self.admin
self.request.data = gs.GroupSerializer(self.group).data
class TestGroupOwnerOrReadOnlyPermissions(TestGroupPermissions):
def setUp(self):
super(TestGroupOwnerOrReadOnlyPermissions, self).setUp()
self.permission = gp.IsGroupOwnerOrReadOnly()
class TestGroupOwnerOrReadOnlyPermissionNotAdmin(TestGroupOwnerOrReadOnlyPermissions):
def setUp(self):
super(TestGroupOwnerOrReadOnlyPermissionNotAdmin, self).setUp()
def test_permits_when_safe_method(self):
self.request.method = 'GET'
result = self.permission.has_object_permission(self.request, self.view, self.group)
self.assertEqual(result, True)
def test_doesnt_permit_when_unsafe_method(self):
self.request.method = 'PUT'
result = self.permission.has_object_permission(self.request, self.view, self.group)
self.assertEqual(result, False)
def test_doesnt_permit_when_post(self):
self.request.method = 'POST'
result = self.permission.has_permission(self.request, self.view)
self.assertEqual(result, False)
class TestGroupOwnerOrReadOnlyPermissionAdmin(TestGroupOwnerOrReadOnlyPermissions):
def setUp(self):
super(TestGroupOwnerOrReadOnlyPermissionAdmin, self).setUp()
tm.TournamentAdmin.objects.create(user=self.admin, tournament=self.to, is_master=False)
def test_permits_when_safe_method(self):
self.request.method = 'GET'
result = self.permission.has_object_permission(self.request, self.view, self.group)
self.assertEqual(result, True)
def test_does_permit_when_unsafe_method(self):
self.request.method = 'PUT'
result = self.permission.has_object_permission(self.request, self.view, self.group)
self.assertEqual(result, True)
def test_does_permit_when_post(self):
self.request.method = 'POST'
result = self.permission.has_permission(self.request, self.view)
self.assertEqual(result, True)
class TestGroupOwnerPermissions(TestGroupPermissions):
def setUp(self):
super(TestGroupOwnerPermissions, self).setUp()
self.permission = gp.IsGroupOwner()
class TestGroupOwnerPermissionNotAdmin(TestGroupOwnerPermissions):
def setUp(self):
super(TestGroupOwnerPermissionNotAdmin, self).setUp()
def test_doesnt_permit_when_unsafe_method(self):
self.request.method = 'PUT'
result = self.permission.has_object_permission(self.request, self.view, self.group)
self.assertEqual(result, False)
def test_doesnt_permit_general(self):
result = self.permission.has_permission(self.request, self.view)
self.assertEqual(result, False)
class TestGroupOwnerPermissionAdmin(TestGroupOwnerPermissions):
def setUp(self):
super(TestGroupOwnerPermissionAdmin, self).setUp()
tm.TournamentAdmin.objects.create(user=self.admin, tournament=self.to, is_master=False)
self.view.kwargs = dict(pk=self.group.pk)
def test_does_permit_when_unsafe_method(self):
self.request.method = 'PUT'
result = self.permission.has_object_permission(self.request, self.view, self.group)
self.assertEqual(result, True)
def test_does_permit_general(self):
self.request.method = 'PUT'
result = self.permission.has_permission(self.request, self.view)
self.assertEqual(result, True)
| 41.455357
| 115
| 0.69524
|
619e8a015532b50f73b1a204cfb3698c03c7fda0
| 7,438
|
py
|
Python
|
python_modules/libraries/dagster-dbt/dagster_dbt/dbt_resource.py
|
asamoal/dagster
|
08fad28e4b608608ce090ce2e8a52c2cf9dd1b64
|
[
"Apache-2.0"
] | null | null | null |
python_modules/libraries/dagster-dbt/dagster_dbt/dbt_resource.py
|
asamoal/dagster
|
08fad28e4b608608ce090ce2e8a52c2cf9dd1b64
|
[
"Apache-2.0"
] | null | null | null |
python_modules/libraries/dagster-dbt/dagster_dbt/dbt_resource.py
|
asamoal/dagster
|
08fad28e4b608608ce090ce2e8a52c2cf9dd1b64
|
[
"Apache-2.0"
] | null | null | null |
import logging
from abc import abstractmethod
from typing import Any, Dict, List, Optional
from dagster import get_dagster_logger
from .types import DbtOutput
class DbtResource:
"""Base class for a resource allowing users to interface with dbt"""
def __init__(
self,
logger: Optional[logging.Logger] = None,
):
"""Constructor
Args:
            logger (Optional[logging.Logger]): A property for injecting a logger dependency.
Default is ``None``.
"""
self._logger = logger or get_dagster_logger()
def _format_params(
self, flags: Dict[str, Any], replace_underscores: bool = False
) -> Dict[str, Any]:
"""
Reformats arguments that are easier to express as a list into the format that dbt expects,
        and deletes any keys with no value.
"""
# remove any keys with a value of None
if replace_underscores:
flags = {k.replace("_", "-"): v for k, v in flags.items() if v is not None}
else:
flags = {k: v for k, v in flags.items() if v is not None}
for param in ["select", "exclude", "models"]:
if param in flags:
if isinstance(flags[param], list):
# if it's a list, format as space-separated
flags[param] = " ".join(set(flags[param]))
return flags
@property
def logger(self) -> logging.Logger:
"""logging.Logger: A property for injecting a logger dependency."""
return self._logger
@abstractmethod
def compile(
self, models: Optional[List[str]] = None, exclude: Optional[List[str]] = None, **kwargs
) -> DbtOutput:
"""
Run the ``compile`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
models (List[str], optional): the models to include in compilation.
            exclude (List[str], optional): the models to exclude from compilation.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def run(
self, models: Optional[List[str]] = None, exclude: Optional[List[str]] = None, **kwargs
) -> DbtOutput:
"""
Run the ``run`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
models (List[str], optional): the models to include in the run.
            exclude (List[str], optional): the models to exclude from the run.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def snapshot(
self, select: Optional[List[str]] = None, exclude: Optional[List[str]] = None, **kwargs
) -> DbtOutput:
"""
Run the ``snapshot`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
select (List[str], optional): the snapshots to include in the run.
exclude (List[str], optional): the snapshots to exclude from the run.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def test(
self,
models: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
data: bool = True,
schema: bool = True,
**kwargs,
) -> DbtOutput:
"""
Run the ``test`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
models (List[str], optional): the models to include in testing.
exclude (List[str], optional): the models to exclude from testing.
data (bool, optional): If ``True`` (default), then run data tests.
schema (bool, optional): If ``True`` (default), then run schema tests.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def seed(
self,
show: bool = False,
select: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
**kwargs,
) -> DbtOutput:
"""
Run the ``seed`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
show (bool, optional): If ``True``, then show a sample of the seeded data in the
response. Defaults to ``False``.
select (List[str], optional): the snapshots to include in the run.
exclude (List[str], optional): the snapshots to exclude from the run.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def ls(
self,
select: Optional[List[str]] = None,
models: Optional[List[str]] = None,
exclude: Optional[List[str]] = None,
**kwargs,
) -> DbtOutput:
"""
Run the ``ls`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
select (List[str], optional): the resources to include in the output.
models (List[str], optional): the models to include in the output.
exclude (List[str], optional): the resources to exclude from the output.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def build(self, select: Optional[List[str]] = None, **kwargs) -> DbtOutput:
"""
Run the ``build`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
select (List[str], optional): the models/resources to include in the run.
Returns:
DbtOutput: object containing parsed output from dbt
"""
raise NotImplementedError()
@abstractmethod
def generate_docs(self, compile_project: bool = False, **kwargs) -> DbtOutput:
"""
Run the ``docs generate`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
compile_project (bool, optional): If true, compile the project before generating a catalog.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def run_operation(
self, macro: str, args: Optional[Dict[str, Any]] = None, **kwargs
) -> DbtOutput:
"""
Run the ``run-operation`` command on a dbt project. kwargs are passed in as additional parameters.
Args:
macro (str): the dbt macro to invoke.
args (Dict[str, Any], optional): the keyword arguments to be supplied to the macro.
Returns:
DbtOutput: object containing parsed output from dbt
"""
@abstractmethod
def get_run_results_json(self, **kwargs) -> Optional[Dict[str, Any]]:
"""
Get a parsed version of the run_results.json file for the relevant dbt project.
Returns:
Dict[str, Any]: dictionary containing the parsed contents of the run_results json file
for this dbt project.
"""
@abstractmethod
def get_manifest_json(self, **kwargs) -> Optional[Dict[str, Any]]:
"""
Get a parsed version of the manifest.json file for the relevant dbt project.
Returns:
Dict[str, Any]: dictionary containing the parsed contents of the manifest json file
for this dbt project.
"""
| 33.656109
| 106
| 0.594111
|
f6018de08cfe2aa928b825b005738309a4df4e16
| 5,884
|
py
|
Python
|
collatz/collatz.py
|
ale93111/Fractals
|
3a4b8c1102e531ce33b21b16ecfe26ed52712886
|
[
"MIT"
] | null | null | null |
collatz/collatz.py
|
ale93111/Fractals
|
3a4b8c1102e531ce33b21b16ecfe26ed52712886
|
[
"MIT"
] | null | null | null |
collatz/collatz.py
|
ale93111/Fractals
|
3a4b8c1102e531ce33b21b16ecfe26ed52712886
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import numpy as np
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
def collatz_even(n):
return n*2
def collatz_odd(n):
temp = int((n*2-1)/3)
if( temp % 2 and not (n*2-1)% 3 ):# and temp>1):
return temp
else:
return 0
def sumvect(r1,f1,r2,f2):
r = np.sqrt(r1*r1 + r2*r2 + 2*r1*r2*np.cos(f2 - f1))
f = f1 + np.arctan2(r2*np.sin(f2 - f1), r1 + r2*np.cos(f2 - f1))
return r,f
class Tree(object):
def makenlist(self,head):
head.nlist.append(self.n)#,self.r,self.f])
if(self.right):
self.right.makenlist(head)
if(self.left):
self.left.makenlist(head)
def makelinelist(self,head):
if(self.right):
head.linelist.append([self.x,self.y,self.right.x,self.right.y,self.stop,self.right.n]+list(self.right.color))
self.right.makelinelist(head)
if(self.left):
head.linelist.append([self.x,self.y,self.left.x ,self.left.y ,self.stop,self.left.n ]+list(self.left.color))
self.left.makelinelist(head)
def __init__(self, n=2,x=0,y=0,f=15/180*np.pi, color=(255,0,0,0),stop=10):
self.n = n
self.nlist = []
self.linelist = []
self.stop = stop
self.x = x
self.y = y
self.f = f
self.color = color
#self.nlist.append(n)
if(stop > 0):
a = collatz_even(n)
fa = self.f + 15/180*np.pi
xa = self.x + np.cos(fa)
ya = self.y + np.sin(fa)
self.right = Tree(a,xa,ya,fa,(255,0,0,0),stop-1)
#print("a=",a)
b = collatz_odd(n)
fb = self.f - 15/180*np.pi*np.log(2)/np.log(3)
xb = self.x + np.cos(fb)
yb = self.y + np.sin(fb)
if(b>1):# and (b not in self.nlist)):
self.left = Tree(b,xb,yb,fb,(0,0,255,0),stop-1)
#print("b=",b)
else:
self.left = None
else:
self.right = None
self.left = None
def __repr__(self):
return str(self.nlist)
#%%
n = 38
t = Tree(2,stop=n)
t.makenlist(t)
#print(t.nlist)
t.makelinelist(t)
print("\n len=",len(t.nlist),
"\n max=",np.max(t.nlist))
#%%
coord = np.array(t.linelist)
for j in range(len(coord)):
coord[j][:4] = 100*coord[j][:4]
for j in range(len(coord)):
coord[j] = [np.int(c) for c in coord[j]]
temp = np.transpose(coord)
#temp = np.array([c-np.min(c)+50 for c in temp],dtype=int) #offset
xoff = np.min([np.min(temp[0]),np.min(temp[2])])
yoff = np.min([np.min(temp[1]),np.min(temp[3])])
temp[0] = temp[0] - xoff
temp[2] = temp[2] - xoff
temp[1] = temp[1] - yoff
temp[3] = temp[3] - yoff
temp[:4] = temp[:4] + 50
#for j in range(len(temp)-2):
# temp[j] = temp[j] + 50
w = int(np.max([np.max(temp[0]),np.max(temp[2])]) +50)
h = int(np.max([np.max(temp[1]),np.max(temp[3])]) +50)
target = 1024
ratio = target/h
h_new = int(h*ratio)
w_new = int(w*ratio)
temp[:4] = ratio*temp[:4]
coord = np.transpose(temp)
coord = np.array(coord,dtype=int)
#del temp
#%%
from operator import itemgetter
coord2 = sorted(coord, key=itemgetter(4), reverse=True)
coord2t = np.transpose(coord2)
#%%
from PIL import Image, ImageDraw
im = Image.new('RGBA', (w_new, h_new), (0,0,0,0))
draw = ImageDraw.Draw(im)
count = 0
for j in range(coord2[0][4]):
start = coord2[0][4]-j
while True:
#if coord2[count][5] < np.average(t.nlist):
draw.line(list(coord2[count][:4]), fill=tuple(coord2[count][-4:]),width=3)
count += 1
if count==len(coord2):
break
new = coord2[count][4]
if start != new:
break
#im = im.resize((int(w/2),int(h/2)))
im.rotate(180).save("/home/alessandro/Documenti/Fractals/collatz/gif/collatz"+str(j)+".jpg")
#%%
fig = plt.figure(figsize=(12,8))
ax = fig.gca(projection='3d')
ax.set_xlim(0,w_new)
ax.set_ylim(0,h_new)
ax.set_zlim(0,n)
#ax.set_axis_off()
count = 0
for j in range(coord2[0][4]):
start = coord2[0][4]-j
while True:
#if coord2[count][5] < np.average(t.nlist):
c = "r" if coord2[count][6] else "b"
ax.plot( [coord2[count][0],coord2[count][2]],[coord2[count][1],coord2[count][3]],[n-coord2[count][4],n+1-coord2[count][4]],c)
#draw.line(list(coord2[count][:4]), fill=tuple(coord2[count][-4:]),width=3)
count += 1
if count==len(coord2):
break
new = coord2[count][4]
if start != new:
break
#im = im.resize((int(w/2),int(h/2)))
fig.savefig("/home/alessandro/Documenti/Fractals/collatz/gif3d/collatz"+str(j)+".jpg")
#%%
j = n
for angle in range(0, 36):
ax.view_init(30, 10*angle-50)
fig.savefig("/home/alessandro/Documenti/Fractals/collatz/gif3d/collatz"+str(j)+".jpg")
j += 1
#%%
fig = plt.figure()
ax = fig.gca(projection='3d')
for p in coord2:
c = "r" if p[6] else "b"
ax.plot( [p[0],p[2]],[p[1],p[3]],[35-p[4],36-p[4]],c)#, label='parametric curve')
ax.set_xlim(0,h_new)
ax.set_xlim(0,h_new)
ax.set_axis_off()
ax.view_init(30, 0)
#ax.legend()
#plt.show()
#%%
#%%
from PIL import Image, ImageDraw
im = Image.new('RGBA', (w+50, h+50), (0,0,0,0))
draw = ImageDraw.Draw(im)
for p in coord2:
draw.line(list(p[:4]), fill=tuple(p[-4:]),width=6)
#draw.rectangle([coord[0][0]-15,coord[1][0]-15,coord[0][0]+15,coord[1][0]+15],fill=(255,255,255,0))
#im.show()
#%%
im.rotate(180).save('/home/alessandro/Documents/Fractals/collatz/collatz.jpg')
#%%
from operator import itemgetter
prova = sorted(coord, key=itemgetter(4), reverse=True)
#%%
prova = [ [t.nlist[j+1]] + list(coord[j]) for j in range(len(coord))]
#%%
pairs = list(zip(t.nlist[::2], t.nlist[1::2]))
| 28.288462
| 133
| 0.566111
|
8422fd91fbedccedb29bdd544e9b9b67abf6d46a
| 4,124
|
py
|
Python
|
bin/rename-file-backup.py
|
richardzilincikPantheon/sdo_analysis
|
18c2c89d0dd41e30c3f4b34ee92febb4373f0bb4
|
[
"Apache-2.0"
] | null | null | null |
bin/rename-file-backup.py
|
richardzilincikPantheon/sdo_analysis
|
18c2c89d0dd41e30c3f4b34ee92febb4373f0bb4
|
[
"Apache-2.0"
] | null | null | null |
bin/rename-file-backup.py
|
richardzilincikPantheon/sdo_analysis
|
18c2c89d0dd41e30c3f4b34ee92febb4373f0bb4
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright The IETF Trust 2019, All Rights Reserved
# Copyright (c) 2018 Cisco and/or its affiliates.
# This software is licensed to you under the terms of the Apache License, Version 2.0 (the "License").
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# The code, technical concepts, and all information contained herein, are the property of Cisco Technology, Inc.
# and/or its affiliated entities, under various laws including copyright, international treaties, patent,
# and/or contract. Any use of the material herein must be in accordance with the terms of the License.
# All rights not expressly granted by the License are reserved.
# Unless required by applicable law or agreed to separately in writing, software distributed under the
# License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied.
__author__ = 'Benoit Claise'
__copyright__ = "Copyright(c) 2015-2018, Cisco Systems, Inc., Copyright The IETF Trust 2019, All Rights Reserved"
__license__ = "Apache V2.0"
__email__ = "bclaise@cisco.com"
import os
import shutil
import time
import argparse
import configparser
from datetime import datetime
# ----------------------------------------------------------------------
# Functions
# ----------------------------------------------------------------------
# ----------------------------------------------------------------------
# Main
# ----------------------------------------------------------------------
if __name__ == "__main__":
config = configparser.ConfigParser()
config._interpolation = configparser.ExtendedInterpolation()
config.read('/etc/yangcatalog/yangcatalog.conf')
web_private = config.get('Web-Section', 'private-directory')
backup_directory = config.get('Directory-Section', 'backup')
    parser = argparse.ArgumentParser(description='Back up files, adding a timestamp based on their modification time')
parser.add_argument("--documentpath", default= web_private + '/',
help="The optional directory where to find the file to backup. "
"Default is '" + web_private + "'")
parser.add_argument("--backuppath", default= backup_directory + '/',
help="The optional directory where to backup the file. "
"Default is '" + backup_directory + "'")
parser.add_argument("--debug", type=int, default=0, help="Debug level; the default is 0")
args = parser.parse_args()
debug_level = args.debug
name_to_backup = ['IETFYANGPageMain.html', 'HydrogenODLPageCompilation.html', 'HeliumODLPageCompilation.html', 'LithiumODLPageCompilation.html',
'IETFCiscoAuthorsYANGPageCompilation.html', 'IETFYANGOutOfRFC.html', 'IETFDraftYANGPageCompilation.html',
'IEEEStandardYANGPageCompilation.html','IEEEExperimentalYANGPageCompilation.html', 'YANGPageMain.html', 'IETFYANGRFC.html']
# name_to_backup = ['temp.html']
for file in name_to_backup:
file_no_extension = file.split(".")[0]
file_extension = file.split(".")[-1]
full_path_file = args.documentpath + file
if os.path.isfile(full_path_file):
modifiedTime = os.path.getmtime(full_path_file)
timestamp = (datetime.fromtimestamp(modifiedTime).strftime("%Y_%m_%d"))
if file_no_extension == 'IETFYANGRFC':
file_no_extension = 'IETFYANGOutOfRFC'
new_full_path_file = args.backuppath + file_no_extension + "_" + timestamp + "." + file_extension
if debug_level > 0:
print("file full path: " + full_path_file)
print("file without extension: " + file_no_extension)
print("file extension: " + file_extension)
print("full path: " + full_path_file)
print("last modified: %s" % time.ctime(os.path.getmtime(full_path_file)))
print("timestamp: " + str(timestamp))
print("new file name: " + new_full_path_file)
shutil.copy2(full_path_file, new_full_path_file)
else:
print("*** file " + full_path_file + " not present!")
| 49.686747
| 144
| 0.646945
|
8986b7038979974eec223c4eea91646cb89bf024
| 10,771
|
py
|
Python
|
pyasn1/type/char.py
|
keyrrjperino/google-spreadsheet-get
|
e00769d7fb715ff6e86c5404b97407d36ca3c786
|
[
"MIT"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
pyasn1/type/char.py
|
keyrrjperino/google-spreadsheet-get
|
e00769d7fb715ff6e86c5404b97407d36ca3c786
|
[
"MIT"
] | 4,640
|
2015-07-08T16:19:08.000Z
|
2019-12-02T15:01:27.000Z
|
pyasn1/type/char.py
|
keyrrjperino/google-spreadsheet-get
|
e00769d7fb715ff6e86c5404b97407d36ca3c786
|
[
"MIT"
] | 698
|
2015-06-02T19:18:35.000Z
|
2022-03-29T16:57:15.000Z
|
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pyasn1/license.html
#
import sys
from pyasn1 import error
from pyasn1.type import tag
from pyasn1.type import univ
__all__ = ['NumericString', 'PrintableString', 'TeletexString', 'T61String', 'VideotexString',
'IA5String', 'GraphicString', 'VisibleString', 'ISO646String',
'GeneralString', 'UniversalString', 'BMPString', 'UTF8String']
NoValue = univ.NoValue
noValue = univ.noValue
class AbstractCharacterString(univ.OctetString):
"""Creates |ASN.1| schema or value object.
|ASN.1| objects are immutable and duck-type Python 2 :class:`unicode` or Python 3 :class:`str`.
When used in octet-stream context, |ASN.1| type assumes "|encoding|" encoding.
Keyword Args
------------
value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object
unicode object (Python 2) or string (Python 3), alternatively string
(Python 2) or bytes (Python 3) representing octet-stream of serialised
unicode string (note `encoding` parameter) or |ASN.1| class instance.
tagSet: :py:class:`~pyasn1.type.tag.TagSet`
Object representing non-default ASN.1 tag(s)
subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection`
Object representing non-default ASN.1 subtype constraint(s)
encoding: :py:class:`str`
Unicode codec ID to encode/decode :class:`unicode` (Python 2) or
:class:`str` (Python 3) the payload when |ASN.1| object is used
in octet-stream context.
Raises
------
:py:class:`~pyasn1.error.PyAsn1Error`
On constraint violation or bad initializer.
"""
if sys.version_info[0] <= 2:
def __str__(self):
try:
# `str` is Py2 text representation
return self._value.encode(self.encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
"Can't encode string '%s' with codec %s" % (self._value, self.encoding)
)
def __unicode__(self):
return unicode(self._value)
def prettyIn(self, value):
try:
if isinstance(value, unicode):
return value
elif isinstance(value, str):
return value.decode(self.encoding)
elif isinstance(value, (tuple, list)):
return self.prettyIn(''.join([chr(x) for x in value]))
elif isinstance(value, univ.OctetString):
return value.asOctets().decode(self.encoding)
else:
return unicode(value)
except (UnicodeDecodeError, LookupError):
raise error.PyAsn1Error(
"Can't decode string '%s' with codec %s" % (value, self.encoding)
)
def asOctets(self, padding=True):
return str(self)
def asNumbers(self, padding=True):
return tuple([ord(x) for x in str(self)])
else:
def __str__(self):
# `unicode` is Py3 text representation
return str(self._value)
def __bytes__(self):
try:
return self._value.encode(self.encoding)
except UnicodeEncodeError:
raise error.PyAsn1Error(
"Can't encode string '%s' with codec %s" % (self._value, self.encoding)
)
def prettyIn(self, value):
try:
if isinstance(value, str):
return value
elif isinstance(value, bytes):
return value.decode(self.encoding)
elif isinstance(value, (tuple, list)):
return self.prettyIn(bytes(value))
elif isinstance(value, univ.OctetString):
return value.asOctets().decode(self.encoding)
else:
return str(value)
except (UnicodeDecodeError, LookupError):
raise error.PyAsn1Error(
"Can't decode string '%s' with codec %s" % (value, self.encoding)
)
def asOctets(self, padding=True):
return bytes(self)
def asNumbers(self, padding=True):
return tuple(bytes(self))
#
# See OctetString.prettyPrint() for the explanation
#
def prettyOut(self, value):
return value
def prettyPrint(self, scope=0):
# first see if subclass has its own .prettyOut()
value = self.prettyOut(self._value)
if value is not self._value:
return value
return AbstractCharacterString.__str__(self)
def __reversed__(self):
return reversed(self._value)
class NumericString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class PrintableString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class TeletexString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20)
)
encoding = 'iso-8859-1'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class T61String(TeletexString):
__doc__ = TeletexString.__doc__
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class VideotexString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21)
)
encoding = 'iso-8859-1'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class IA5String(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class GraphicString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25)
)
encoding = 'iso-8859-1'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class VisibleString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26)
)
encoding = 'us-ascii'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class ISO646String(VisibleString):
__doc__ = VisibleString.__doc__
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class GeneralString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27)
)
encoding = 'iso-8859-1'
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class UniversalString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28)
)
encoding = "utf-32-be"
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class BMPString(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30)
)
encoding = "utf-16-be"
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
class UTF8String(AbstractCharacterString):
__doc__ = AbstractCharacterString.__doc__
#: Set (on class, not on instance) or return a
#: :py:class:`~pyasn1.type.tag.TagSet` object representing ASN.1 tag(s)
#: associated with |ASN.1| type.
tagSet = AbstractCharacterString.tagSet.tagImplicitly(
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12)
)
encoding = "utf-8"
# Optimization for faster codec lookup
typeId = AbstractCharacterString.getTypeId()
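# --- Added usage sketch (not part of the original module) ---
#
#     s = UTF8String('abc')    # schema/value object holding text
#     s.asOctets()             # -> b'abc', the UTF-8 serialisation
#     NumericString('12345')   # implicitly tagged [UNIVERSAL 18]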
| 33.450311
| 99
| 0.655464
|
1d3c68d048ff20e52276fa0470499092f91791bc
| 4,590
|
py
|
Python
|
shapeworld/datasets/nlvr_agreement/nlvr.py
|
shgoren/ShapeWorld
|
a6633bddbf195d0dc4cbdbe07564a98149337bcf
|
[
"MIT"
] | 1
|
2021-04-08T16:14:25.000Z
|
2021-04-08T16:14:25.000Z
|
shapeworld/datasets/nlvr_agreement/nlvr.py
|
shgoren/ShapeWorld
|
a6633bddbf195d0dc4cbdbe07564a98149337bcf
|
[
"MIT"
] | null | null | null |
shapeworld/datasets/nlvr_agreement/nlvr.py
|
shgoren/ShapeWorld
|
a6633bddbf195d0dc4cbdbe07564a98149337bcf
|
[
"MIT"
] | 1
|
2021-02-23T17:10:46.000Z
|
2021-02-23T17:10:46.000Z
|
from shapeworld import util
from shapeworld.dataset import Dataset
from shapeworld.datasets import nlvr_util
class NLVR(Dataset):
def __init__(self, directory):
world_size = tuple(next(nlvr_util.images_iter(directory=directory, mode='train'))[1][0].shape[:2])
self.description_size = 0
vocabulary = set()
for _, _, description, _ in nlvr_util.descriptions_iter(directory=directory, mode='train'):
self.description_size = max(self.description_size, len(description))
vocabulary.update(description)
vocabularies = dict(language=sorted(vocabulary))
super(NLVR, self).__init__(world_size=world_size, vectors=dict(description=self.description_size), vocabularies=vocabularies)
self.nlvr = {mode: nlvr_util.nlvr(directory=directory, mode=mode) for mode in ('train', 'validation', 'test')}
@property
def name(self):
return 'nlvr'
@property
def type(self):
return 'nlvr_agreement'
@property
def values(self):
return dict(world1='world', world2='world', world3='world', world_model1='model', world_model2='model', world_model3='model', description='language', description_length='int', description_model='model', agreement='float')
def generate(self, n, mode=None, noise_range=None, include_model=False, alternatives=False):
assert noise_range is None or noise_range == 0.0
batch = self.zero_batch(n, include_model=include_model, alternatives=alternatives)
unknown = self.words['[UNKNOWN]']
for i in range(n):
try:
worlds, world_models, description, agreement = next(self.nlvr[mode])
except StopIteration:
if i > 0:
return {key: value[:i] for key, value in batch.items()}
else:
return None
batch['world1'][i], batch['world2'][i], batch['world3'][i] = worlds
if include_model:
batch['world_model1'][i], batch['world_model2'][i], batch['world_model3'][i] = world_models
assert len(description) <= self.description_size
for w, word in enumerate(description):
batch['description'][i][w] = self.words.get(word, unknown)
batch['description_length'][i] = len(description)
batch['agreement'][i] = agreement
return batch
def get_html(self, generated):
id2word = self.vocabulary(value_type='language')
descriptions = generated['description']
description_lengths = generated['description_length']
agreements = generated['agreement']
data_html = list()
for n, (description, description_length, agreement) in enumerate(zip(descriptions, description_lengths, agreements)):
if agreement == 1.0:
agreement = 'correct'
elif agreement == 0.0:
agreement = 'incorrect'
else:
assert False
data_html.append('<div class="{agreement}"><div class="world"><img src="world1-{world}.bmp" alt="world1-{world}.bmp"></div><div class="vertical"></div><div class="world"><img src="world2-{world}.bmp" alt="world2-{world}.bmp"></div><div class="vertical"></div><div class="world"><img src="world3-{world}.bmp" alt="world3-{world}.bmp"></div><div class="num"><p><b>({num})</b></p></div><div class="description"><p>{description}</p></div></div>'.format(
agreement=agreement,
world=n,
num=(n + 1),
description=util.tokens2string(id2word[word] for word in description[:description_length])
))
html = '<!DOCTYPE html><html><head><title>{dtype} {name}</title><style>.data{{width: 100%; height: 100%;}} .correct{{width: 100%; margin-top: 1px; margin-bottom: 1px; background-color: #BBFFBB;}} .incorrect{{width: 100%; margin-top: 1px; margin-bottom: 1px; background-color: #FFBBBB;}} .world{{height: {world_height}px; display: inline-block; vertical-align: middle;}} .vertical{{display: inline-block; width: 1px; height: {world_height}px; background-color: #777777; vertical-align: middle;}} .num{{display: inline-block; vertical-align: middle; margin-left: 10px;}} .description{{display: inline-block; vertical-align: middle; margin-left: 10px;}}</style></head><body><div class="data">{data}</div></body></html>'.format(
dtype=self.type,
name=self.name,
world_height=self.world_shape[0],
data=''.join(data_html)
)
return html
dataset = NLVR
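# --- Added usage sketch (not part of the original module) ---
# The directory below is a hypothetical path to the raw NLVR corpus files.
#
#     nlvr_dataset = NLVR(directory='/path/to/nlvr')
#     batch = nlvr_dataset.generate(n=32, mode='train')
#     # batch contains 'world1'..'world3', 'description', 'agreement', ...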
| 55.301205
| 732
| 0.632026
|
3dc115a890720210897bc1d4ef7106ddb77a2be6
| 715
|
py
|
Python
|
py/268. Missing Number.py
|
longwangjhu/LeetCode
|
a5c33e8d67e67aedcd439953d96ac7f443e2817b
|
[
"MIT"
] | 3
|
2021-08-07T07:01:34.000Z
|
2021-08-07T07:03:02.000Z
|
py/268. Missing Number.py
|
longwangjhu/LeetCode
|
a5c33e8d67e67aedcd439953d96ac7f443e2817b
|
[
"MIT"
] | null | null | null |
py/268. Missing Number.py
|
longwangjhu/LeetCode
|
a5c33e8d67e67aedcd439953d96ac7f443e2817b
|
[
"MIT"
] | null | null | null |
# https://leetcode.com/problems/missing-number/
# Given an array nums containing n distinct numbers in the range [0, n], return
# the only number in the range that is missing from the array.
# Follow up: Could you implement a solution using only O(1) extra space complexity
# and O(n) runtime complexity?
################################################################################
# XOR 0,...,n together with every element of nums: paired values cancel,
# leaving only the missing number. O(n) time, O(1) extra space.
from typing import List
class Solution:
def missingNumber(self, nums: List[int]) -> int:
n = len(nums)
if n == 1: return 1 if nums[0] == 0 else 0
ans = 0
for i in range(1, n + 1):
ans ^= i
for num in nums:
ans ^= num
return ans
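# A minimal usage sketch (not part of the original LeetCode solution):
if __name__ == '__main__':
    assert Solution().missingNumber([3, 0, 1]) == 2
    assert Solution().missingNumber([0, 1]) == 2
    print('missingNumber checks passed')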
| 27.5
| 82
| 0.521678
|
b0244d7d7108bc762ddc7c8d277d87f1296374e2
| 1,367
|
py
|
Python
|
src/main/java/org/incoder/thrift/py/service.py
|
RootCluster/rc_netty
|
7de0fe9f5df452fbcbfcd658fc8046ad3ff2b429
|
[
"Apache-2.0"
] | null | null | null |
src/main/java/org/incoder/thrift/py/service.py
|
RootCluster/rc_netty
|
7de0fe9f5df452fbcbfcd658fc8046ad3ff2b429
|
[
"Apache-2.0"
] | null | null | null |
src/main/java/org/incoder/thrift/py/service.py
|
RootCluster/rc_netty
|
7de0fe9f5df452fbcbfcd658fc8046ad3ff2b429
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2019 The Jerry xu Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
__author__ = 'Jerry xu'
from person_handler import PersonHandler
from generate import PersonService
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TCompactProtocol
from thrift.server import TServer
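# Wire the generated PersonService to the concrete handler and serve it on
# port 9090: framed transport + compact protocol on a blocking TSimpleServer.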
try:
person_handler = PersonHandler()
processor = PersonService.Processor(person_handler)
serverSocket = TSocket.TServerSocket(port=9090)
transportFactory = TTransport.TFramedTransportFactory()
protocolFactory = TCompactProtocol.TCompactProtocolFactory()
server = TServer.TSimpleServer(processor, serverSocket, transportFactory, protocolFactory)
server.serve()
except Thrift.TException as tx:
print(tx.message)
| 34.175
| 94
| 0.773226
|
dac1fe357fb76a2953a3188c8e852dbb9bec95b2
| 181
|
py
|
Python
|
archive/2016/week10/tasks/sum.py
|
YAtOff/python0
|
b5af5004131d64dd52d42746eddb72b6c43a13c7
|
[
"Apache-2.0"
] | 6
|
2017-11-08T14:04:39.000Z
|
2019-03-24T22:11:04.000Z
|
archive/2016/week10/tasks/sum.py
|
YAtOff/python0
|
b5af5004131d64dd52d42746eddb72b6c43a13c7
|
[
"Apache-2.0"
] | null | null | null |
archive/2016/week10/tasks/sum.py
|
YAtOff/python0
|
b5af5004131d64dd52d42746eddb72b6c43a13c7
|
[
"Apache-2.0"
] | 7
|
2015-10-27T09:04:58.000Z
|
2019-03-03T14:18:26.000Z
|
"""
Define a function that computes the sum of the numbers from 1 to N.
Use a while loop.
"""
def sum_1_to_n(n):
    total, i = 0, 1
    while i <= n:
        total += i
        i += 1
    return total
print(sum_1_to_n(3)) # 6
assert sum_1_to_n(3) == 6
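# Cross-check against the closed-form formula n * (n + 1) / 2 (added for illustration):
assert sum_1_to_n(100) == 100 * 101 // 2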
| 15.083333
| 64
| 0.696133
|
dd1832145122e3a436cdbdbfa119ca1255e68ad5
| 920
|
py
|
Python
|
webapp/webapp/urls.py
|
harshul1610/deeplearningdemos.ai
|
7c1491f5ea02a31b7fbcbd1223438f825ef1cbcc
|
[
"MIT"
] | 2
|
2019-01-11T06:46:24.000Z
|
2019-01-17T04:45:49.000Z
|
webapp/webapp/urls.py
|
harshul1610/deeplearningdemos.ai
|
7c1491f5ea02a31b7fbcbd1223438f825ef1cbcc
|
[
"MIT"
] | 15
|
2019-04-02T03:47:22.000Z
|
2022-02-09T23:45:03.000Z
|
webapp/webapp/urls.py
|
harshul1610/deeplearningdemos.ai
|
7c1491f5ea02a31b7fbcbd1223438f825ef1cbcc
|
[
"MIT"
] | null | null | null |
"""webapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('dogs_classification.urls')),
url(r'^', include('ocr_classification.urls')),
url(r'^', include('home.urls')),
]
| 35.384615
| 79
| 0.684783
|
3d4e766c39931c2aee7d42205ca62ccadd8c644b
| 17,335
|
py
|
Python
|
airflow/gcp/hooks/cloud_storage_transfer_service.py
|
mgasner/airflow
|
3bc3521756b8b8be027df05c476e727d86b63b7b
|
[
"Apache-2.0"
] | 1
|
2019-09-06T09:55:18.000Z
|
2019-09-06T09:55:18.000Z
|
airflow/gcp/hooks/cloud_storage_transfer_service.py
|
mgasner/airflow
|
3bc3521756b8b8be027df05c476e727d86b63b7b
|
[
"Apache-2.0"
] | null | null | null |
airflow/gcp/hooks/cloud_storage_transfer_service.py
|
mgasner/airflow
|
3bc3521756b8b8be027df05c476e727d86b63b7b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains a Google Storage Transfer Service Hook.
"""
import json
import time
import warnings
from copy import deepcopy
from datetime import timedelta
from typing import Dict, List, Tuple, Union, Optional
from googleapiclient.discovery import build
from airflow.exceptions import AirflowException
from airflow.contrib.hooks.gcp_api_base_hook import GoogleCloudBaseHook
# Time to sleep between active checks of the operation results
TIME_TO_SLEEP_IN_SECONDS = 10
class GcpTransferJobsStatus:
"""
Class with GCP Transfer jobs statuses.
"""
ENABLED = "ENABLED"
DISABLED = "DISABLED"
DELETED = "DELETED"
class GcpTransferOperationStatus:
"""
Class with GCP Transfer operations statuses.
"""
IN_PROGRESS = "IN_PROGRESS"
PAUSED = "PAUSED"
SUCCESS = "SUCCESS"
FAILED = "FAILED"
ABORTED = "ABORTED"
# A list of keywords used to build a request or response
ACCESS_KEY_ID = "accessKeyId"
ALREADY_EXISTING_IN_SINK = "overwriteObjectsAlreadyExistingInSink"
AWS_ACCESS_KEY = "awsAccessKey"
AWS_S3_DATA_SOURCE = 'awsS3DataSource'
BODY = 'body'
BUCKET_NAME = 'bucketName'
DAY = 'day'
DESCRIPTION = "description"
FILTER = 'filter'
FILTER_JOB_NAMES = 'job_names'
FILTER_PROJECT_ID = 'project_id'
GCS_DATA_SINK = 'gcsDataSink'
GCS_DATA_SOURCE = 'gcsDataSource'
HOURS = "hours"
HTTP_DATA_SOURCE = 'httpDataSource'
LIST_URL = 'list_url'
METADATA = 'metadata'
MINUTES = "minutes"
MONTH = 'month'
NAME = 'name'
OBJECT_CONDITIONS = 'object_conditions'
OPERATIONS = 'operations'
PROJECT_ID = 'projectId'
SCHEDULE = 'schedule'
SCHEDULE_END_DATE = 'scheduleEndDate'
SCHEDULE_START_DATE = 'scheduleStartDate'
SECONDS = "seconds"
SECRET_ACCESS_KEY = "secretAccessKey"
START_TIME_OF_DAY = 'startTimeOfDay'
STATUS = "status"
STATUS1 = 'status'
TRANSFER_JOB = 'transfer_job'
TRANSFER_JOB_FIELD_MASK = 'update_transfer_job_field_mask'
TRANSFER_JOBS = 'transferJobs'
TRANSFER_OPERATIONS = 'transferOperations'
TRANSFER_OPTIONS = 'transfer_options'
TRANSFER_SPEC = 'transferSpec'
YEAR = 'year'
NEGATIVE_STATUSES = {GcpTransferOperationStatus.FAILED, GcpTransferOperationStatus.ABORTED}
# noinspection PyAbstractClass
class GCPTransferServiceHook(GoogleCloudBaseHook):
"""
Hook for Google Storage Transfer Service.
All the methods in the hook where project_id is used must be called with
keyword arguments rather than positional.
"""
def __init__(
self,
api_version: str = 'v1',
gcp_conn_id: str = 'google_cloud_default',
delegate_to: str = None
) -> None:
super().__init__(gcp_conn_id, delegate_to)
self.api_version = api_version
self._conn = None
def get_conn(self):
"""
Retrieves connection to Google Storage Transfer service.
:return: Google Storage Transfer service object
:rtype: dict
"""
if not self._conn:
http_authorized = self._authorize()
self._conn = build(
'storagetransfer', self.api_version, http=http_authorized, cache_discovery=False
)
return self._conn
@GoogleCloudBaseHook.catch_http_exception
def create_transfer_job(self, body: Dict) -> Dict:
"""
Creates a transfer job that runs periodically.
:param body: (Required) A request body, as described in
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/patch#request-body
:type body: dict
:return: transfer job.
See:
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob
:rtype: dict
"""
body = self._inject_project_id(body, BODY, PROJECT_ID)
return self.get_conn().transferJobs().create(body=body).execute( # pylint: disable=no-member
num_retries=self.num_retries)
@GoogleCloudBaseHook.fallback_to_default_project_id
@GoogleCloudBaseHook.catch_http_exception
def get_transfer_job(self, job_name: str, project_id: str = None) -> Dict:
"""
        Gets the latest state of a transfer job in Google Storage
        Transfer Service.
:param job_name: (Required) Name of the job to be fetched
:type job_name: str
:param project_id: (Optional) the ID of the project that owns the Transfer
Job. If set to None or missing, the default project_id from the GCP
connection is used.
:type project_id: str
:return: Transfer Job
:rtype: dict
"""
assert project_id is not None
return (
self.get_conn() # pylint: disable=no-member
.transferJobs()
.get(jobName=job_name, projectId=project_id)
.execute(num_retries=self.num_retries)
)
def list_transfer_job(self, request_filter: Dict = None, **kwargs) -> List[Dict]:
"""
        Lists transfer jobs in Google Storage Transfer
        Service that match the specified filter.
:param request_filter: (Required) A request filter, as described in
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/list#body.QUERY_PARAMETERS.filter
:type request_filter: dict
:return: List of Transfer Jobs
:rtype: list[dict]
"""
# To preserve backward compatibility
# TODO: remove one day
if request_filter is None:
if 'filter' in kwargs:
request_filter = kwargs['filter']
assert isinstance(request_filter, Dict)
warnings.warn("Use 'request_filter' instead of 'filter'", DeprecationWarning)
else:
raise TypeError("list_transfer_job missing 1 required positional argument: 'request_filter'")
conn = self.get_conn()
request_filter = self._inject_project_id(request_filter, FILTER, FILTER_PROJECT_ID)
request = conn.transferJobs().list(filter=json.dumps(request_filter)) # pylint: disable=no-member
jobs = [] # type: List[Dict]
while request is not None:
response = request.execute(num_retries=self.num_retries)
jobs.extend(response[TRANSFER_JOBS])
request = conn.transferJobs().list_next(previous_request=request, # pylint: disable=no-member
previous_response=response)
return jobs
@GoogleCloudBaseHook.catch_http_exception
def update_transfer_job(self, job_name: str, body: Dict) -> Dict:
"""
Updates a transfer job that runs periodically.
:param job_name: (Required) Name of the job to be updated
:type job_name: str
:param body: A request body, as described in
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/patch#request-body
:type body: dict
:return: If successful, TransferJob.
:rtype: dict
"""
body = self._inject_project_id(body, BODY, PROJECT_ID)
return (
self.get_conn() # pylint: disable=no-member
.transferJobs()
.patch(jobName=job_name, body=body)
.execute(num_retries=self.num_retries)
)
@GoogleCloudBaseHook.fallback_to_default_project_id
@GoogleCloudBaseHook.catch_http_exception
def delete_transfer_job(self, job_name: str, project_id: str = None) -> None:
"""
Deletes a transfer job. This is a soft delete. After a transfer job is
deleted, the job and all the transfer executions are subject to garbage
collection. Transfer jobs become eligible for garbage collection
30 days after soft delete.
:param job_name: (Required) Name of the job to be deleted
:type job_name: str
:param project_id: (Optional) the ID of the project that owns the Transfer
Job. If set to None or missing, the default project_id from the GCP
connection is used.
:type project_id: str
:rtype: None
"""
assert project_id is not None
(
self.get_conn() # pylint: disable=no-member
.transferJobs()
.patch(
jobName=job_name,
body={
PROJECT_ID: project_id,
TRANSFER_JOB: {STATUS1: GcpTransferJobsStatus.DELETED},
TRANSFER_JOB_FIELD_MASK: STATUS1,
},
)
.execute(num_retries=self.num_retries)
)
@GoogleCloudBaseHook.catch_http_exception
def cancel_transfer_operation(self, operation_name: str) -> None:
"""
        Cancels a transfer operation in Google Storage Transfer Service.
:param operation_name: Name of the transfer operation.
:type operation_name: str
:rtype: None
"""
self.get_conn().transferOperations().cancel( # pylint: disable=no-member
name=operation_name).execute(num_retries=self.num_retries)
@GoogleCloudBaseHook.catch_http_exception
def get_transfer_operation(self, operation_name: str) -> Dict:
"""
        Gets a transfer operation in Google Storage Transfer Service.
:param operation_name: (Required) Name of the transfer operation.
:type operation_name: str
:return: transfer operation
See:
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/Operation
:rtype: dict
"""
return (
self.get_conn() # pylint: disable=no-member
.transferOperations()
.get(name=operation_name)
.execute(num_retries=self.num_retries)
)
@GoogleCloudBaseHook.catch_http_exception
def list_transfer_operations(self, request_filter: Dict = None, **kwargs) -> List[Dict]:
"""
        Lists transfer operations in Google Storage Transfer Service that
        match the specified filter.
:param request_filter: (Required) A request filter, as described in
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs/list#body.QUERY_PARAMETERS.filter
With one additional improvement:
* project_id is optional if you have a project id defined
in the connection
See: :ref:`howto/connection:gcp`
:type request_filter: dict
:return: transfer operation
:rtype: list[dict]
"""
# To preserve backward compatibility
# TODO: remove one day
if request_filter is None:
if 'filter' in kwargs:
request_filter = kwargs['filter']
assert isinstance(request_filter, Dict)
warnings.warn("Use 'request_filter' instead of 'filter'", DeprecationWarning)
else:
raise TypeError(
"list_transfer_operations missing 1 required positional argument: 'request_filter'"
)
conn = self.get_conn()
request_filter = self._inject_project_id(request_filter, FILTER, FILTER_PROJECT_ID)
operations = [] # type: List[Dict]
request = conn.transferOperations().list( # pylint: disable=no-member
name=TRANSFER_OPERATIONS, filter=json.dumps(request_filter))
while request is not None:
response = request.execute(num_retries=self.num_retries)
if OPERATIONS in response:
operations.extend(response[OPERATIONS])
request = conn.transferOperations().list_next( # pylint: disable=no-member
previous_request=request, previous_response=response
)
return operations
@GoogleCloudBaseHook.catch_http_exception
def pause_transfer_operation(self, operation_name: str):
"""
        Pauses a transfer operation in Google Storage Transfer Service.
:param operation_name: (Required) Name of the transfer operation.
:type operation_name: str
:rtype: None
"""
self.get_conn().transferOperations().pause( # pylint: disable=no-member
name=operation_name).execute(num_retries=self.num_retries)
@GoogleCloudBaseHook.catch_http_exception
def resume_transfer_operation(self, operation_name: str):
"""
        Resumes a transfer operation in Google Storage Transfer Service.
:param operation_name: (Required) Name of the transfer operation.
:type operation_name: str
:rtype: None
"""
self.get_conn().transferOperations().resume( # pylint: disable=no-member
name=operation_name).execute(num_retries=self.num_retries)
@GoogleCloudBaseHook.catch_http_exception
def wait_for_transfer_job(
self,
job: Dict,
expected_statuses: Tuple[str] = (GcpTransferOperationStatus.SUCCESS,),
timeout: Optional[Union[float, timedelta]] = None
) -> None:
"""
Waits until the job reaches the expected state.
:param job: Transfer job
See:
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferJobs#TransferJob
:type job: dict
:param expected_statuses: State that is expected
See:
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferOperations#Status
:type expected_statuses: set[str]
:param timeout: Time in which the operation must end in seconds. If not specified, defaults to 60
seconds.
:type timeout: Optional[Union[float, timedelta]]
:rtype: None
"""
if timeout is None:
timeout = 60
elif isinstance(timeout, timedelta):
timeout = timeout.total_seconds()
start_time = time.time()
while time.time() - start_time < timeout:
operations = self.list_transfer_operations(
request_filter={FILTER_PROJECT_ID: job[PROJECT_ID], FILTER_JOB_NAMES: [job[NAME]]}
)
if GCPTransferServiceHook.operations_contain_expected_statuses(operations, expected_statuses):
return
time.sleep(TIME_TO_SLEEP_IN_SECONDS)
raise AirflowException("Timeout. The operation could not be completed within the allotted time.")
def _inject_project_id(self, body: Dict, param_name: str, target_key: str) -> Dict:
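        # Fall back to the hook's default project id when the caller did not
        # set one in the request body/filter; raise if neither is available.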
body = deepcopy(body)
body[target_key] = body.get(target_key, self.project_id)
if not body.get(target_key):
raise AirflowException(
"The project id must be passed either as `{}` key in `{}` parameter or as project_id "
"extra in GCP connection definition. Both are not set!".format(target_key, param_name)
)
return body
@staticmethod
def operations_contain_expected_statuses(
operations: List[Dict],
expected_statuses: Union[Tuple[str], str]
) -> bool:
"""
        Checks whether the operation list contains an operation with one of the
        expected statuses and, if so, returns True.
        If any operation is in the FAILED or ABORTED state, raises
        :class:`airflow.exceptions.AirflowException`.
:param operations: (Required) List of transfer operations to check.
:type operations: list[dict]
:param expected_statuses: (Required) status that is expected
See:
https://cloud.google.com/storage-transfer/docs/reference/rest/v1/transferOperations#Status
:type expected_statuses: set[str]
        :return: True if the operation list contains an operation with one of
            the expected statuses, False otherwise.
        :raises: airflow.exceptions.AirflowException if any operation is in the
            FAILED or ABORTED state.
:rtype: bool
"""
expected_statuses_set = (
{expected_statuses} if isinstance(expected_statuses, str) else set(expected_statuses)
)
if not operations:
return False
current_statuses = {operation[METADATA][STATUS] for operation in operations}
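        # If removing the expected statuses shrinks the set of current statuses,
        # at least one operation has already reached an expected status.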
if len(current_statuses - expected_statuses_set) != len(current_statuses):
return True
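        # Likewise, any overlap with FAILED/ABORTED means an operation has
        # ended in an error state, so surface it as an exception.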
if len(NEGATIVE_STATUSES - current_statuses) != len(NEGATIVE_STATUSES):
raise AirflowException(
'An unexpected operation status was encountered. Expected: {}'.format(
", ".join(expected_statuses_set)
)
)
return False
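# A minimal usage sketch (illustrative only; the bucket names are placeholders
# and a configured 'google_cloud_default' connection with a default project id
# is assumed):
#
#     hook = GCPTransferServiceHook()
#     job = hook.create_transfer_job(body={
#         DESCRIPTION: 'example-transfer',
#         STATUS: GcpTransferJobsStatus.ENABLED,
#         TRANSFER_SPEC: {GCS_DATA_SOURCE: {BUCKET_NAME: 'src-bucket'},
#                         GCS_DATA_SINK: {BUCKET_NAME: 'dst-bucket'}},
#     })
#     hook.wait_for_transfer_job(job, timeout=timedelta(minutes=10))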
| 37.849345
| 123
| 0.657629
|
1ff37139b14f59955ddb403732ffd18164cae9aa
| 64,171
|
py
|
Python
|
src/genie/libs/parser/nxos/tests/test_show_fdb.py
|
svautour/genieparser
|
7416c9a4b44582be835a0646fb7fad92a5181c7d
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/nxos/tests/test_show_fdb.py
|
svautour/genieparser
|
7416c9a4b44582be835a0646fb7fad92a5181c7d
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/nxos/tests/test_show_fdb.py
|
svautour/genieparser
|
7416c9a4b44582be835a0646fb7fad92a5181c7d
|
[
"Apache-2.0"
] | null | null | null |
#!/bin/env python
import unittest
from unittest.mock import Mock
from pyats.topology import Device
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.libs.parser.nxos.show_fdb import ShowMacAddressTableVni, \
ShowMacAddressTable, ShowMacAddressTableAgingTime, \
ShowMacAddressTableLimit, ShowSystemInternalL2fwderMac
# ==================================================
# Unit test for:
# 'show mac address-table vni <WORD> | grep <WORD>'
# 'show mac address-table local vni <WORD>'
# 'show mac address-table'
# 'show mac address-table aging-time'
# 'show mac address-table limit'
# 'show system internal l2fwder mac'
# ==================================================
class test_show_mac_address_table_vni(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'mac_table': {
'vlans': {
'1001': {
'mac_addresses': {
'0000.04b1.0000': {
'entry': 'C',
'mac_address': '0000.04b1.0000',
'interfaces': {
'Nve1(10.9.0.101)': {
'age': '0',
'mac_type': 'dynamic',
'interface': 'Nve1(10.9.0.101)',
},
},
'ntfy': 'F',
'secure': 'F',
},
},
'vlan': '1001',
},
},
},
}
golden_output = {'execute.return_value': '''\
CH-P2-TOR-1# show mac address-table vni 2001001 | grep nve1
C 1001 0000.04b1.0000 dynamic 0 F F nve1(10.9.0.101)
'''
}
golden_parsed_output_1 = {
'mac_table': {
'vlans': {
'1001': {
'mac_addresses': {
'0000.0191.0000': {
'entry': '*',
'mac_address': '0000.0191.0000',
'ntfy': 'F',
'interfaces': {
'Ethernet1/11': {
'age': '0',
'mac_type': 'dynamic',
'interface': 'Ethernet1/11',
},
},
'secure': 'F',
},
'00f1.0000.0000': {
'entry': '*',
'mac_address': '00f1.0000.0000',
'ntfy': 'F',
'interfaces': {
'Ethernet1/11': {
'age': '0',
'mac_type': 'dynamic',
'interface': 'Ethernet1/11',
},
},
'secure': 'F',
},
'00f5.0000.0000': {
'entry': '*',
'mac_address': '00f5.0000.0000',
'ntfy': 'F',
'interfaces': {
'Ethernet1/11': {
'age': '0',
'mac_type': 'dynamic',
'interface': 'Ethernet1/11',
},
},
'secure': 'F',
},
},
'vlan': '1001',
},
},
},
}
golden_output_1 = {'execute.return_value': '''\
CH-P2-TOR-1# show mac address-table local vni 2001001
Legend:
* - primary entry, G - Gateway MAC, (R) - Routed MAC, O - Overlay MAC
age - seconds since last seen,+ - primary entry using vPC Peer-Link,
(T) - True, (F) - False, C - ControlPlane MAC, ~ - vsan
VLAN MAC Address Type age Secure NTFY Ports
---------+-----------------+--------+---------+------+----+------------------
* 1001 0000.0191.0000 dynamic 0 F F Eth1/11
* 1001 00f1.0000.0000 dynamic 0 F F Eth1/11
* 1001 00f5.0000.0000 dynamic 0 F F Eth1/11
'''
}
def test_golden(self):
self.maxDiff = None
self.device = Mock(**self.golden_output)
obj = ShowMacAddressTableVni(device=self.device)
parsed_output = obj.parse(vni='2001001', interface='nve1')
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_golden_1(self):
self.maxDiff = None
self.device = Mock(**self.golden_output_1)
obj = ShowMacAddressTableVni(device=self.device)
parsed_output = obj.parse(vni='2001001')
self.assertEqual(parsed_output, self.golden_parsed_output_1)
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowMacAddressTableVni(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse(vni='2001001', interface='nve1')
class test_show_mac_address_table_aging_time(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {'mac_aging_time': 120}
golden_output = {'execute.return_value': '''\
N95_1# show mac address-table aging-time
Aging Time
----------
120
'''
}
def test_golden(self):
self.device = Mock(**self.golden_output)
obj = ShowMacAddressTableAgingTime(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowMacAddressTableAgingTime(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
class test_show_mac_address_table(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'mac_table': {
'vlans': {
'-': {
'mac_addresses': {
'0000.dead.beef': {
'entry': 'G',
'mac_address': '0000.dead.beef',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'(R)': {
'age': '-',
'mac_type': 'static',
'interface': '(R)',
},
'Sup-eth1(R)(Lo0)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)(Lo0)',
},
},
'secure': 'F',
}
},
'vlan': '-',
},
'10': {
'mac_addresses': {
'aaaa.bbbb.cccc': {
'entry': '*',
'mac_address': 'aaaa.bbbb.cccc',
'ntfy': 'F',
'interfaces': {
'Ethernet1/2': {
'age': '-',
'mac_type': 'static',
'interface': 'Ethernet1/2',
},
},
'secure': 'F',
},
},
'vlan': '10',
},
'100': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '100',
},
'1000': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1000',
},
'1005': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1005',
},
'1006': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1006',
},
'1007': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1007',
},
'1008': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1008',
},
'1009': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1009',
},
'101': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '101',
},
'102': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '102',
},
'103': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '103',
},
'105': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '105',
},
'106': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '106',
},
'107': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '107',
},
'108': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '108',
},
'109': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '109',
},
'110': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '110',
},
'111': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '111',
},
'112': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '112',
},
'113': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '113',
},
'114': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '114',
},
'20': {
'mac_addresses': {
'aaaa.bbbb.cccc': {
'drop': {
'age': '-',
'drop': True,
'mac_type': 'static',
},
'entry': '*',
'mac_address': 'aaaa.bbbb.cccc',
'ntfy': 'F',
'secure': 'F',
},
},
'vlan': '20',
},
'30': {
'mac_addresses': {
'aaaa.bbbb.cccc': {
'drop': {
'age': '-',
'drop': True,
'mac_type': 'static',
},
'entry': '*',
'mac_address': 'aaaa.bbbb.cccc',
'ntfy': 'F',
'secure': 'F',
},
},
'vlan': '30',
},
'2000': {
'mac_addresses': {
'7e00.c000.0007': {
'mac_address': '7e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'vPC Peer-Link(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'vPC Peer-Link(R)',
},
},
'secure': 'F',
},
},
'vlan': '2000',
},
'3000': {
'mac_addresses': {
'5e00.c000.0007': {
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '3000',
},
'4000': {
'mac_addresses': {
'5e00.c000.0007': {
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '~~~',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '4000',
}
}
}
}
golden_output = {'execute.return_value': '''\
N95_1# show mac address-table
Legend:
* - primary entry, G - Gateway MAC, (R) - Routed MAC, O - Overlay MAC
age - seconds since last seen,+ - primary entry using vPC Peer-Link,
(T) - True, (F) - False, C - ControlPlane MAC, ~ - vsan
VLAN MAC Address Type age Secure NTFY Ports
---------+-----------------+--------+---------+------+----+---------------
* 10 aaaa.bbbb.cccc static - F F Eth1/2
* 20 aaaa.bbbb.cccc static - F F Drop
* 30 aaaa.bbbb.cccc static - F F Drop
G - 0000.dead.beef static - F F sup-eth1(R)
G - 5e00.c000.0007 static - F F (R)
G - 5e00.c000.0007 static - F F sup-eth1(R) (Lo0)
G 100 5e00.c000.0007 static - F F sup-eth1(R)
G 101 5e00.c000.0007 static - F F sup-eth1(R)
G 102 5e00.c000.0007 static - F F sup-eth1(R)
G 103 5e00.c000.0007 static - F F sup-eth1(R)
G 105 5e00.c000.0007 static - F F sup-eth1(R)
G 106 5e00.c000.0007 static - F F sup-eth1(R)
G 107 5e00.c000.0007 static - F F sup-eth1(R)
G 108 5e00.c000.0007 static - F F sup-eth1(R)
G 109 5e00.c000.0007 static - F F sup-eth1(R)
G 110 5e00.c000.0007 static - F F sup-eth1(R)
G 111 5e00.c000.0007 static - F F sup-eth1(R)
G 112 5e00.c000.0007 static - F F sup-eth1(R)
G 113 5e00.c000.0007 static - F F sup-eth1(R)
G 114 5e00.c000.0007 static - F F sup-eth1(R)
G 1000 5e00.c000.0007 static - F F sup-eth1(R)
G 1005 5e00.c000.0007 static - F F sup-eth1(R)
G 1006 5e00.c000.0007 static - F F sup-eth1(R)
G 1007 5e00.c000.0007 static - F F sup-eth1(R)
G 1008 5e00.c000.0007 static - F F sup-eth1(R)
G 1009 5e00.c000.0007 static - F F sup-eth1(R)
2000 7e00.c000.0007 static - F F vPC Peer-Link(R)
3000 5e00.c000.0007 static - F F sup-eth1(R)
4000 5e00.c000.0007 static ~~~ F F sup-eth1(R)
'''
}
golden_output_address_interface_vlan = {'execute.return_value': '''
N95_1# show mac address-table address 5e00.c000.0007 interface ethernet1/2 vlan 1006
Legend:
* - primary entry, G - Gateway MAC, (R) - Routed MAC, O - Overlay MAC
age - seconds since last seen,+ - primary entry using vPC Peer-Link,
(T) - True, (F) - False, C - ControlPlane MAC, ~ - vsan
VLAN MAC Address Type age Secure NTFY Ports
---------+-----------------+--------+---------+------+----+---------------
G 1006 5e00.c000.0007 static - F F Eth1/2
'''}
golden_parsed_output_address_interface_vlan = {
'mac_table': {
'vlans': {
'1006': {
'vlan': '1006',
'mac_addresses': {
'5e00.c000.0007': {
'mac_address': '5e00.c000.0007',
'entry': 'G',
'interfaces': {
'Ethernet1/2': {
'interface': 'Ethernet1/2',
'mac_type': 'static',
'age': '-'
}
},
'secure': 'F',
'ntfy': 'F'
}
}
}
}
}
}
golden_output_address_interface = {'execute.return_value': '''
N95_1# show mac address-table address 5e00.c000.0007 interface ethernet1/2
Legend:
* - primary entry, G - Gateway MAC, (R) - Routed MAC, O - Overlay MAC
age - seconds since last seen,+ - primary entry using vPC Peer-Link,
(T) - True, (F) - False, C - ControlPlane MAC, ~ - vsan
VLAN MAC Address Type age Secure NTFY Ports
---------+-----------------+--------+---------+------+----+---------------
G 102 5e00.c000.0007 static - F F Eth1/2
G 107 5e00.c000.0007 static - F F Eth1/2
G 1006 5e00.c000.0007 static - F F Eth1/2
'''}
golden_parsed_output_address_interface = {
'mac_table': {
'vlans': {
'102': {
'vlan': '102',
'mac_addresses': {
'5e00.c000.0007': {
'mac_address': '5e00.c000.0007',
'entry': 'G',
'interfaces': {
'Ethernet1/2': {
'interface': 'Ethernet1/2',
'mac_type': 'static',
'age': '-'
}
},
'secure': 'F',
'ntfy': 'F'
}
}
},
'107': {
'vlan': '107',
'mac_addresses': {
'5e00.c000.0007': {
'mac_address': '5e00.c000.0007',
'entry': 'G',
'interfaces': {
'Ethernet1/2': {
'interface': 'Ethernet1/2',
'mac_type': 'static',
'age': '-'
}
},
'secure': 'F',
'ntfy': 'F'
}
}
},
'1006': {
'vlan': '1006',
'mac_addresses': {
'5e00.c000.0007': {
'mac_address': '5e00.c000.0007',
'entry': 'G',
'interfaces': {
'Ethernet1/2': {
'interface': 'Ethernet1/2',
'mac_type': 'static',
'age': '-'
}
},
'secure': 'F',
'ntfy': 'F'
}
}
}
}
}
}
def test_golden(self):
self.maxDiff = None
self.device = Mock(**self.golden_output)
obj = ShowMacAddressTable(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowMacAddressTable(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
def test_address_interface_vlan(self):
self.device = Mock(**self.golden_output_address_interface_vlan)
obj = ShowMacAddressTable(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_address_interface_vlan)
def test_address_interface(self):
self.device = Mock(**self.golden_output_address_interface)
obj = ShowMacAddressTable(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output_address_interface)
class test_show_mac_address_table_limit(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'configured_system_action': 'Flood',
'configured_system_limit': 111,
'current_system_count': 3,
'currently_system_is': 'Flooding Unknown SA',
'mac_table': {
'vlans': {
'1': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '1',
},
'10': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 1,
'currently': 'Flooding Unknown SA',
'vlan': '10',
},
'100': {
'cfg_action': 'Flood',
'conf_limit': 200,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '100',
},
'1000': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '1000',
},
'1005': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '1005',
},
'1006': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '1006',
},
'1007': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '1007',
},
'1008': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '1008',
},
'1009': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '1009',
},
'101': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '101',
},
'102': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '102',
},
'103': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '103',
},
'104': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '104',
},
'105': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '105',
},
'106': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '106',
},
'107': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '107',
},
'108': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '108',
},
'109': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '109',
},
'110': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '110',
},
'111': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '111',
},
'112': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '112',
},
'113': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '113',
},
'114': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '114',
},
'115': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '115',
},
'185': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '185',
},
'20': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 1,
'currently': 'Flooding Unknown SA',
'vlan': '20',
},
'285': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '285',
},
'30': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 1,
'currently': 'Flooding Unknown SA',
'vlan': '30',
},
'910': {
'cfg_action': 'Flood',
'conf_limit': 196000,
'curr_count': 0,
'currently': 'Flooding Unknown SA',
'vlan': '910',
},
},
},
}
golden_output = {'execute.return_value': '''\
N95_1# show mac address-table limit
Configured System Limit: 111
Current System Count: 3
Configured System Action: Flood
Currently System is: Flooding Unknown SA
Vlan Conf Limit Curr Count Cfg Action Currently
---- ------------ --------- --------- --------
1 196000 0 Flood Flooding Unknown SA
10 196000 1 Flood Flooding Unknown SA
20 196000 1 Flood Flooding Unknown SA
30 196000 1 Flood Flooding Unknown SA
100 200 0 Flood Flooding Unknown SA
101 196000 0 Flood Flooding Unknown SA
102 196000 0 Flood Flooding Unknown SA
103 196000 0 Flood Flooding Unknown SA
104 196000 0 Flood Flooding Unknown SA
105 196000 0 Flood Flooding Unknown SA
106 196000 0 Flood Flooding Unknown SA
107 196000 0 Flood Flooding Unknown SA
108 196000 0 Flood Flooding Unknown SA
109 196000 0 Flood Flooding Unknown SA
110 196000 0 Flood Flooding Unknown SA
111 196000 0 Flood Flooding Unknown SA
112 196000 0 Flood Flooding Unknown SA
113 196000 0 Flood Flooding Unknown SA
114 196000 0 Flood Flooding Unknown SA
115 196000 0 Flood Flooding Unknown SA
185 196000 0 Flood Flooding Unknown SA
285 196000 0 Flood Flooding Unknown SA
910 196000 0 Flood Flooding Unknown SA
1000 196000 0 Flood Flooding Unknown SA
1005 196000 0 Flood Flooding Unknown SA
1006 196000 0 Flood Flooding Unknown SA
1007 196000 0 Flood Flooding Unknown SA
1008 196000 0 Flood Flooding Unknown SA
1009 196000 0 Flood Flooding Unknown SA
'''
}
def test_golden(self):
self.maxDiff = None
self.device = Mock(**self.golden_output)
obj = ShowMacAddressTableLimit(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowMacAddressTableLimit(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
class test_show_system_internal_l2fwder_mac(unittest.TestCase):
device = Device(name='aDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {
'mac_table': {
'vlans': {
'-': {
'mac_addresses': {
'5e00:c000:0007': {
'entry': 'G',
'mac_address': '5e00:c000:0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '-',
},
'10': {
'mac_addresses': {
'aaaa.bbbb.cccc': {
'entry': '*',
'mac_address': 'aaaa.bbbb.cccc',
'ntfy': 'F',
'interfaces': {
'Ethernet1/2': {
'age': '-',
'mac_type': 'static',
'interface': 'Ethernet1/2',
},
},
'secure': 'F',
},
},
'vlan': '10',
},
'100': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '100',
},
'1000': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1000',
},
'1005': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1005',
},
'1006': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1006',
},
'1007': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1007',
},
'1008': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1008',
},
'1009': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '1009',
},
'101': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '101',
},
'102': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '102',
},
'103': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '103',
},
'105': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '105',
},
'106': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '106',
},
'107': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '107',
},
'108': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '108',
},
'109': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '109',
},
'110': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '110',
},
'111': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '111',
},
'112': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '112',
},
'113': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '113',
},
'114': {
'mac_addresses': {
'5e00.c000.0007': {
'entry': 'G',
'mac_address': '5e00.c000.0007',
'ntfy': 'F',
'interfaces': {
'Sup-eth1(R)': {
'age': '-',
'mac_type': 'static',
'interface': 'Sup-eth1(R)',
},
},
'secure': 'F',
},
},
'vlan': '114',
},
},
},
}
golden_output = {'execute.return_value': '''\
N95_1# show system internal l2fwder mac
Legend:
* - primary entry, G - Gateway MAC, (R) - Routed MAC, O - Overlay MAC
age - seconds since last seen,+ - primary entry using vPC Peer-Link,
(T) - True, (F) - False, C - ControlPlane MAC
VLAN MAC Address Type age Secure NTFY Ports
---------+-----------------+--------+---------+------+----+---------------
G 114 5e00.c000.0007 static - F F sup-eth1(R)
G 112 5e00.c000.0007 static - F F sup-eth1(R)
G 113 5e00.c000.0007 static - F F sup-eth1(R)
G 110 5e00.c000.0007 static - F F sup-eth1(R)
G 111 5e00.c000.0007 static - F F sup-eth1(R)
G 108 5e00.c000.0007 static - F F sup-eth1(R)
G 109 5e00.c000.0007 static - F F sup-eth1(R)
G 106 5e00.c000.0007 static - F F sup-eth1(R)
G 107 5e00.c000.0007 static - F F sup-eth1(R)
G 105 5e00.c000.0007 static - F F sup-eth1(R)
G 102 5e00.c000.0007 static - F F sup-eth1(R)
G 103 5e00.c000.0007 static - F F sup-eth1(R)
G 100 5e00.c000.0007 static - F F sup-eth1(R)
G 101 5e00.c000.0007 static - F F sup-eth1(R)
G - 5e00:c000:0007 static - F F sup-eth1(R)
* 1 fa16.3eef.6e79 dynamic 00:01:02 F F Eth1/4
* 100 fa16.3eef.6e79 dynamic 00:05:38 F F Eth1/4
G 1008 5e00.c000.0007 static - F F sup-eth1(R)
G 1009 5e00.c000.0007 static - F F sup-eth1(R)
G 1006 5e00.c000.0007 static - F F sup-eth1(R)
G 1007 5e00.c000.0007 static - F F sup-eth1(R)
G 1005 5e00.c000.0007 static - F F sup-eth1(R)
G 1000 5e00.c000.0007 static - F F sup-eth1(R)
* 10 aaaa.bbbb.cccc static - F F Eth1/2
1 1 -00:00:de:ad:be:ef - 1
'''
}
def test_golden(self):
self.maxDiff = None
self.device = Mock(**self.golden_output)
obj = ShowSystemInternalL2fwderMac(device=self.device)
parsed_output = obj.parse()
self.assertEqual(parsed_output, self.golden_parsed_output)
def test_empty(self):
self.device = Mock(**self.empty_output)
obj = ShowSystemInternalL2fwderMac(device=self.device)
with self.assertRaises(SchemaEmptyParserError):
parsed_output = obj.parse()
if __name__ == '__main__':
unittest.main()
| 40.384519
| 92
| 0.279846
|
061aab78c993b6f562a4d7b03bbff6bd4bb6cb41
| 2,220
|
py
|
Python
|
vgg/vgg.py
|
TheRiddance/dcgan
|
c414696a38a48c3ff471e6ef56f04f9aef52e5c5
|
[
"BSD-3-Clause"
] | null | null | null |
vgg/vgg.py
|
TheRiddance/dcgan
|
c414696a38a48c3ff471e6ef56f04f9aef52e5c5
|
[
"BSD-3-Clause"
] | null | null | null |
vgg/vgg.py
|
TheRiddance/dcgan
|
c414696a38a48c3ff471e6ef56f04f9aef52e5c5
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2015-2017 Anish Athalye. Released under GPLv3.
# https://github.com/anishathalye/neural-style/
import tensorflow as tf
import numpy as np
import scipy.io
VGG19_LAYERS = (
'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',
'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',
'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',
'relu5_3', 'conv5_4', 'relu5_4'
)
def load_net(data_path):
data = scipy.io.loadmat(data_path)
mean = data['normalization'][0][0][0]
mean_pixel = np.mean(mean, axis=(0, 1))
weights = data['layers'][0]
return weights, mean_pixel
def net_preloaded(weights, input_image, pooling):
net = {}
current = input_image
for i, name in enumerate(VGG19_LAYERS):
kind = name[:4]
if kind == 'conv':
kernels, bias = weights[i][0][0][0][0]
# matconvnet: weights are [width, height, in_channels, out_channels]
# tensorflow: weights are [height, width, in_channels, out_channels]
kernels = np.transpose(kernels, (1, 0, 2, 3))
bias = bias.reshape(-1)
current = _conv_layer(current, kernels, bias)
elif kind == 'relu':
current = tf.nn.relu(current)
elif kind == 'pool':
current = _pool_layer(current, pooling)
net[name] = current
assert len(net) == len(VGG19_LAYERS)
return net
def _conv_layer(input, weights, bias):
conv = tf.nn.conv2d(input, tf.constant(weights), strides=(1, 1, 1, 1),
padding='SAME')
return tf.nn.bias_add(conv, bias)
def _pool_layer(input, pooling):
if pooling == 'avg':
return tf.nn.avg_pool(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1),
padding='SAME')
else:
return tf.nn.max_pool(input, ksize=(1, 2, 2, 1), strides=(1, 2, 2, 1),
padding='SAME')
def preprocess(image, mean_pixel):
return image - mean_pixel
def unprocess(image, mean_pixel):
return image + mean_pixel
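# A minimal usage sketch (illustrative only; the .mat path is hypothetical and
# the TF1-style graph API used above is assumed):
#
#     weights, mean_pixel = load_net('imagenet-vgg-verydeep-19.mat')
#     image = tf.placeholder(tf.float32, shape=(1, 224, 224, 3))
#     net = net_preloaded(weights, preprocess(image, mean_pixel), pooling='max')
#     content_features = net['relu4_2']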
| 31.267606
| 80
| 0.603604
|
108ec807e3b0fd9d951472180469d564c0ae0992
| 348
|
py
|
Python
|
caldera/models/__init__.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | 2
|
2021-12-13T17:52:17.000Z
|
2021-12-13T17:52:18.000Z
|
caldera/models/__init__.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | 4
|
2020-10-06T21:06:15.000Z
|
2020-10-10T01:18:23.000Z
|
caldera/models/__init__.py
|
jvrana/caldera
|
a346324e77f20739e00a82f97530dda4906f59dd
|
[
"MIT"
] | null | null | null |
r"""
Caldera models
.. autosummary::
:toctree: generated/
GraphEncoder
GraphCore
EncodeCoreDecode
"""
from caldera.models.encoder_core_decoder import EncodeCoreDecode
from caldera.models.graph_core import GraphCore
from caldera.models.graph_encoder import GraphEncoder
__all__ = ["EncodeCoreDecode", "GraphCore", "GraphEncoder"]
| 21.75
| 64
| 0.778736
|
25392393ab81b731301a38825bf2967e7e3be196
| 235
|
py
|
Python
|
src/aijack/attack/poison/__init__.py
|
Koukyosyumei/AIJack
|
9545d3828907b54965ede85e0e12cb32eef54294
|
[
"MIT"
] | 24
|
2021-11-17T02:16:47.000Z
|
2022-03-27T01:04:08.000Z
|
src/aijack/attack/poison/__init__.py
|
Koukyosyumei/AIJack
|
9545d3828907b54965ede85e0e12cb32eef54294
|
[
"MIT"
] | 9
|
2021-12-03T06:09:27.000Z
|
2022-03-29T06:33:53.000Z
|
src/aijack/attack/poison/__init__.py
|
Koukyosyumei/AIJack
|
9545d3828907b54965ede85e0e12cb32eef54294
|
[
"MIT"
] | 5
|
2022-01-12T09:58:04.000Z
|
2022-03-17T09:29:04.000Z
|
"""Subpackage for poisoning attack, which inserts malicious data to the training dataset,
so that the performance of the trained machine learning model will degregate.
"""
from .poison_attack import Poison_attack_sklearn # noqa: F401
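# Re-exported here so downstream code can simply do:
#     from aijack.attack.poison import Poison_attack_sklearn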
| 47
| 89
| 0.808511
|
38d5cfd2a85869afbe7db7303c339df12c6c8fb1
| 684
|
py
|
Python
|
examples/src/Text/ReplacingText.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
examples/src/Text/ReplacingText.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
examples/src/Text/ReplacingText.py
|
aspose-slides/Aspose.Slides-for-Python-via-.NET
|
c55ad5c71f942598f1e67e22a52cbcd1cb286467
|
[
"MIT"
] | null | null | null |
import aspose.slides as slides
#ExStart:ReplacingText
# The path to the documents directory.
dataDir = "./examples/data/"
outDir = "./examples/out/"
# Instantiate Presentation class that represents PPTX
with slides.Presentation(dataDir + "text_default_fonts.pptx") as pres:
# Access first slide
sld = pres.slides[0]
# Iterate through shapes to find the placeholder
for shp in sld.shapes:
if shp.placeholder is not None:
# Change the text of each placeholder
shp.text_frame.text = "This is Placeholder"
# Save the PPTX to Disk
pres.save(outDir + "text_replacing_out.pptx", slides.export.SaveFormat.PPTX)
#ExEnd:ReplacingText
| 31.090909
| 80
| 0.71345
|
9c11fe7c5ac6eae4987d777d0e3eab52defc683b
| 889
|
py
|
Python
|
the_good_dollar/main/migrations/0001_initial.py
|
fouadsan/the_good_dollar
|
f68d2c0a54302d1bec2d8ae86ef180fc5e7f076a
|
[
"MIT"
] | null | null | null |
the_good_dollar/main/migrations/0001_initial.py
|
fouadsan/the_good_dollar
|
f68d2c0a54302d1bec2d8ae86ef180fc5e7f076a
|
[
"MIT"
] | null | null | null |
the_good_dollar/main/migrations/0001_initial.py
|
fouadsan/the_good_dollar
|
f68d2c0a54302d1bec2d8ae86ef180fc5e7f076a
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.5 on 2021-10-07 16:55
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='SmallPub',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(blank=True, max_length=250, null=True)),
('image', models.ImageField(blank=True, null=True, upload_to='cat_imgs/')),
],
),
migrations.CreateModel(
name='Viewer',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('count', models.IntegerField(default=0)),
],
),
]
| 29.633333
| 117
| 0.565804
|
aa1986c75bb2b0121a117043e4f3fc8dfb6d2502
| 51,327
|
py
|
Python
|
Tests/test_seq.py
|
cbrueffer/biopython
|
1ffb1d92d4735166089e28ac07ee614d5ec80070
|
[
"PostgreSQL"
] | null | null | null |
Tests/test_seq.py
|
cbrueffer/biopython
|
1ffb1d92d4735166089e28ac07ee614d5ec80070
|
[
"PostgreSQL"
] | null | null | null |
Tests/test_seq.py
|
cbrueffer/biopython
|
1ffb1d92d4735166089e28ac07ee614d5ec80070
|
[
"PostgreSQL"
] | null | null | null |
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
from __future__ import print_function
import array
import copy
import sys
import warnings
# Remove unittest2 import after dropping support for Python2.6
if sys.version_info < (2, 7):
try:
import unittest2 as unittest
except ImportError:
from Bio import MissingPythonDependencyError
raise MissingPythonDependencyError("Under Python 2.6 this test needs the unittest2 library")
else:
import unittest
from Bio import Alphabet
from Bio import Seq
from Bio.Alphabet import IUPAC, Gapped
from Bio.Data.IUPACData import ambiguous_dna_complement, ambiguous_rna_complement
from Bio.Data.IUPACData import ambiguous_dna_values, ambiguous_rna_values
from Bio.Data.CodonTable import TranslationError
from Bio.Data.CodonTable import standard_dna_table
from Bio.Seq import MutableSeq
if sys.version_info[0] == 3:
array_indicator = "u"
else:
array_indicator = "c"
test_seqs = [
Seq.Seq("TCAAAAGGATGCATCATG", IUPAC.unambiguous_dna),
Seq.Seq("T", IUPAC.ambiguous_dna),
Seq.Seq("ATGAAACTG"),
Seq.Seq("ATGAARCTG"),
Seq.Seq("AWGAARCKG"), # Note no U or T
Seq.Seq("".join(ambiguous_rna_values)),
Seq.Seq("".join(ambiguous_dna_values)),
Seq.Seq("".join(ambiguous_rna_values), Alphabet.generic_rna),
Seq.Seq("".join(ambiguous_dna_values), Alphabet.generic_dna),
Seq.Seq("".join(ambiguous_rna_values), IUPAC.IUPACAmbiguousRNA()),
Seq.Seq("".join(ambiguous_dna_values), IUPAC.IUPACAmbiguousDNA()),
Seq.Seq("AWGAARCKG", Alphabet.generic_dna),
Seq.Seq("AUGAAACUG", Alphabet.generic_rna),
Seq.Seq("ATGAAACTG", IUPAC.unambiguous_dna),
Seq.Seq("ATGAAA-CTG", Alphabet.Gapped(IUPAC.unambiguous_dna)),
Seq.Seq("ATGAAACTGWN", IUPAC.ambiguous_dna),
Seq.Seq("AUGAAACUG", Alphabet.generic_rna),
Seq.Seq("AUGAAA==CUG", Alphabet.Gapped(Alphabet.generic_rna, "=")),
Seq.Seq("AUGAAACUG", IUPAC.unambiguous_rna),
Seq.Seq("AUGAAACUGWN", IUPAC.ambiguous_rna),
Seq.Seq("ATGAAACTG", Alphabet.generic_nucleotide),
Seq.Seq("AUGAAACTG", Alphabet.generic_nucleotide), # U and T
Seq.MutableSeq("ATGAAACTG", Alphabet.generic_dna),
Seq.MutableSeq("AUGaaaCUG", IUPAC.unambiguous_rna),
Seq.Seq("ACTGTCGTCT", Alphabet.generic_protein),
]
protein_seqs = [
Seq.Seq("ATCGPK", IUPAC.protein),
Seq.Seq("T.CGPK", Alphabet.Gapped(IUPAC.protein, ".")),
Seq.Seq("T-CGPK", Alphabet.Gapped(IUPAC.protein, "-")),
Seq.Seq("MEDG-KRXR*", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "*"), "-")),
Seq.MutableSeq("ME-K-DRXR*XU", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "*"), "-")),
Seq.Seq("MEDG-KRXR@", Alphabet.HasStopCodon(Alphabet.Gapped(IUPAC.extended_protein, "-"), "@")),
Seq.Seq("ME-KR@", Alphabet.HasStopCodon(Alphabet.Gapped(IUPAC.protein, "-"), "@")),
Seq.Seq("MEDG.KRXR@", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "@"), ".")),
]
class TestSeq(unittest.TestCase):
def setUp(self):
self.s = Seq.Seq("TCAAAAGGATGCATCATG", IUPAC.unambiguous_dna)
def test_as_string(self):
"""Test converting Seq to string"""
self.assertEqual("TCAAAAGGATGCATCATG", str(self.s))
def test_construction_using_a_seq_object(self):
"""Test using a Seq object to initialize another Seq object"""
with self.assertRaises(TypeError):
Seq.Seq(self.s)
def test_repr(self):
"""Test representation of Seq object"""
self.assertEqual("Seq('TCAAAAGGATGCATCATG', IUPACUnambiguousDNA())",
repr(self.s))
def test_truncated_repr(self):
seq = "TCAAAAGGATGCATCATGTCAAAAGGATGCATCATGTCAAAAGGATGCATCATGTCAAAAGGA"
expected = "Seq('TCAAAAGGATGCATCATGTCAAAAGGATGCATCATGTCAAAAGGATGCATCATG...GGA', IUPACAmbiguousDNA())"
self.assertEqual(expected, repr(Seq.Seq(seq, IUPAC.ambiguous_dna)))
def test_length(self):
"""Test len method on Seq object"""
self.assertEqual(18, len(self.s))
def test_first_nucleotide(self):
"""Test getting first nucleotide of Seq"""
self.assertEqual("T", self.s[0])
def test_last_nucleotide(self):
"""Test getting last nucleotide of Seq"""
self.assertEqual("G", self.s[-1])
def test_slicing(self):
"""Test slicing of Seq"""
self.assertEqual("AA", str(self.s[3:5]))
def test_reverse(self):
"""Test reverse using -1 stride"""
self.assertEqual("GTACTACGTAGGAAAACT", self.s[::-1])
def test_extract_third_nucleotide(self):
"""Test extracting every third nucleotide (slicing with stride 3)"""
self.assertEqual("TAGTAA", str(self.s[0::3]))
self.assertEqual("CAGGTT", str(self.s[1::3]))
self.assertEqual("AAACCG", str(self.s[2::3]))
def test_alphabet_letters(self):
"""Test nucleotides in DNA Seq"""
self.assertEqual("GATC", self.s.alphabet.letters)
def test_alphabet(self):
"""Test alphabet of derived Seq object"""
t = Seq.Seq("T", IUPAC.unambiguous_dna)
u = self.s + t
self.assertEqual("IUPACUnambiguousDNA()", str(u.alphabet))
def test_length_concatenated_unambiguous_seq(self):
"""Test length of concatenated Seq object with unambiguous DNA"""
t = Seq.Seq("T", IUPAC.unambiguous_dna)
u = self.s + t
self.assertEqual(19, len(u))
def test_concatenation_of_seq(self):
t = Seq.Seq("T", IUPAC.unambiguous_dna)
u = self.s + t
self.assertEqual(str(self.s) + "T", str(u))
def test_concatenation_error(self):
"""Test DNA Seq objects cannot be concatenated with Protein Seq
objects"""
with self.assertRaises(TypeError):
self.s + Seq.Seq("T", IUPAC.protein)
def test_concatenation_of_ambiguous_and_unambiguous_dna(self):
"""Test concatenated Seq object with ambiguous and unambiguous DNA
returns ambiguous Seq"""
t = Seq.Seq("T", IUPAC.ambiguous_dna)
u = self.s + t
self.assertEqual("IUPACAmbiguousDNA()", str(u.alphabet))
def test_ungap(self):
self.assertEqual("ATCCCA", str(Seq.Seq("ATC-CCA").ungap("-")))
with self.assertRaises(ValueError):
Seq.Seq("ATC-CCA").ungap("--")
with self.assertRaises(ValueError):
Seq.Seq("ATC-CCA").ungap()
class TestSeqStringMethods(unittest.TestCase):
def setUp(self):
self.s = Seq.Seq("TCAAAAGGATGCATCATG", IUPAC.unambiguous_dna)
self.dna = [
Seq.Seq("ATCG", IUPAC.ambiguous_dna),
Seq.Seq("gtca", Alphabet.generic_dna),
Seq.MutableSeq("GGTCA", Alphabet.generic_dna),
Seq.Seq("CTG-CA", Alphabet.Gapped(IUPAC.unambiguous_dna, "-")),
]
self.rna = [
Seq.Seq("AUUUCG", IUPAC.ambiguous_rna),
Seq.MutableSeq("AUUCG", IUPAC.ambiguous_rna),
Seq.Seq("uCAg", Alphabet.generic_rna),
Seq.MutableSeq("UC-AG", Alphabet.Gapped(Alphabet.generic_rna, "-")),
Seq.Seq("U.CAG", Alphabet.Gapped(Alphabet.generic_rna, ".")),
]
self.nuc = [Seq.Seq("ATCG", Alphabet.generic_nucleotide)]
self.protein = [
Seq.Seq("ATCGPK", IUPAC.protein),
Seq.Seq("atcGPK", Alphabet.generic_protein),
Seq.Seq("T.CGPK", Alphabet.Gapped(IUPAC.protein, ".")),
Seq.Seq("T-CGPK", Alphabet.Gapped(IUPAC.protein, "-")),
Seq.Seq("MEDG-KRXR*", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "*"), "-")),
Seq.MutableSeq("ME-K-DRXR*XU", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "*"), "-")),
Seq.Seq("MEDG-KRXR@", Alphabet.HasStopCodon(Alphabet.Gapped(IUPAC.extended_protein, "-"), "@")),
Seq.Seq("ME-KR@", Alphabet.HasStopCodon(Alphabet.Gapped(IUPAC.protein, "-"), "@")),
Seq.Seq("MEDG.KRXR@", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "@"), ".")),
]
self.test_chars = ["-", Seq.Seq("-"), Seq.Seq("*"), "-X@"]
def test_string_methods(self):
for a in self.dna + self.rna + self.nuc + self.protein:
if isinstance(a, Seq.Seq):
self.assertEqual(str(a.strip()), str(a).strip())
self.assertEqual(str(a.lstrip()), str(a).lstrip())
self.assertEqual(str(a.rstrip()), str(a).rstrip())
self.assertEqual(str(a.lower()), str(a).lower())
self.assertEqual(str(a.upper()), str(a).upper())
def test_hash(self):
with warnings.catch_warnings(record=True):
hash(self.s)
def test_equal_comparison_of_incompatible_alphabets(self):
"""Test __eq__ comparison method"""
with warnings.catch_warnings(record=True):
Seq.Seq("TCAAAA", IUPAC.ambiguous_dna) == Seq.Seq("TCAAAA", IUPAC.ambiguous_rna)
def test_not_equal_comparsion(self):
"""Test __ne__ comparison method"""
self.assertNotEqual(Seq.Seq("TCAAA", IUPAC.ambiguous_dna),
Seq.Seq("TCAAAA", IUPAC.ambiguous_dna))
def test_less_than_comparison_of_incompatible_alphabets(self):
"""Test __lt__ comparison method"""
seq1 = Seq.Seq("TCAAA", IUPAC.ambiguous_dna)
seq2 = Seq.Seq("UCAAAA", IUPAC.ambiguous_rna)
with warnings.catch_warnings(record=True):
self.assertTrue(seq1 < seq2)
def test_less_than_or_equal_comparison_of_incompatible_alphabets(self):
"""Test __lt__ comparison method"""
seq1 = Seq.Seq("TCAAA", IUPAC.ambiguous_dna)
seq2 = Seq.Seq("UCAAAA", IUPAC.ambiguous_rna)
with warnings.catch_warnings(record=True):
self.assertTrue(seq1 <= seq2)
def test_add_method_using_wrong_object(self):
with self.assertRaises(TypeError):
self.s + dict()
def test_radd_method(self):
self.assertEqual("TCAAAAGGATGCATCATGTCAAAAGGATGCATCATG", str(self.s.__radd__(self.s)))
def test_radd_method_using_incompatible_alphabets(self):
rna_seq = Seq.Seq("UCAAAA", IUPAC.ambiguous_rna)
with self.assertRaises(TypeError):
self.s.__radd__(rna_seq)
def test_radd_method_using_wrong_object(self):
with self.assertRaises(TypeError):
self.s.__radd__(dict())
def test_to_string_deprecated_method(self):
with warnings.catch_warnings(record=True):
self.s.tostring()
def test_contains_method(self):
self.assertTrue("AAAA" in self.s)
def test_startswith(self):
self.assertTrue(self.s.startswith("TCA"))
self.assertTrue(self.s.startswith(("CAA", "CTA"), 1))
def test_endswith(self):
self.assertTrue(self.s.endswith("ATG"))
self.assertTrue(self.s.endswith(("ATG", "CTA")))
def test_append_nucleotides(self):
self.test_chars.append(Seq.Seq("A", IUPAC.ambiguous_dna))
self.test_chars.append(Seq.Seq("A", IUPAC.ambiguous_rna))
self.test_chars.append(Seq.Seq("A", Alphabet.generic_nucleotide))
self.assertEqual(7, len(self.test_chars))
def test_append_proteins(self):
self.test_chars.append(Seq.Seq("K", Alphabet.generic_protein))
self.test_chars.append(Seq.Seq("K-", Alphabet.Gapped(Alphabet.generic_protein, "-")))
self.test_chars.append(Seq.Seq("K@", Alphabet.Gapped(IUPAC.protein, "@")))
self.assertEqual(7, len(self.test_chars))
def test_exception_when_clashing_alphabets(self):
"""Test by setting up clashing alphabet sequences"""
b = Seq.Seq("-", Alphabet.generic_nucleotide)
self.assertRaises(TypeError, self.protein[0].strip, b)
b = Seq.Seq("-", Alphabet.generic_protein)
self.assertRaises(TypeError, self.dna[0].strip, b)
def test_stripping_characters(self):
for a in self.dna + self.rna + self.nuc + self.protein:
for char in self.test_chars:
str_char = str(char)
if isinstance(a, Seq.Seq):
self.assertEqual(str(a.strip(char)), str(a).strip(str_char))
self.assertEqual(str(a.lstrip(char)), str(a).lstrip(str_char))
self.assertEqual(str(a.rstrip(char)), str(a).rstrip(str_char))
def test_finding_characters(self):
for a in self.dna + self.rna + self.nuc + self.protein:
for char in self.test_chars:
str_char = str(char)
if isinstance(a, Seq.Seq):
self.assertEqual(a.find(char), str(a).find(str_char))
self.assertEqual(a.find(char, 2, -2), str(a).find(str_char, 2, -2))
self.assertEqual(a.rfind(char), str(a).rfind(str_char))
self.assertEqual(a.rfind(char, 2, -2), str(a).rfind(str_char, 2, -2))
def test_counting_characters(self):
for a in self.dna + self.rna + self.nuc + self.protein:
for char in self.test_chars:
str_char = str(char)
if isinstance(a, Seq.Seq):
self.assertEqual(a.count(char), str(a).count(str_char))
self.assertEqual(a.count(char, 2, -2), str(a).count(str_char, 2, -2))
def test_splits(self):
for a in self.dna + self.rna + self.nuc + self.protein:
for char in self.test_chars:
str_char = str(char)
if isinstance(a, Seq.Seq):
self.assertEqual([str(x) for x in a.split(char)],
str(a).split(str_char))
self.assertEqual([str(x) for x in a.rsplit(char)],
str(a).rsplit(str_char))
for max_sep in [0, 1, 2, 999]:
self.assertEqual([str(x) for x in a.split(char, max_sep)],
str(a).split(str_char, max_sep))
class TestSeqAddition(unittest.TestCase):
def setUp(self):
self.dna = [
Seq.Seq("ATCG", IUPAC.ambiguous_dna),
Seq.Seq("gtca", Alphabet.generic_dna),
Seq.MutableSeq("GGTCA", Alphabet.generic_dna),
Seq.Seq("CTG-CA", Alphabet.Gapped(IUPAC.unambiguous_dna, "-")),
"TGGTCA",
]
self.rna = [
Seq.Seq("AUUUCG", IUPAC.ambiguous_rna),
Seq.MutableSeq("AUUCG", IUPAC.ambiguous_rna),
Seq.Seq("uCAg", Alphabet.generic_rna),
Seq.MutableSeq("UC-AG", Alphabet.Gapped(Alphabet.generic_rna, "-")),
Seq.Seq("U.CAG", Alphabet.Gapped(Alphabet.generic_rna, ".")),
"UGCAU",
]
self.nuc = [
Seq.Seq("ATCG", Alphabet.generic_nucleotide),
"UUUTTTACG",
]
self.protein = [
Seq.Seq("ATCGPK", IUPAC.protein),
Seq.Seq("atcGPK", Alphabet.generic_protein),
Seq.Seq("T.CGPK", Alphabet.Gapped(IUPAC.protein, ".")),
Seq.Seq("T-CGPK", Alphabet.Gapped(IUPAC.protein, "-")),
Seq.Seq("MEDG-KRXR*", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "*"), "-")),
Seq.MutableSeq("ME-K-DRXR*XU", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "*"), "-")),
"TEDDF",
]
def test_addition_dna_rna_with_generic_nucleotides(self):
for a in self.dna + self.rna:
for b in self.nuc:
c = a + b
self.assertEqual(str(c), str(a) + str(b))
def test_addition_rna_with_rna(self):
self.rna.pop(3)
for a in self.rna:
for b in self.rna:
c = a + b
self.assertEqual(str(c), str(a) + str(b))
def test_exception_when_added_rna_has_more_than_one_gap_type(self):
"""Test resulting sequence has gap types '-' and '.'"""
with self.assertRaises(ValueError):
self.rna[3] + self.rna[4]
def test_addition_dna_with_dna(self):
for a in self.dna:
for b in self.dna:
c = a + b
self.assertEqual(str(c), str(a) + str(b))
def test_addition_dna_with_rna(self):
self.dna.pop(4)
self.rna.pop(5)
for a in self.dna:
for b in self.rna:
with self.assertRaises(TypeError):
a + b
with self.assertRaises(TypeError):
b + a
def test_addition_proteins(self):
self.protein.pop(2)
for a in self.protein:
for b in self.protein:
c = a + b
self.assertEqual(str(c), str(a) + str(b))
def test_exception_when_added_protein_has_more_than_one_gap_type(self):
"""Test resulting protein has gap types '-' and '.'"""
a = Seq.Seq("T.CGPK", Alphabet.Gapped(IUPAC.protein, "."))
b = Seq.Seq("T-CGPK", Alphabet.Gapped(IUPAC.protein, "-"))
with self.assertRaises(ValueError):
a + b
def test_exception_when_added_protein_has_more_than_one_stop_codon_type(self):
"""Test resulting protein has stop codon types '*' and '@'"""
a = Seq.Seq("MEDG-KRXR@", Alphabet.HasStopCodon(Alphabet.Gapped(IUPAC.extended_protein, "-"), "@"))
b = Seq.Seq("MEDG-KRXR*", Alphabet.Gapped(Alphabet.HasStopCodon(IUPAC.extended_protein, "*"), "-"))
with self.assertRaises(ValueError):
a + b
def test_exception_when_adding_protein_with_nucletides(self):
for a in self.protein[0:5]:
for b in self.dna[0:3] + self.rna[0:4]:
with self.assertRaises(TypeError):
a + b
def test_adding_generic_nucleotide_with_other_nucleotides(self):
for a in self.nuc:
for b in self.dna + self.rna + self.nuc:
c = a + b
self.assertEqual(str(c), str(a) + str(b))
class TestMutableSeq(unittest.TestCase):
def setUp(self):
self.s = Seq.Seq("TCAAAAGGATGCATCATG", IUPAC.unambiguous_dna)
self.mutable_s = MutableSeq("TCAAAAGGATGCATCATG", IUPAC.ambiguous_dna)
def test_mutableseq_creation(self):
"""Test creating MutableSeqs in multiple ways"""
mutable_s = MutableSeq("TCAAAAGGATGCATCATG", IUPAC.ambiguous_dna)
self.assertIsInstance(mutable_s, MutableSeq, "Creating MutableSeq")
mutable_s = self.s.tomutable()
self.assertIsInstance(mutable_s, MutableSeq, "Converting Seq to mutable")
array_seq = MutableSeq(array.array(array_indicator, "TCAAAAGGATGCATCATG"),
IUPAC.ambiguous_dna)
self.assertIsInstance(array_seq, MutableSeq, "Creating MutableSeq using array")
def test_repr(self):
self.assertEqual("MutableSeq('TCAAAAGGATGCATCATG', IUPACAmbiguousDNA())",
repr(self.mutable_s))
def test_truncated_repr(self):
seq = "TCAAAAGGATGCATCATGTCAAAAGGATGCATCATGTCAAAAGGATGCATCATGTCAAAAGGA"
expected = "MutableSeq('TCAAAAGGATGCATCATGTCAAAAGGATGCATCATGTCAAAAGGATGCATCATG...GGA', IUPACAmbiguousDNA())"
self.assertEqual(expected, repr(MutableSeq(seq, IUPAC.ambiguous_dna)))
def test_equal_comparison(self):
"""Test __eq__ comparison method"""
self.assertEqual(self.mutable_s, "TCAAAAGGATGCATCATG")
def test_equal_comparison_of_incompatible_alphabets(self):
with warnings.catch_warnings(record=True):
self.mutable_s == MutableSeq('UCAAAAGGA', IUPAC.ambiguous_rna)
def test_not_equal_comparison(self):
"""Test __ne__ comparison method"""
self.assertNotEqual(self.mutable_s, "other thing")
def test_less_than_comparison(self):
"""Test __lt__ comparison method"""
self.assertTrue(self.mutable_s[:-1] < self.mutable_s)
def test_less_than_comparison_of_incompatible_alphabets(self):
with warnings.catch_warnings(record=True):
self.mutable_s[:-1] < MutableSeq("UCAAAAGGAUGCAUCAUG", IUPAC.ambiguous_rna)
def test_less_than_comparison_without_alphabet(self):
self.assertTrue(self.mutable_s[:-1] < "TCAAAAGGATGCATCATG")
def test_less_than_or_equal_comparison(self):
"""Test __le__ comparison method"""
self.assertTrue(self.mutable_s[:-1] <= self.mutable_s)
def test_less_than_or_equal_comparison_of_incompatible_alphabets(self):
with warnings.catch_warnings(record=True):
self.mutable_s[:-1] <= MutableSeq("UCAAAAGGAUGCAUCAUG", IUPAC.ambiguous_rna)
def test_less_than_or_equal_comparison_without_alphabet(self):
self.assertTrue(self.mutable_s[:-1] <= "TCAAAAGGATGCATCATG")
def test_add_method(self):
"""Test adding wrong type to MutableSeq"""
with self.assertRaises(TypeError):
self.mutable_s + 1234
def test_radd_method(self):
self.assertEqual("TCAAAAGGATGCATCATGTCAAAAGGATGCATCATG",
self.mutable_s.__radd__(self.mutable_s))
def test_radd_method_incompatible_alphabets(self):
with self.assertRaises(TypeError):
self.mutable_s.__radd__(MutableSeq("UCAAAAGGA", IUPAC.ambiguous_rna))
def test_radd_method_using_seq_object(self):
self.assertEqual("TCAAAAGGATGCATCATGTCAAAAGGATGCATCATG",
self.mutable_s.__radd__(self.s))
def test_radd_method_wrong_type(self):
with self.assertRaises(TypeError):
self.mutable_s.__radd__(1234)
def test_as_string(self):
self.assertEqual("TCAAAAGGATGCATCATG", str(self.mutable_s))
def test_length(self):
self.assertEqual(18, len(self.mutable_s))
def test_converting_to_immutable(self):
self.assertIsInstance(self.mutable_s.toseq(), Seq.Seq)
def test_first_nucleotide(self):
self.assertEqual('T', self.mutable_s[0])
def test_setting_slices(self):
self.assertEqual(MutableSeq('CAAA', IUPAC.ambiguous_dna),
self.mutable_s[1:5], "Slice mutable seq")
self.mutable_s[1:3] = "GAT"
self.assertEqual(MutableSeq("TGATAAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s,
"Set slice with string and adding extra nucleotide")
self.mutable_s[1:3] = self.mutable_s[5:7]
self.assertEqual(MutableSeq("TAATAAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s, "Set slice with MutableSeq")
self.mutable_s[1:3] = array.array(array_indicator, "GAT")
self.assertEqual(MutableSeq("TGATTAAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s, "Set slice with array")
def test_setting_item(self):
self.mutable_s[3] = "G"
self.assertEqual(MutableSeq("TCAGAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s)
def test_deleting_slice(self):
del self.mutable_s[4:5]
self.assertEqual(MutableSeq("TCAAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s)
def test_deleting_item(self):
del self.mutable_s[3]
self.assertEqual(MutableSeq("TCAAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s)
def test_appending(self):
self.mutable_s.append("C")
self.assertEqual(MutableSeq("TCAAAAGGATGCATCATGC", IUPAC.ambiguous_dna),
self.mutable_s)
def test_inserting(self):
self.mutable_s.insert(4, "G")
self.assertEqual(MutableSeq("TCAAGAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s)
def test_popping_last_item(self):
self.assertEqual("G", self.mutable_s.pop())
def test_remove_items(self):
self.mutable_s.remove("G")
self.assertEqual(MutableSeq("TCAAAAGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s, "Remove first G")
self.assertRaises(ValueError, self.mutable_s.remove, 'Z')
def test_count(self):
self.assertEqual(7, self.mutable_s.count("A"))
self.assertEqual(2, self.mutable_s.count("AA"))
def test_index(self):
self.assertEqual(2, self.mutable_s.index("A"))
self.assertRaises(ValueError, self.mutable_s.index, "8888")
def test_reverse(self):
"""Test using reverse method"""
self.mutable_s.reverse()
self.assertEqual(MutableSeq("GTACTACGTAGGAAAACT", IUPAC.ambiguous_dna),
self.mutable_s)
def test_reverse_with_stride(self):
"""Test reverse using -1 stride"""
self.assertEqual(MutableSeq("GTACTACGTAGGAAAACT", IUPAC.ambiguous_dna),
self.mutable_s[::-1])
def test_complement(self):
self.mutable_s.complement()
self.assertEqual(str("AGTTTTCCTACGTAGTAC"), str(self.mutable_s))
def test_complement_rna(self):
seq = Seq.MutableSeq("AUGaaaCUG", IUPAC.unambiguous_rna)
seq.complement()
self.assertEqual(str("UACuuuGAC"), str(seq))
    def test_complement_mixed_alphabets(self):
seq = Seq.MutableSeq("AUGaaaCTG")
with self.assertRaises(ValueError):
seq.complement()
def test_complement_rna_string(self):
seq = Seq.MutableSeq("AUGaaaCUG")
seq.complement()
self.assertEqual('UACuuuGAC', str(seq))
def test_complement_dna_string(self):
seq = Seq.MutableSeq("ATGaaaCTG")
seq.complement()
self.assertEqual('TACtttGAC', str(seq))
def test_reverse_complement(self):
self.mutable_s.reverse_complement()
self.assertEqual("CATGATGCATCCTTTTGA", str(self.mutable_s))
def test_reverse_complement_of_protein(self):
seq = Seq.MutableSeq("ACTGTCGTCT", Alphabet.generic_protein)
with self.assertRaises(ValueError):
seq.reverse_complement()
def test_to_string_method(self):
"""This method is currently deprecated, probably will need to remove this test soon"""
with warnings.catch_warnings(record=True):
self.mutable_s.tostring()
def test_extend_method(self):
self.mutable_s.extend("GAT")
self.assertEqual(MutableSeq("TCAAAAGGATGCATCATGGAT", IUPAC.ambiguous_dna),
self.mutable_s)
def test_extend_with_mutable_seq(self):
self.mutable_s.extend(MutableSeq("TTT", IUPAC.ambiguous_dna))
self.assertEqual(MutableSeq("TCAAAAGGATGCATCATGTTT", IUPAC.ambiguous_dna),
self.mutable_s)
def test_delete_stride_slice(self):
del self.mutable_s[4:6 - 1]
self.assertEqual(MutableSeq("TCAAAGGATGCATCATG", IUPAC.ambiguous_dna),
self.mutable_s)
def test_extract_third_nucleotide(self):
"""Test extracting every third nucleotide (slicing with stride 3)"""
self.assertEqual(MutableSeq("TAGTAA", IUPAC.ambiguous_dna), self.mutable_s[0::3])
self.assertEqual(MutableSeq("CAGGTT", IUPAC.ambiguous_dna), self.mutable_s[1::3])
self.assertEqual(MutableSeq("AAACCG", IUPAC.ambiguous_dna), self.mutable_s[2::3])
def test_set_wobble_codon_to_n(self):
"""Test setting wobble codon to N (set slice with stride 3)"""
self.mutable_s[2::3] = "N" * len(self.mutable_s[2::3])
self.assertEqual(MutableSeq("TCNAANGGNTGNATNATN", IUPAC.ambiguous_dna),
self.mutable_s)
class TestUnknownSeq(unittest.TestCase):
def setUp(self):
self.s = Seq.UnknownSeq(6)
def test_construction(self):
self.assertEqual("??????", str(Seq.UnknownSeq(6)))
self.assertEqual("NNNNNN", str(Seq.UnknownSeq(6, Alphabet.generic_dna)))
self.assertEqual("XXXXXX", str(Seq.UnknownSeq(6, Alphabet.generic_protein)))
self.assertEqual("??????", str(Seq.UnknownSeq(6, character="?")))
with self.assertRaises(ValueError):
Seq.UnknownSeq(-10)
with self.assertRaises(ValueError):
Seq.UnknownSeq(6, character='??')
def test_length(self):
self.assertEqual(6, len(self.s))
def test_repr(self):
self.assertEqual("UnknownSeq(6, alphabet = Alphabet(), character = '?')",
repr(self.s))
def test_add_method(self):
seq1 = Seq.UnknownSeq(3, Alphabet.generic_dna)
self.assertEqual("??????NNN", str(self.s + seq1))
seq2 = Seq.UnknownSeq(3, Alphabet.generic_dna)
self.assertEqual("NNNNNN", str(seq1 + seq2))
def test_getitem_method(self):
self.assertEqual("", self.s[-1:-1])
self.assertEqual("?", self.s[1])
self.assertEqual("?", self.s[5:])
self.assertEqual("?", self.s[:1])
self.assertEqual("??", self.s[1:3])
self.assertEqual("???", self.s[1:6:2])
self.assertEqual("????", self.s[1:-1])
with self.assertRaises(ValueError):
self.s[1:6:0]
def test_count(self):
self.assertEqual(6, self.s.count("?"))
self.assertEqual(3, self.s.count("??"))
self.assertEqual(0, Seq.UnknownSeq(6, character="N").count("?"))
self.assertEqual(0, Seq.UnknownSeq(6, character="N").count("??"))
self.assertEqual(4, Seq.UnknownSeq(6, character="?").count("?", start=2))
self.assertEqual(2, Seq.UnknownSeq(6, character="?").count("??", start=2))
def test_complement(self):
self.s.complement()
self.assertEqual(str("??????"), str(self.s))
def test_complement_of_protein(self):
"""Test reverse complement shouldn't work on a protein!"""
seq = Seq.UnknownSeq(6, Alphabet.generic_protein)
with self.assertRaises(ValueError):
seq.complement()
def test_reverse_complement(self):
self.s.reverse_complement()
self.assertEqual("??????", str(self.s))
def test_reverse_complement_of_protein(self):
seq = Seq.UnknownSeq(6, Alphabet.generic_protein)
self.assertRaises(ValueError, seq.reverse_complement)
def test_transcribe(self):
self.assertEqual("??????", self.s.transcribe())
def test_back_transcribe(self):
self.assertEqual("??????", self.s.back_transcribe())
def test_upper(self):
seq = Seq.UnknownSeq(6, Alphabet.generic_dna)
self.assertEqual("NNNNNN", str(seq.upper()))
def test_lower(self):
seq = Seq.UnknownSeq(6, Alphabet.generic_dna)
self.assertEqual("nnnnnn", str(seq.lower()))
def test_translation(self):
self.assertEqual("XX", str(self.s.translate()))
def test_translation_of_proteins(self):
seq = Seq.UnknownSeq(6, IUPAC.protein)
self.assertRaises(ValueError, seq.translate)
def test_ungap(self):
seq = Seq.UnknownSeq(7, alphabet=Alphabet.Gapped(Alphabet.DNAAlphabet(), "-"))
self.assertEqual("NNNNNNN", str(seq.ungap("-")))
seq = Seq.UnknownSeq(20, alphabet=Alphabet.Gapped(Alphabet.DNAAlphabet(), "-"), character='-')
self.assertEqual("", seq.ungap("-"))
class TestAmbiguousComplements(unittest.TestCase):
def test_ambiguous_values(self):
"""Test that other tests do not introduce characters to our values"""
self.assertFalse("-" in ambiguous_dna_values)
self.assertFalse("?" in ambiguous_dna_values)
class TestComplement(unittest.TestCase):
def test_complement_ambiguous_dna_values(self):
for ambig_char, values in sorted(ambiguous_dna_values.items()):
compl_values = str(Seq.Seq(values, alphabet=IUPAC.ambiguous_dna).complement())
self.assertEqual(set(compl_values),
set(ambiguous_dna_values[ambiguous_dna_complement[ambig_char]]))
def test_complement_ambiguous_rna_values(self):
for ambig_char, values in sorted(ambiguous_rna_values.items()):
compl_values = str(Seq.Seq(values, alphabet=IUPAC.ambiguous_rna).complement())
self.assertEqual(set(compl_values),
set(ambiguous_rna_values[ambiguous_rna_complement[ambig_char]]))
def test_complement_incompatible_alphabets(self):
seq = Seq.Seq("CAGGTU")
with self.assertRaises(ValueError):
seq.complement()
class TestReverseComplement(unittest.TestCase):
def test_reverse_complement(self):
test_seqs_copy = copy.copy(test_seqs)
test_seqs_copy.pop(21)
for nucleotide_seq in test_seqs_copy:
if not isinstance(nucleotide_seq.alphabet, Alphabet.ProteinAlphabet) and \
isinstance(nucleotide_seq, Seq.Seq):
expected = Seq.reverse_complement(nucleotide_seq)
self.assertEqual(repr(expected), repr(nucleotide_seq.reverse_complement()))
self.assertEqual(repr(expected[::-1]), repr(nucleotide_seq.complement()))
self.assertEqual(str(nucleotide_seq.complement()),
str(Seq.reverse_complement(nucleotide_seq))[::-1])
self.assertEqual(str(nucleotide_seq.reverse_complement()),
str(Seq.reverse_complement(nucleotide_seq)))
def test_reverse_complement_of_mixed_dna_rna(self):
seq = "AUGAAACTG" # U and T
self.assertRaises(ValueError, Seq.reverse_complement, seq)
def test_reverse_complement_of_rna(self):
seq = "AUGAAACUG"
self.assertEqual("CAGUUUCAU", Seq.reverse_complement(seq))
def test_reverse_complement_of_dna(self):
seq = "ATGAAACTG"
self.assertEqual("CAGTTTCAT", Seq.reverse_complement(seq))
def test_reverse_complement_on_proteins(self):
"""Test reverse complement shouldn't work on a protein!"""
for s in protein_seqs:
with self.assertRaises(ValueError):
Seq.reverse_complement(s)
with self.assertRaises(ValueError):
s.reverse_complement()
def test_complement_on_proteins(self):
"""Test complement shouldn't work on a protein!"""
for s in protein_seqs:
with self.assertRaises(ValueError):
s.complement()
class TestDoubleReverseComplement(unittest.TestCase):
def test_reverse_complements(self):
"""Test double reverse complement preserves the sequence"""
for sequence in [Seq.Seq("".join(sorted(ambiguous_rna_values))),
Seq.Seq("".join(sorted(ambiguous_dna_values))),
Seq.Seq("".join(sorted(ambiguous_rna_values)), Alphabet.generic_rna),
Seq.Seq("".join(sorted(ambiguous_dna_values)), Alphabet.generic_dna),
Seq.Seq("".join(sorted(ambiguous_rna_values)).replace("X", ""), IUPAC.IUPACAmbiguousRNA()),
Seq.Seq("".join(sorted(ambiguous_dna_values)).replace("X", ""), IUPAC.IUPACAmbiguousDNA()),
Seq.Seq("AWGAARCKG")]: # Note no U or T
reversed_sequence = sequence.reverse_complement()
self.assertEqual(str(sequence),
str(reversed_sequence.reverse_complement()))
class TestSequenceAlphabets(unittest.TestCase):
def test_sequence_alphabets(self):
"""Sanity test on the test sequence alphabets (see also enhancement
bug 2597)"""
for nucleotide_seq in test_seqs:
if "U" in str(nucleotide_seq).upper():
self.assertNotIsInstance(nucleotide_seq.alphabet, Alphabet.DNAAlphabet)
if "T" in str(nucleotide_seq).upper():
self.assertNotIsInstance(nucleotide_seq.alphabet, Alphabet.RNAAlphabet)
class TestTranscription(unittest.TestCase):
def test_transcription_dna_into_rna(self):
for nucleotide_seq in test_seqs:
if isinstance(nucleotide_seq.alphabet, Alphabet.DNAAlphabet):
expected = Seq.transcribe(nucleotide_seq)
self.assertEqual(str(nucleotide_seq).replace("t", "u").replace("T", "U"),
str(expected))
def test_transcription_dna_string_into_rna(self):
seq = "ATGAAACTG"
self.assertEqual("AUGAAACUG", Seq.transcribe(seq))
def test_seq_object_transcription_method(self):
for nucleotide_seq in test_seqs:
if isinstance(nucleotide_seq.alphabet, Alphabet.DNAAlphabet) and \
isinstance(nucleotide_seq, Seq.Seq):
self.assertEqual(repr(Seq.transcribe(nucleotide_seq)),
repr(nucleotide_seq.transcribe()))
def test_transcription_of_rna(self):
"""Test transcription shouldn't work on RNA!"""
seq = Seq.Seq("AUGAAACUG", IUPAC.ambiguous_rna)
with self.assertRaises(ValueError):
seq.transcribe()
def test_transcription_of_proteins(self):
"""Test transcription shouldn't work on a protein!"""
for s in protein_seqs:
with self.assertRaises(ValueError):
Seq.transcribe(s)
if isinstance(s, Seq.Seq):
with self.assertRaises(ValueError):
s.transcribe()
def test_back_transcribe_rna_into_dna(self):
for nucleotide_seq in test_seqs:
if isinstance(nucleotide_seq.alphabet, Alphabet.RNAAlphabet):
expected = Seq.back_transcribe(nucleotide_seq)
self.assertEqual(str(nucleotide_seq).replace("u", "t").replace("U", "T"),
str(expected))
def test_back_transcribe_rna_string_into_dna(self):
seq = "AUGAAACUG"
self.assertEqual("ATGAAACTG", Seq.back_transcribe(seq))
def test_seq_object_back_transcription_method(self):
for nucleotide_seq in test_seqs:
if isinstance(nucleotide_seq.alphabet, Alphabet.RNAAlphabet) and \
isinstance(nucleotide_seq, Seq.Seq):
expected = Seq.back_transcribe(nucleotide_seq)
self.assertEqual(repr(nucleotide_seq.back_transcribe()), repr(expected))
def test_back_transcription_of_proteins(self):
"""Test back-transcription shouldn't work on a protein!"""
for s in protein_seqs:
with self.assertRaises(ValueError):
Seq.back_transcribe(s)
if isinstance(s, Seq.Seq):
with self.assertRaises(ValueError):
s.back_transcribe()
def test_back_transcription_of_dna(self):
"""Test back-transcription shouldn't work on DNA!"""
seq = Seq.Seq("ATGAAACTG", IUPAC.ambiguous_dna)
with self.assertRaises(ValueError):
seq.back_transcribe()
class TestTranslating(unittest.TestCase):
def setUp(self):
self.test_seqs = [
Seq.Seq("TCAAAAGGATGCATCATG", IUPAC.unambiguous_dna),
Seq.Seq("ATGAAACTG"),
Seq.Seq("ATGAARCTG"),
Seq.Seq("AWGAARCKG"), # Note no U or T
Seq.Seq("".join(ambiguous_rna_values)),
Seq.Seq("".join(ambiguous_dna_values)),
Seq.Seq("".join(ambiguous_rna_values), Alphabet.generic_rna),
Seq.Seq("".join(ambiguous_dna_values), Alphabet.generic_dna),
Seq.Seq("".join(ambiguous_rna_values), IUPAC.IUPACAmbiguousRNA()),
Seq.Seq("".join(ambiguous_dna_values), IUPAC.IUPACAmbiguousDNA()),
Seq.Seq("AWGAARCKG", Alphabet.generic_dna),
Seq.Seq("AUGAAACUG", Alphabet.generic_rna),
Seq.Seq("ATGAAACTG", IUPAC.unambiguous_dna),
Seq.Seq("ATGAAACTGWN", IUPAC.ambiguous_dna),
Seq.Seq("AUGAAACUG", Alphabet.generic_rna),
Seq.Seq("AUGAAACUG", IUPAC.unambiguous_rna),
Seq.Seq("AUGAAACUGWN", IUPAC.ambiguous_rna),
Seq.Seq("ATGAAACTG", Alphabet.generic_nucleotide),
Seq.MutableSeq("ATGAAACTG", Alphabet.generic_dna),
Seq.MutableSeq("AUGaaaCUG", IUPAC.unambiguous_rna),
]
def test_translation(self):
for nucleotide_seq in self.test_seqs:
nucleotide_seq = nucleotide_seq[:3 * (len(nucleotide_seq) // 3)]
if isinstance(nucleotide_seq, Seq.Seq) and 'X' not in str(nucleotide_seq):
expected = Seq.translate(nucleotide_seq)
self.assertEqual(repr(expected), repr(nucleotide_seq.translate()))
def test_alphabets_of_translated_seqs(self):
self.assertEqual("IUPACProtein()", repr(self.test_seqs[0].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[1].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[2].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[3].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[10].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[11].translate().alphabet))
self.assertEqual("IUPACProtein()", repr(self.test_seqs[12].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[13].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[14].translate().alphabet))
self.assertEqual("IUPACProtein()", repr(self.test_seqs[15].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[16].translate().alphabet))
self.assertEqual("ExtendedIUPACProtein()", repr(self.test_seqs[17].translate().alphabet))
def test_translation_of_gapped_seq_with_gap_char_given(self):
seq = Seq.Seq("ATG---AAACTG")
self.assertEqual("M-KL", seq.translate(gap="-"))
self.assertRaises(TranslationError, seq.translate, gap="~")
def test_translation_of_gapped_seq_with_stop_codon_and_gap_char_given(self):
seq = Seq.Seq("GTG---GCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG")
self.assertEqual("V-AIVMGR*KGAR*", seq.translate(gap="-"))
self.assertRaises(TranslationError, seq.translate)
def test_translation_of_gapped_seq_with_gap_char_given_and_inferred_from_alphabet(self):
seq = Seq.Seq("ATG---AAACTG", Gapped(IUPAC.unambiguous_dna))
self.assertEqual("M-KL", seq.translate(gap="-"))
self.assertRaises(ValueError, seq.translate, gap="~")
seq = Seq.Seq("ATG~~~AAACTG", Gapped(IUPAC.unambiguous_dna))
self.assertRaises(ValueError, seq.translate, gap="~")
self.assertRaises(TranslationError, seq.translate, gap="-")
def test_translation_of_gapped_seq_with_gap_char_given_and_inferred_from_alphabet2(self):
"""Test using stop codon in sequence"""
seq = Seq.Seq("ATG---AAACTGTAG", Gapped(IUPAC.unambiguous_dna))
self.assertEqual("M-KL*", seq.translate(gap="-"))
self.assertRaises(ValueError, seq.translate, gap="~")
seq = Seq.Seq("ATG---AAACTGTAG", Gapped(IUPAC.unambiguous_dna))
self.assertEqual("M-KL@", seq.translate(gap="-", stop_symbol="@"))
self.assertRaises(ValueError, seq.translate, gap="~")
seq = Seq.Seq("ATG~~~AAACTGTAG", Gapped(IUPAC.unambiguous_dna))
self.assertRaises(ValueError, seq.translate, gap="~")
self.assertRaises(TranslationError, seq.translate, gap="-")
def test_translation_of_gapped_seq_no_gap_char_given(self):
seq = Seq.Seq("ATG---AAACTG")
self.assertRaises(TranslationError, seq.translate)
def test_translation_of_gapped_seq_no_gap_char_given_and_inferred_from_alphabet(self):
seq = Seq.Seq("ATG---AAACTG", Gapped(IUPAC.unambiguous_dna))
self.assertEqual("M-KL", seq.translate())
seq = Seq.Seq("ATG~~~AAACTG", Gapped(IUPAC.unambiguous_dna))
self.assertRaises(TranslationError, seq.translate)
seq = Seq.Seq("ATG~~~AAACTG", Gapped(IUPAC.unambiguous_dna, "~"))
self.assertEqual("M~KL", seq.translate())
def test_alphabet_of_translated_gapped_seq(self):
seq = Seq.Seq("ATG---AAACTG", Gapped(IUPAC.unambiguous_dna))
self.assertEqual("Gapped(ExtendedIUPACProtein(), '-')", repr(seq.translate().alphabet))
seq = Seq.Seq("ATG---AAACTG", Gapped(IUPAC.unambiguous_dna, "-"))
self.assertEqual("Gapped(ExtendedIUPACProtein(), '-')", repr(seq.translate().alphabet))
seq = Seq.Seq("ATG~~~AAACTG", Gapped(IUPAC.unambiguous_dna, "~"))
self.assertEqual("Gapped(ExtendedIUPACProtein(), '~')", repr(seq.translate().alphabet))
seq = Seq.Seq("ATG---AAACTG")
self.assertEqual("Gapped(ExtendedIUPACProtein(), '-')", repr(seq.translate(gap="-").alphabet))
seq = Seq.Seq("ATG~~~AAACTG")
self.assertEqual("Gapped(ExtendedIUPACProtein(), '~')", repr(seq.translate(gap="~").alphabet))
seq = Seq.Seq("ATG~~~AAACTGTAG")
self.assertEqual("HasStopCodon(Gapped(ExtendedIUPACProtein(), '~'), '*')",
repr(seq.translate(gap="~").alphabet))
seq = Seq.Seq("ATG---AAACTGTGA")
self.assertEqual("HasStopCodon(Gapped(ExtendedIUPACProtein(), '-'), '*')",
repr(seq.translate(gap="-").alphabet))
seq = Seq.Seq("ATG---AAACTGTGA")
self.assertEqual("HasStopCodon(Gapped(ExtendedIUPACProtein(), '-'), '@')",
repr(seq.translate(gap="-", stop_symbol="@").alphabet))
def test_translation_wrong_type(self):
"""Test translation table cannot be CodonTable"""
seq = Seq.Seq("ATCGTA")
with self.assertRaises(ValueError):
seq.translate(table=ambiguous_dna_complement)
def test_translation_of_string(self):
seq = "GTGGCCATTGTAATGGGCCGC"
self.assertEqual("VAIVMGR", Seq.translate(seq))
def test_translation_of_gapped_string_with_gap_char_given(self):
seq = "GTG---GCCATTGTAATGGGCCGC"
expected = "V-AIVMGR"
self.assertEqual(expected, Seq.translate(seq, gap="-"))
self.assertRaises(TypeError, Seq.translate, seq, gap=[])
self.assertRaises(ValueError, Seq.translate, seq, gap="-*")
def test_translation_of_gapped_string_no_gap_char_given(self):
seq = "GTG---GCCATTGTAATGGGCCGC"
self.assertRaises(TranslationError, Seq.translate, seq)
def test_translation_to_stop(self):
for nucleotide_seq in self.test_seqs:
nucleotide_seq = nucleotide_seq[:3 * (len(nucleotide_seq) // 3)]
if isinstance(nucleotide_seq, Seq.Seq) and 'X' not in str(nucleotide_seq):
short = Seq.translate(nucleotide_seq, to_stop=True)
self.assertEqual(str(short), str(Seq.translate(nucleotide_seq).split('*')[0]))
seq = "GTGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG"
self.assertEqual("VAIVMGRWKGAR", Seq.translate(seq, table=2, to_stop=True))
def test_translation_on_proteins(self):
"""Test translation shouldn't work on a protein!"""
for s in protein_seqs:
with self.assertRaises(ValueError):
Seq.translate(s)
if isinstance(s, Seq.Seq):
with self.assertRaises(ValueError):
s.translate()
def test_translation_of_invalid_codon(self):
for codon in ["TA?", "N-N", "AC_", "Ac_"]:
with self.assertRaises(TranslationError):
Seq.translate(codon)
def test_translation_of_glutamine(self):
for codon in ['SAR', 'SAG', 'SAA']:
self.assertEqual('Z', Seq.translate(codon))
def test_translation_of_asparagine(self):
for codon in ['RAY', 'RAT', 'RAC']:
self.assertEqual('B', Seq.translate(codon))
def test_translation_of_leucine(self):
for codon in ['WTA', 'MTY', 'MTT', 'MTW', 'MTM', 'MTH', 'MTA', 'MTC', 'HTA']:
self.assertEqual('J', Seq.translate(codon))
def test_translation_with_bad_table_argument(self):
table = dict()
with self.assertRaises(ValueError):
Seq.translate("GTGGCCATTGTAATGGGCCGC", table=table)
def test_translation_with_codon_table_as_table_argument(self):
table = standard_dna_table
self.assertEqual("VAIVMGR", Seq.translate("GTGGCCATTGTAATGGGCCGC", table=table))
def test_translation_incomplete_codon(self):
with warnings.catch_warnings(record=True):
Seq.translate("GTGGCCATTGTAATGGGCCG")
def test_translation_extra_stop_codon(self):
seq = "GTGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAGTAG"
with self.assertRaises(TranslationError):
Seq.translate(seq, table=2, cds=True)
def test_translation_using_cds(self):
seq = "GTGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG"
self.assertEqual("MAIVMGRWKGAR", Seq.translate(seq, table=2, cds=True))
seq = "GTGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCG" # not multiple of three
with self.assertRaises(TranslationError):
Seq.translate(seq, table=2, cds=True)
seq = "GTGGCCATTGTAATGGGCCGCTGAAAGGGTGCCCGA" # no stop codon
with self.assertRaises(TranslationError):
Seq.translate(seq, table=2, cds=True)
seq = "GCCATTGTAATGGGCCGCTGAAAGGGTGCCCGATAG" # no start codon
with self.assertRaises(TranslationError):
Seq.translate(seq, table=2, cds=True)
class TestStopCodons(unittest.TestCase):
def setUp(self):
self.misc_stops = "TAATAGTGAAGAAGG"
def test_stops(self):
for nucleotide_seq in [self.misc_stops, Seq.Seq(self.misc_stops),
Seq.Seq(self.misc_stops, Alphabet.generic_nucleotide),
Seq.Seq(self.misc_stops, Alphabet.DNAAlphabet()),
Seq.Seq(self.misc_stops, IUPAC.unambiguous_dna)]:
self.assertEqual("***RR", str(Seq.translate(nucleotide_seq)))
self.assertEqual("***RR", str(Seq.translate(nucleotide_seq, table=1)))
self.assertEqual("***RR", str(Seq.translate(nucleotide_seq, table="SGC0")))
self.assertEqual("**W**", str(Seq.translate(nucleotide_seq, table=2)))
self.assertEqual("**WRR", str(Seq.translate(nucleotide_seq,
table='Yeast Mitochondrial')))
self.assertEqual("**WSS", str(Seq.translate(nucleotide_seq, table=5)))
self.assertEqual("**WSS", str(Seq.translate(nucleotide_seq, table=9)))
self.assertEqual("**CRR", str(Seq.translate(nucleotide_seq,
table='Euplotid Nuclear')))
self.assertEqual("***RR", str(Seq.translate(nucleotide_seq, table=11)))
self.assertEqual("***RR", str(Seq.translate(nucleotide_seq, table='Bacterial')))
def test_translation_of_stops(self):
self.assertEqual(Seq.translate("TAT"), "Y")
self.assertEqual(Seq.translate("TAR"), "*")
self.assertEqual(Seq.translate("TAN"), "X")
self.assertEqual(Seq.translate("NNN"), "X")
self.assertEqual(Seq.translate("TAt"), "Y")
self.assertEqual(Seq.translate("TaR"), "*")
self.assertEqual(Seq.translate("TaN"), "X")
self.assertEqual(Seq.translate("nnN"), "X")
self.assertEqual(Seq.translate("tat"), "Y")
self.assertEqual(Seq.translate("tar"), "*")
self.assertEqual(Seq.translate("tan"), "X")
self.assertEqual(Seq.translate("nnn"), "X")
if __name__ == "__main__":
runner = unittest.TextTestRunner(verbosity=2)
unittest.main(testRunner=runner)
| 43.608326
| 117
| 0.63867
|
9f5c6fe68cb147919d090a7501e0ff0b439c7d77
| 1,669
|
py
|
Python
|
cogs/utils.py
|
ricky-davis/AstroServer
|
a0902a7b552cf5b909d06cc298c04225bdb67352
|
[
"MIT"
] | null | null | null |
cogs/utils.py
|
ricky-davis/AstroServer
|
a0902a7b552cf5b909d06cc298c04225bdb67352
|
[
"MIT"
] | null | null | null |
cogs/utils.py
|
ricky-davis/AstroServer
|
a0902a7b552cf5b909d06cc298c04225bdb67352
|
[
"MIT"
] | null | null | null |
#import requests
import json
import urllib
import urllib.error
from urllib import request
import ssl
# pylint: disable=no-member
ALVERSION = "v1.8.2.3"
class AstroRequests():
@classmethod
def get(cls, url, timeout=5):
proxies = request.getproxies()
proxy_handler = request.ProxyHandler(proxies)
opener = request.build_opener(proxy_handler)
gcontext = ssl.SSLContext()
request.install_opener(opener)
#print(f"get: {proxies}")
resp = request.urlopen(url, timeout=timeout, context=gcontext)
# print(resp)
return resp # cls.session.get(url, verify=False, timeout=timeout)
@classmethod
def post(cls, url, headers=None, jsonD=None, timeout=5):
if not headers:
headers = {}
if not jsonD:
jsonD = {}
req = request.Request(url)
if jsonD != {}:
jsonD = json.dumps(jsonD).encode('utf-8')
req.add_header('Content-Type', 'application/json; charset=utf-8')
gcontext = ssl.SSLContext()
# print(f"data: {jsonD}")
# print(f"headers:{headers}")
for header, value in headers.items():
req.add_header(header, value)
proxies = request.getproxies()
proxy_handler = request.ProxyHandler(proxies)
opener = request.build_opener(proxy_handler)
request.install_opener(opener)
# print(f"post: {proxies}")
# print(f"url: {url}")
try:
resp = request.urlopen(
req, data=jsonD, timeout=timeout, context=gcontext)
except urllib.error.HTTPError as e:
resp = e
return resp
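# Usage sketch (URL and payload are illustrative):
#   resp = AstroRequests.get("https://example.com/status")
#   body = json.loads(resp.read())
#   AstroRequests.post("https://example.com/api", jsonD={"key": "value"})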
| 30.345455
| 77
| 0.603355
|
9816396722966fc80af25e743fb50588699b2d65
| 4,198
|
py
|
Python
|
recipes/se_windnoise/test.py
|
wangwei2009/speechbrain
|
ebbac4561a9c9101786e0ab0b1105017eb655fc8
|
[
"Apache-2.0"
] | null | null | null |
recipes/se_windnoise/test.py
|
wangwei2009/speechbrain
|
ebbac4561a9c9101786e0ab0b1105017eb655fc8
|
[
"Apache-2.0"
] | null | null | null |
recipes/se_windnoise/test.py
|
wangwei2009/speechbrain
|
ebbac4561a9c9101786e0ab0b1105017eb655fc8
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
"""Recipe for training a speech enhancement system with spectral masking.
To run this recipe, do the following:
> python train.py train.yaml --data_folder /path/to/save/mini_librispeech
To read the code, first scroll to the bottom to see the "main" code.
This gives a high-level overview of what is going on, while the
Brain class definition provides the details of what happens
for each batch during training.
The first time you run it, this script should automatically download
and prepare the Mini Librispeech dataset for computation. Noise and
reverberation are automatically added to each sample from OpenRIR.
Authors
* Szu-Wei Fu 2020
* Chien-Feng Liao 2020
* Peter Plantinga 2021
"""
import sys
import torch
import speechbrain as sb
from hyperpyyaml import load_hyperpyyaml
from mini_librispeech_prepare import prepare_mini_librispeech
from train import SEBrain
from train import dataio_prep
import torchaudio
import os
# Recipe begins!
if __name__ == "__main__":
# Reading command line arguments
hparams_file, run_opts, overrides = sb.parse_arguments(sys.argv[1:])
# Initialize ddp (useful only for multi-GPU DDP training)
sb.utils.distributed.ddp_init_group(run_opts)
print(hparams_file)
# Load hyperparameters file with command-line overrides
with open(hparams_file) as fin:
hparams = load_hyperpyyaml(fin, overrides)
# Create experiment directory
sb.create_experiment_directory(
experiment_directory=hparams["output_folder"],
hyperparams_to_save=hparams_file,
overrides=overrides,
)
# Initialize the Brain object to prepare for mask training.
se_brain = SEBrain(
modules=hparams["modules"],
opt_class=hparams["opt_class"],
hparams=hparams,
run_opts=run_opts,
checkpointer=hparams["checkpointer"],
)
datasets = dataio_prep(hparams)
valid_set=datasets["valid"]
valid_set_0 = valid_set[1]
print("len(valid_set_0['noisy_sig']):{}".format(valid_set_0['noisy_sig'].shape))
noisy_wavs = valid_set_0['noisy_sig']
clean_wavs = valid_set_0['clean_sig']
print(noisy_wavs.shape)
torchaudio.save('wav/noisy_wavs.wav',noisy_wavs.view(1,-1),16000)
torchaudio.save('wav/clean_wavs.wav',clean_wavs.view(1,-1),16000)
# for data in valid_set:
# for key in data.keys():
# print(key)
# print()
# noisy_wavs, lens = data['noisy_sig']
# clean_wavs, lens = data['clean_sig']
# # print(len(noisy_wavs))
wav = 'wav/MIC1_000_3.wav'
# wav = 'wav/noisy_wavs.wav'
noisy_wavs = sb.dataio.dataio.read_audio(wav)
noisy_wavs = noisy_wavs.reshape(1, -1)
print(noisy_wavs.shape)
noisy_feats, noisy_feats_contex = se_brain.compute_feats(noisy_wavs)
print(noisy_feats.shape)
se_brain.on_evaluate_start(max_key="pesq")
se_brain.on_stage_start(sb.Stage.VALID, epoch=None)
se_brain.modules.eval()
noisy_feats = noisy_feats.to(se_brain.device)
noisy_wavs = noisy_wavs.to(se_brain.device)
noisy_feats_contex = noisy_feats_contex.to(se_brain.device)
# Masking is done here with the "signal approximation (SA)" algorithm.
# The masked input is compared directly with clean speech targets.
mask = se_brain.modules.model(noisy_feats_contex)
print("noisy_feats_contex.shape:{}".format(noisy_feats_contex.shape))
predict_spec = torch.mul(mask, noisy_feats)
# predict_spec = mask
print("predict_spec:{}".format(predict_spec.shape))
print("predict_spec:{}".format(predict_spec.device))
print("noisy_wavs:{}".format(noisy_wavs.shape))
print("noisy_wavs:{}".format(noisy_wavs.device))
# Also return predicted wav, for evaluation. Note that this could
# also be used for a time-domain loss term.
predict_wav = se_brain.hparams.resynth(
torch.expm1(predict_spec), noisy_wavs
)
print(predict_wav.shape)
filename = os.path.split(wav)[-1].split('.')[0]
save_name = os.path.join('wav/output',filename+'_enh2.wav')
print('save enh to {}'.format(save_name))
sb.dataio.dataio.write_audio(save_name, torch.squeeze(predict_wav.to('cpu')), 16000)
| 33.31746
| 88
| 0.718676
|
bb2ee642ecd4eba759208c336f08cf0f20c62b99
| 302
|
py
|
Python
|
packages/pyright-internal/src/tests/samples/paramSpec6.py
|
lipovsek/pytea
|
c536515a5e5947fac8871784323ba7eddc58956d
|
[
"MIT"
] | null | null | null |
packages/pyright-internal/src/tests/samples/paramSpec6.py
|
lipovsek/pytea
|
c536515a5e5947fac8871784323ba7eddc58956d
|
[
"MIT"
] | null | null | null |
packages/pyright-internal/src/tests/samples/paramSpec6.py
|
lipovsek/pytea
|
c536515a5e5947fac8871784323ba7eddc58956d
|
[
"MIT"
] | null | null | null |
# This sample tests that ParamSpecs support parameters with default values.
from typing import Callable, ParamSpec, TypeVar
V = TypeVar("V")
P = ParamSpec("P")
def foo(fn: Callable[P, V]) -> Callable[P, V]:
...
def bar(baz: str, qux: str = "") -> str:
...
foo(bar)("")
| 16.777778
| 76
| 0.589404
|
96c3e0f9d2fec71d73a40bd85991d3ee41ed9183
| 5,309
|
py
|
Python
|
xls.py
|
tordable/XlsxReports
|
d3786f862a32038b97c632e8f6c5624229ed8f9b
|
[
"Apache-2.0"
] | null | null | null |
xls.py
|
tordable/XlsxReports
|
d3786f862a32038b97c632e8f6c5624229ed8f9b
|
[
"Apache-2.0"
] | null | null | null |
xls.py
|
tordable/XlsxReports
|
d3786f862a32038b97c632e8f6c5624229ed8f9b
|
[
"Apache-2.0"
] | 1
|
2020-03-22T13:04:34.000Z
|
2020-03-22T13:04:34.000Z
|
"""Proxy interfaces and mocks for the XLS library.
This module contains interfaces to the XLS library and mocks, which will allow
switching the underlying implementation if necessary.
"""
__author__ = 'jt@javiertordable.com'
__copyright__ = "Copyright (C) 2014 Javier Tordable"
import xlsxwriter
def new_workbook(filename):
return _WorkbookImpl(filename)
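# Usage sketch (file name and cell values are illustrative):
#   wb = new_workbook('report.xlsx')
#   sheet = wb.add_worksheet('Data')
#   fmt = wb.add_format()
#   fmt.set_bg_color('#CCCCCC')
#   sheet.write(0, 0, 'Hello', fmt)
#   wb.close()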
class Workbook(object):
"""A XLS workbook."""
def add_worksheet(self, name):
"""Adds a new sheet to the workbook and returns it."""
pass
def get_worksheet(self, index):
"""Returns a worksheet with the given index."""
pass
def add_format(self):
"""Returns a new format."""
pass
def close(self):
"""Closes the workbook after all editing is complete."""
pass
class MockWorkbook(Workbook):
"""A mock implementation of the Workbook."""
def __init__(self):
self.sheets = []
self.formats = []
def add_worksheet(self, name):
sheet = MockSheet(name)
self.sheets.append(sheet)
return sheet
def get_worksheet(self, index):
return self.sheets[index]
def add_format(self):
fmt = MockFormat()
self.formats.append(fmt)
return fmt
def close(self):
pass
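# In tests, MockWorkbook/MockSheet can stand in for the real implementation:
# values written with MockSheet.write(row, col, value) can be read back via
# MockSheet.read(row, col) for assertions.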
class _WorkbookImpl(Workbook):
"""Implementation of a workbook using the XlsxWriter library."""
def __init__(self, filename):
self._wb = xlsxwriter.Workbook(filename)
def add_worksheet(self, name):
sheet = self._wb.add_worksheet(name)
return _SheetImpl(sheet)
def get_worksheet(self, index):
        sheet = self._wb.worksheets()[index]
        return _SheetImpl(sheet)
def add_format(self):
return _FormatImpl(self._wb)
def close(self):
self._wb.close()
class Format(object):
"""A format used in a workbook to determine cell appearance."""
pass
class MockFormat(Format):
"""A mock implementation of the Format."""
def __init__(self):
self.properties = {}
def set_bg_color(self, bg_color):
self.properties['bg_color'] = bg_color
def num_properties(self):
return len(self.properties)
def get_property(self, property_name):
return self.properties[property_name]
class _FormatImpl(Format):
"""Implementation of a format using the XlsxWriter library."""
def __init__(self, workbook):
self._fmt = workbook.add_format()
def set_bg_color(self, bg_color):
self._fmt.set_bg_color(bg_color)
class Sheet(object):
"""A sheet in a XLS report."""
def get_name(self):
"""Returns the name of the sheet."""
pass
def set_default_row(self, hide_unused_rows=False):
"""Sets properties for the default row."""
pass
def set_column(self, first_col, last_col, width=None, format=None,
options=None):
"""Sets properties of the column."""
pass
def write(self, row, column, value, format=None):
"""Writes a value in the given row and column cell using the given format.
"""
pass
class MockSheet(Sheet):
"""A mock implementation of the Sheet."""
def __init__(self, name):
self.name = name
self.cell_contents = {}
self.cell_formats = {}
self.properties = {}
def get_name(self):
return self.name
def set_default_row(self, hide_unused_rows=False):
self.properties['hide_unused_rows_by_default'] = hide_unused_rows
def set_column(self, first_col, last_col, width=None, format=None,
options=None):
# TODO(tordable): Consider storing per-column attributes.
self.properties['column_options'] = \
[first_col, last_col, width, format, str(options)]
def get_property(self, property_name):
return self.properties[property_name]
def write(self, row, column, value, format=None):
position = (row, column)
self.cell_contents[position] = value
self.cell_formats[position] = format
def read(self, row, column):
"""Reads a value in the cell.
This method is not in the Sheet interface.
"""
position = (row, column)
if position in self.cell_contents:
return self.cell_contents[position]
else:
return None
def __str__(self):
ret = ''
for position, value in self.cell_contents.items():
ret += '(' + str(position[0]) + ',' + str(position[1]) + ') = '
ret += str(value) + '\n'
return ret
class _SheetImpl(Sheet):
"""Implementation of a sheet using the XlsxWriter library."""
def __init__(self, sheet):
self._sh = sheet
def get_name(self):
return self._sh.get_name()
def set_default_row(self, hide_unused_rows=False):
self._sh.set_default_row(hide_unused_rows=hide_unused_rows)
def set_column(self, first_col, last_col, width=None, format=None,
options=None):
if format is not None:
column_format = format._fmt
else:
column_format = None
self._sh.set_column(first_col, last_col, width, column_format, options)
def write(self, row, column, value, format=None):
# TODO(tordable): Use the proper type if possible.
if value is not None and format is not None:
# The actual format passed to the XlsxWriter library is the inner format
# of the _FormatImpl.
self._sh.write(row, column, value, format._fmt)
elif value is not None and format is None:
self._sh.write(row, column, value)
else:
self._sh.write_blank(row, column, None)
| 24.808411
| 78
| 0.680166
|
5ed1e27881fe2110ad8f8ad5a9067f50aedf8670
| 19,776
|
py
|
Python
|
flax/nn/attention.py
|
skye/flax
|
23a91dbc27dd182e26f196546468d33238ca5735
|
[
"Apache-2.0"
] | null | null | null |
flax/nn/attention.py
|
skye/flax
|
23a91dbc27dd182e26f196546468d33238ca5735
|
[
"Apache-2.0"
] | null | null | null |
flax/nn/attention.py
|
skye/flax
|
23a91dbc27dd182e26f196546468d33238ca5735
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 The Flax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Attention core modules for Flax."""
from collections.abc import Iterable # pylint: disable=g-importing-member
import warnings
from .. import jax_utils
from . import base
from . import initializers
from . import stochastic
from flax import struct
import jax
from jax import lax
from jax import random
import jax.numpy as jnp
from .linear import default_kernel_init
from .linear import DenseGeneral
import numpy as onp
def dot_product_attention(query,
key,
value,
dtype=jnp.float32,
bias=None,
axis=None,
broadcast_dropout=True,
dropout_rng=None,
dropout_rate=0.,
deterministic=False,
precision=None):
"""Computes dot-product attention given query, key, and value.
This is the core function for applying attention based on
https://arxiv.org/abs/1706.03762. It calculates the attention weights given
query and key and combines the values using the attention weights. This
function supports multi-dimensional inputs.
Args:
query: queries for calculating attention with shape of `[batch_size, dim1,
dim2, ..., dimN, num_heads, mem_channels]`.
key: keys for calculating attention with shape of `[batch_size, dim1, dim2,
..., dimN, num_heads, mem_channels]`.
value: values to be used in attention with shape of `[batch_size, dim1,
dim2,..., dimN, num_heads, value_channels]`.
dtype: the dtype of the computation (default: float32)
bias: bias for the attention weights. This can be used for incorporating
autoregressive mask, padding mask, proximity bias.
    axis: axes over which the attention is applied.
broadcast_dropout: bool: use a broadcasted dropout along batch dims.
dropout_rng: JAX PRNGKey: to be used for dropout
dropout_rate: dropout rate
deterministic: bool, deterministic or not (to apply dropout)
precision: numerical precision of the computation see `jax.lax.Precision`
for details.
Returns:
Output of shape `[bs, dim1, dim2, ..., dimN, num_heads, value_channels]`.
"""
assert key.shape[:-1] == value.shape[:-1]
assert (query.shape[0:1] == key.shape[0:1] and
query.shape[-1] == key.shape[-1])
if axis is None:
axis = tuple(range(1, key.ndim - 2))
if not isinstance(axis, Iterable):
axis = (axis,)
assert key.ndim == query.ndim
assert key.ndim == value.ndim
for ax in axis:
if not (query.ndim >= 3 and 1 <= ax < query.ndim - 2):
raise ValueError('Attention axis must be between the batch '
'axis and the last-two axes.')
depth = query.shape[-1]
n = key.ndim
# batch_dims is <bs, <non-attention dims>, num_heads>
batch_dims = tuple(onp.delete(range(n), axis + (n - 1,)))
# q & k -> (bs, <non-attention dims>, num_heads, <attention dims>, channels)
qk_perm = batch_dims + axis + (n - 1,)
key = key.transpose(qk_perm)
query = query.transpose(qk_perm)
# v -> (bs, <non-attention dims>, num_heads, channels, <attention dims>)
v_perm = batch_dims + (n - 1,) + axis
value = value.transpose(v_perm)
query = query / jnp.sqrt(depth).astype(dtype)
batch_dims_t = tuple(range(len(batch_dims)))
attn_weights = lax.dot_general(
query,
key, (((n - 1,), (n - 1,)), (batch_dims_t, batch_dims_t)),
precision=precision)
# apply attention bias: masking, dropout, proximity bias, etc.
if bias is not None:
attn_weights = attn_weights + bias
# normalize the attention weights
norm_dims = tuple(range(attn_weights.ndim - len(axis), attn_weights.ndim))
attn_weights = jax.nn.softmax(attn_weights, axis=norm_dims)
attn_weights = attn_weights.astype(dtype)
# apply dropout
if not deterministic and dropout_rate > 0.:
if dropout_rng is None:
dropout_rng = stochastic.make_rng()
keep_prob = jax.lax.tie_in(attn_weights, 1.0 - dropout_rate)
if broadcast_dropout:
# dropout is broadcast across the batch+head+non-attention dimension
dropout_dims = attn_weights.shape[-(2 * len(axis)):]
dropout_shape = (tuple([1] * len(batch_dims_t)) + dropout_dims)
keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape)
else:
keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape)
multiplier = (keep.astype(attn_weights.dtype) /
jnp.asarray(keep_prob, dtype=dtype))
attn_weights = attn_weights * multiplier
# compute the new values given the attention weights
wv_contracting_dims = (norm_dims, range(value.ndim - len(axis), value.ndim))
y = lax.dot_general(
attn_weights,
value, (wv_contracting_dims, (batch_dims_t, batch_dims_t)),
precision=precision)
# back to (bs, dim1, dim2, ..., dimN, num_heads, channels)
perm_inv = _invert_perm(qk_perm)
y = y.transpose(perm_inv)
return y
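# --- Illustrative usage sketch (added for clarity; not part of the original
# source). A minimal self-attention call on random data; the shapes and the
# PRNG seed are assumptions chosen only for demonstration.
def _example_dot_product_attention():
  rng = random.PRNGKey(0)
  qkv_shape = (2, 16, 4, 32)  # batch, length, num_heads, channels
  q = random.normal(rng, qkv_shape)
  # reuse q as key and value to form a toy self-attention call
  out = dot_product_attention(q, q, q, deterministic=True)
  assert out.shape == qkv_shape  # output keeps [bs, length, heads, channels]
  return out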
def _invert_perm(perm):
perm_inv = [0] * len(perm)
for i, j in enumerate(perm):
perm_inv[j] = i
return tuple(perm_inv)
@struct.dataclass
class _CacheEntry:
key: onp.ndarray
value: onp.ndarray
i: onp.ndarray
def scan_in_dim(*args, **kwargs):
warnings.warn('scan_in_dim moved to flax.jax_utils',
DeprecationWarning)
return jax_utils.scan_in_dim(*args, **kwargs)
class Cache(base.Collection):
"""Collect intermediate activations for efficient autoregressive decoding."""
def initialize_cache(self, shape, dtype=None):
"""Initialize the cache for the given input shape.
Args:
shape: the shape of the batch and attention dimensions.
dtype: the dtype of the autoregressive cache.
Returns:
the initialized cache
"""
if dtype is None:
dtype = jnp.float32
def _init(shape_data):
ndim = int(shape_data[0])
tail_shape = tuple(shape_data[1:])
full_shape = shape + tail_shape
if len(full_shape) != ndim:
raise ValueError('Shape should be a tuple with the shape of the batch'
'and attention dims.')
return _CacheEntry(key=jnp.zeros(full_shape, dtype=dtype),
value=jnp.zeros(full_shape, dtype=dtype),
i=jnp.zeros((), jnp.uint32))
return Cache(jax.tree_map(_init, self.state))
jax.tree_util.register_pytree_node(
Cache, base.iterate_collection, base.collection_from_iterable)
class MultiHeadDotProductAttention(base.Module):
"""Multi-head dot-product attention."""
def apply(self,
inputs_q,
inputs_kv,
num_heads,
dtype=jnp.float32,
qkv_features=None,
out_features=None,
attention_axis=None,
causal_mask=False,
padding_mask=None,
key_padding_mask=None,
segmentation=None,
key_segmentation=None,
cache=None,
broadcast_dropout=True,
dropout_rng=None,
dropout_rate=0.,
deterministic=False,
precision=None,
kernel_init=default_kernel_init,
bias_init=initializers.zeros,
bias=True,
attention_fn=dot_product_attention):
"""Applies multi-head dot product attention on the input data.
Projects the inputs into multi-headed query, key, and value vectors,
applies dot-product attention and projects the results to an output vector.
This can be used for encoder-decoder attention by specifying both `inputs_q`
and `inputs_kv` or for self-attention by only specifying `inputs_q` and
setting `inputs_kv` to None.
Args:
inputs_q: input queries of shape `[bs, dim1, dim2, ..., dimN, features]`.
inputs_kv: key/values of shape `[bs, dim1, dim2, ..., dimN, features]`
or None for self-attention, in which case key/values will be derived
from inputs_q.
num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])
should be divisible by the number of heads.
dtype: the dtype of the computation (default: float32)
qkv_features: dimension of the key, query, and value.
out_features: dimension of the last projection
attention_axis: axes over which the attention is applied ('None' means
attention over all axes, but batch, heads, and features).
causal_mask: boolean specifying whether to apply a causal mask on the
attention weights. If True, the output at timestep `t` will not depend
on inputs at timesteps strictly greater than `t`.
padding_mask: boolean specifying query tokens that are pad token.
key_padding_mask: boolean specifying key-value tokens that are pad token.
segmentation: segment indices for packed inputs_q data.
key_segmentation: segment indices for packed inputs_kv data.
cache: an instance of `flax.nn.attention.Cache` used for efficient
autoregressive decoding.
broadcast_dropout: bool: use a broadcasted dropout along batch dims.
dropout_rng: JAX PRNGKey: to be used for dropout
dropout_rate: dropout rate
deterministic: bool, deterministic or not (to apply dropout)
precision: numerical precision of the computation see `jax.lax.Precision`
for details.
kernel_init: initializer for the kernel of the Dense layers.
bias_init: initializer for the bias of the Dense layers.
bias: bool: whether pointwise QKVO dense transforms use bias.
attention_fn: dot_product_attention or compatible function. Accepts
query, key, value, and returns output of shape
`[bs, dim1, dim2, ..., dimN, num_heads, value_channels]`
Returns:
output of shape `[bs, dim1, dim2, ..., dimN, features]`.
"""
assert causal_mask or not cache, (
'Caching is only supported for causal attention.')
if inputs_kv is None:
inputs_kv = inputs_q
if attention_axis is None:
attention_axis = tuple(range(1, inputs_q.ndim - 1))
features = out_features or inputs_q.shape[-1]
qkv_features = qkv_features or inputs_q.shape[-1]
assert qkv_features % num_heads == 0, (
'Memory dimension must be divisible by number of heads.')
head_dim = qkv_features // num_heads
dense = DenseGeneral.partial(
axis=-1,
features=(num_heads, head_dim),
kernel_init=kernel_init,
bias_init=bias_init,
bias=bias,
precision=precision)
# project inputs_q to multi-headed q/k/v
# dimensions are then [bs, dims..., n_heads, n_features_per_head]
query, key, value = (dense(inputs_q, dtype=dtype, name='query'),
dense(inputs_kv, dtype=dtype, name='key'),
dense(inputs_kv, dtype=dtype, name='value'))
if cache:
assert isinstance(cache, Cache), 'cache must be an instance of Cache'
if self.is_initializing():
cache.store(onp.array((key.ndim,) + key.shape[-2:], dtype=onp.int32))
else:
cache_entry = cache.retrieve(None)
expected_shape = list(cache_entry.key.shape[:-2])
for attn_dim in attention_axis:
expected_shape[attn_dim] = 1
expected_shape = tuple(expected_shape) + inputs_q.shape[-1:]
if expected_shape != inputs_q.shape:
raise ValueError('Invalid shape provided, '
'expected shape %s instead got %s.' %
(expected_shape, inputs_q.shape))
if not isinstance(cache_entry, _CacheEntry):
raise ValueError('Cache is not initialized.')
cshape = cache_entry.key.shape
indices = [0] * len(cshape)
i = cache_entry.i
attn_size = onp.prod(onp.take(cshape, attention_axis))
for attn_dim in attention_axis:
attn_size //= cshape[attn_dim]
indices[attn_dim] = i // attn_size
i = i % attn_size
key = lax.dynamic_update_slice(cache_entry.key, key, indices)
value = lax.dynamic_update_slice(cache_entry.value, value, indices)
one = jnp.array(1, jnp.uint32)
cache_entry = cache_entry.replace(i=cache_entry.i + one,
key=key,
value=value)
cache.store(cache_entry)
# TODO(levskaya): verify this is still needed in translation decoding.
key_padding_mask = jnp.broadcast_to(
(jnp.arange(cshape[1]) < cache_entry.i), cshape[:2])
key_padding_mask = key_padding_mask.astype(jnp.float32)[..., None]
# create attention masks
mask_components = []
if causal_mask:
if cache and not self.is_initializing():
bias_pre_shape = (1,) * (key.ndim - 1)
attn_shape = tuple(onp.take(key.shape, attention_axis))
attn_size = onp.prod(attn_shape)
ii = jnp.arange(attn_size, dtype=jnp.uint32)
mask = ii < cache_entry.i
mask_components.append(mask.reshape(bias_pre_shape + attn_shape))
else:
mask_components.append(_make_causal_mask(key, attention_axis))
if padding_mask is not None:
if key_padding_mask is None:
key_padding_mask = padding_mask
padding_mask = make_padding_mask(
padding_mask_query=padding_mask,
padding_mask_key=key_padding_mask,
query_shape=query.shape,
key_shape=key.shape,
attention_axis=attention_axis)
mask_components.append(padding_mask)
if segmentation is not None:
if key_segmentation is None:
key_segmentation = segmentation
segmentation_mask = make_padding_mask(
padding_mask_query=segmentation,
padding_mask_key=key_segmentation,
query_shape=query.shape,
key_shape=key.shape,
attention_axis=attention_axis,
segmentation_mask=True)
mask_components.append(segmentation_mask)
if mask_components:
attention_mask = mask_components[0]
for component in mask_components[1:]:
attention_mask = jnp.logical_and(attention_mask, component)
# attention mask in the form of attention bias
attention_bias = lax.select(
attention_mask > 0, jnp.full(attention_mask.shape, 0.).astype(dtype),
jnp.full(attention_mask.shape, -1e10).astype(dtype))
else:
attention_bias = None
# apply attention
x = attention_fn(
query,
key,
value,
dtype=dtype,
axis=attention_axis,
bias=attention_bias,
precision=precision,
dropout_rng=dropout_rng,
dropout_rate=dropout_rate,
broadcast_dropout=broadcast_dropout,
deterministic=deterministic)
# back to the original inputs dimensions
out = DenseGeneral(
x,
features=features,
axis=(-2, -1),
kernel_init=kernel_init,
bias_init=bias_init,
bias=bias,
dtype=dtype,
precision=precision,
name='out')
return out
# TODO(flax-dev): Consider refactoring MultiHeadDotProductAttention and moving
# causal_mask and cache support into this class instead.
SelfAttention = MultiHeadDotProductAttention.partial(inputs_kv=None)
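# --- Illustrative usage sketch (added for clarity; not part of the original
# source). Self-attention over a [batch, length, features] input using the
# deprecated flax.nn module interface; the init/call pattern and the shapes
# below are assumptions based on the pre-Linen API, for demonstration only.
#
#   rng = random.PRNGKey(0)
#   x = jnp.ones((2, 16, 64))
#   attn_def = SelfAttention.partial(num_heads=4, causal_mask=True,
#                                    deterministic=True)
#   y, params = attn_def.init(rng, x)   # y.shape == (2, 16, 64)
#   y = attn_def.call(params, x)        # re-apply with the same parameters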
def make_padding_mask(padding_mask_query,
padding_mask_key,
query_shape,
key_shape,
attention_axis=None,
segmentation_mask=False):
"""Makes padding mask for attention weights.
In case of 1d inputs (i.e., `[bs, len, features]`), the attention weights will
be `[bs, len, len]` and this function makes a square matrix [len, len].
Args:
padding_mask_query: padding mask of query <bs, qdim1,.., qdimn>
padding_mask_key: padding mask of key <bs, key1,.., keyn>
query_shape: shape of the query
key_shape: shape of the key, which is equal to the shape of value.
attention_axis: axis over which attention is applied.
segmentation_mask: bool: if true use equality on cartesian product rather
than outer product for constructing segmentation masks.
Returns:
The padding mask for attention weights.
"""
assert query_shape[0] == key_shape[0]
assert len(query_shape) == len(key_shape)
ndim = len(key_shape)
if attention_axis is None:
attention_axis = tuple(range(1, ndim - 2))
assert isinstance(attention_axis, tuple)
for ax in attention_axis:
if not (ndim >= 3 and 1 <= ax < ndim - 2):
raise ValueError(
'Attention axis must be between the batch axis and the last-two axes.'
)
mask_shape_final = (query_shape[0], 1)  # batch_size, 1 (for all heads)
for ax in attention_axis:
mask_shape_final += (query_shape[ax],)
for ax in attention_axis:
mask_shape_final += (key_shape[ax],)
padding_mask_query = padding_mask_query[..., None]
padding_mask_key = padding_mask_key[..., None]
perm = (0,) + tuple(onp.flip(onp.arange(padding_mask_key.ndim)))[:-1]
if segmentation_mask:
mask = jnp.equal(padding_mask_query, padding_mask_key.transpose(perm))
else:
mask = jnp.multiply(padding_mask_query, padding_mask_key.transpose(perm))
mask = mask.reshape(mask_shape_final)
mask = jax.lax.convert_element_type(mask, jnp.float32)
return mask
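# --- Illustrative shape check (added for clarity; not part of the original
# source). For [bs, len, heads, channels] inputs the mask broadcasts against
# attention weights of shape [bs, 1, q_len, kv_len]; the sizes below are
# assumptions chosen only to demonstrate the expected output shape.
def _example_make_padding_mask():
  query_padding = jnp.ones((2, 5))  # bs=2, q_len=5; 1.0 marks real tokens
  key_padding = jnp.ones((2, 7))    # bs=2, kv_len=7
  mask = make_padding_mask(query_padding, key_padding,
                           query_shape=(2, 5, 4, 8), key_shape=(2, 7, 4, 8))
  assert mask.shape == (2, 1, 5, 7)
  return mask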
def _make_causal_mask(key, attention_axis=None, self_mask=False):
"""Makes a causal mask, to be used for masking out the future for attention.
In case of 1d inputs (i.e., `[bs, len, features]`), the attention weights will
be `[bs, len, len]` and this function makes a square matrix [len, len] with
zeros in upper triangle and ones in lower triangle.
Args:
key: shape of the key, which is equal to the shape of value and is
assumed to be equal to the shape of the query (since this is used in
self-attention when decoding).
attention_axis: axis over which attention is applied.
self_mask: whether to mask out the diagonal or not.
Returns:
A causal mask to be used to mask out future positions.
"""
if attention_axis is None:
attention_axis = tuple(range(1, key.ndim - 2))
assert isinstance(attention_axis, tuple)
for ax in attention_axis:
if not (key.ndim >= 3 and 1 <= ax < key.ndim - 2):
raise ValueError(
'Attention axis must be between the batch axis and the last-two axes.'
)
mask_shape = tuple([1] * (key.ndim - len(attention_axis) - 1))
mask_shape_final = mask_shape
for _ in range(2):
flatten_dim = 1
for ax in attention_axis:
mask_shape_final += (key.shape[ax],)
flatten_dim *= key.shape[ax]
mask_shape += (flatten_dim,)
def tri(n, m, k=0):
# Tie in the key to avoid the mask becoming a constant.
# This way XLA can construct the mask during computation and fuse it
# with the attention ops.
x = lax.tie_in(key, jnp.arange(n, dtype=jnp.int32))
y = lax.tie_in(key, jnp.arange(m, dtype=jnp.int32))
mask = lax.ge(
(lax.broadcast_in_dim(x, shape=(n, m), broadcast_dimensions=(0,))) + k,
lax.broadcast(y, [n]))
return mask
k = -1 if self_mask else 0
mask = tri(*mask_shape[-2:], k=k).reshape(mask_shape_final)
return mask
| 37.525617
| 80
| 0.662116
|
714903a5968c727361c7896d254c80865e586c8c
| 4,242
|
py
|
Python
|
dialogue-engine/src/programy/config/writer.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 104
|
2020-03-30T09:40:00.000Z
|
2022-03-06T22:34:25.000Z
|
dialogue-engine/src/programy/config/writer.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 25
|
2020-06-12T01:36:35.000Z
|
2022-02-19T07:30:44.000Z
|
dialogue-engine/src/programy/config/writer.py
|
cotobadesign/cotoba-agent-oss
|
3833d56e79dcd7529c3e8b3a3a8a782d513d9b12
|
[
"MIT"
] | 10
|
2020-04-02T23:43:56.000Z
|
2021-05-14T13:47:01.000Z
|
"""
Copyright (c) 2020 COTOBA DESIGN, Inc.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import argparse
import yaml
import codecs
from programy.utils.logging.ylogger import YLogger
from programy.clients.events.console.config import ConsoleConfiguration
from programy.clients.restful.yadlan.config import YadlanRestConfiguration
class ConfigurationWriter(object):
def add_to_config(self, config_data, configuration, defaults=True):
config_data[configuration.id] = {}
configuration.to_yaml(config_data[configuration.id], defaults)
def execute(self, args):
config_data = {}
if args is None:
raise Exception("Args empty")
if args.clients is None:
raise Exception("No clients defined")
if 'all' in args.clients or 'console' in args.clients:
self.add_to_config(config_data, ConsoleConfiguration(), args.defaults)
if 'all' in args.clients or 'yadlan' in args.clients:
self.add_to_config(config_data, YadlanRestConfiguration(name="yadlan"))
client_config = ConsoleConfiguration()
bot_config = client_config._bot_configs[0]
self.add_to_config(config_data, bot_config, args.defaults)
brain_config = bot_config._brain_configs[0]
self.add_to_config(config_data, brain_config, args.defaults)
self.write_yaml(args.file, config_data)
def executing_confiuguration(self, client, file_name, default=False):
client_config = client.client_configuration
config_data = {}
self.add_to_config(config_data, client_config, default)
bot_config = client_config._bot_configs[0]
self.add_to_config(config_data, bot_config, default)
brain_config = bot_config._brain_configs[0]
self.add_to_config(config_data, brain_config, default)
storage_config = client_config._storage
self.add_to_config(config_data[client_config.id], storage_config, default)
self.write_yaml(file_name, config_data)
def write_yaml(self, filename, data):
YLogger.debug(self, "Writing config file to [%s]" % filename)
try:
with codecs.open(filename, 'w', 'utf-8') as outfile:
yaml.dump(data, outfile, default_flow_style=False, encoding="utf-8", allow_unicode=True)
except Exception:
YLogger.debug(self, "Failed to write config file [%s]" % filename)
@staticmethod
def create_arguments():
parser = argparse.ArgumentParser(description='Program-Y Configuration Writer')
parser.add_argument('-f', '--file', default="config.yaml", help="Name of file to create")
parser.add_argument('-c', '--clients', nargs='+', help="Name of client config to create, use multiple times or all")
parser.add_argument('-d', '--defaults', action='store_true', help="Create all config settings with default values")
return parser
@staticmethod
def run():
parser = ConfigurationWriter.create_arguments()
try:
app = ConfigurationWriter()
app.execute(parser.parse_args())
except Exception as e:
parser.print_help()
raise(e)
if __name__ == '__main__':
ConfigurationWriter.run()
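# --- Illustrative invocation sketch (added for clarity; not part of the
# original source). Writing a console client configuration with default
# values filled in; the file name and client list are assumptions chosen
# only for demonstration:
#
#   python writer.py -f config.yaml -c console -d
#
# or programmatically:
#
#   writer = ConfigurationWriter()
#   parser = ConfigurationWriter.create_arguments()
#   writer.execute(parser.parse_args(['-f', 'config.yaml', '-c', 'console', '-d']))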
| 39.64486
| 126
| 0.710985
|
955a75155d98c7dc24e38b214babdb42f66d4ceb
| 1,509
|
py
|
Python
|
snakemake/configs/hg38_template.py
|
saketkc/re-ribo-smk
|
c9326cbafdfa060e22e9af692d9146c37f5035ba
|
[
"BSD-2-Clause"
] | 1
|
2019-09-11T17:09:48.000Z
|
2019-09-11T17:09:48.000Z
|
snakemake/configs/hg38_template.py
|
saketkc/re-ribo-smk
|
c9326cbafdfa060e22e9af692d9146c37f5035ba
|
[
"BSD-2-Clause"
] | null | null | null |
snakemake/configs/hg38_template.py
|
saketkc/re-ribo-smk
|
c9326cbafdfa060e22e9af692d9146c37f5035ba
|
[
"BSD-2-Clause"
] | null | null | null |
RAWDATA_DIR = '/staging/as/skchoudh/re-ribo-datasets/hg38/'
OUT_DIR = '/staging/as/skchoudh/re-ribo-analysis/hg38/'
GENOME_FASTA = '/home/cmb-06/as/skchoudh/genomes/hg38/fasta/Homo_sapiens.GRCh38.dna_sm.primary_assembly.fa'
CHROM_SIZES = '/home/cmb-06/as/skchoudh/genomes/hg38/fasta/Homo_sapiens.GRCh38.dna_sm.primary_assembly.sizes'
STAR_INDEX = '/home/cmb-06/as/skchoudh/genomes/hg38/star_annotated_ribopod'
GTF_VERSION = 'v96'
GTF = '/home/cmb-06/as/skchoudh/genomes/hg38/annotation/Homo_sapiens.GRCh38.96.gtf'
GENE_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/hg38/v96/gene.bed.gz'
STAR_CODON_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/hg38/v96/start_codon.bed.gz'
STOP_CODON_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/hg38/v96/stop_codon.bed.gz'
CDS_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/hg38/v96/cds.bed.gz'
UTR5_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/hg38/v96/utr5.bed.gz'
UTR3_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/hg38/v96/utr3.bed.gz'
INTRON_BED = '/home/cmb-06/as/skchoudh/github_projects/riboraptor/riboraptor/annotation/hg38/v96/intron.bed.gz'
ORIENTATIONS = ['5prime', '3prime']
STRANDS = ['pos', 'neg', 'combined']
FRAGMENT_LENGTHS = range(18, 39)
RIBOTRICER_ANNOTATION_PREFIX = '/home/cmb-06/as/skchoudh/genomes/hg38/ribotricer_v96_annotation_longest'
| 79.421053
| 120
| 0.804506
|
ac6c0a3f63e05b517cefeb8ec8d0477c0ee1fb5c
| 1,231
|
py
|
Python
|
proxySTAR_V3/certbot/venv/lib/python2.7/site-packages/pylint/test/functional/useless_else_on_loop.py
|
mami-project/lurk
|
98c293251e9b1e9c9a4b02789486c5ddaf46ba3c
|
[
"Apache-2.0"
] | 2
|
2017-07-05T09:57:33.000Z
|
2017-11-14T23:05:53.000Z
|
Libraries/Python/pylint/v1.4.4/pylint/test/functional/useless_else_on_loop.py
|
davidbrownell/Common_Environment
|
4015872aeac8d5da30a6aa7940e1035a6aa6a75d
|
[
"BSL-1.0"
] | 1
|
2019-01-17T14:26:22.000Z
|
2019-01-17T22:56:26.000Z
|
Libraries/Python/pylint/v1.4.4/pylint/test/functional/useless_else_on_loop.py
|
davidbrownell/Common_Environment
|
4015872aeac8d5da30a6aa7940e1035a6aa6a75d
|
[
"BSL-1.0"
] | 1
|
2017-08-31T14:33:03.000Z
|
2017-08-31T14:33:03.000Z
|
"""Check for else branches on loops with break an return only."""
__revision__ = 0
def test_return_for():
"""else + return is not accetable."""
for i in range(10):
if i % 2:
return i
else: # [useless-else-on-loop]
print('math is broken')
def test_return_while():
"""else + return is not accetable."""
while True:
return 1
else: # [useless-else-on-loop]
print('math is broken')
while True:
def short_fun():
"""A function with a loop."""
for _ in range(10):
break
else: # [useless-else-on-loop]
print('or else!')
while True:
while False:
break
else: # [useless-else-on-loop]
print('or else!')
for j in range(10):
pass
else: # [useless-else-on-loop]
print('fat chance')
for j in range(10):
break
def test_return_for2():
"""no false positive for break in else
https://bitbucket.org/logilab/pylint/issue/117/useless-else-on-loop-false-positives
"""
for i in range(10):
for i in range(i):
if i % 2:
break
else:
break
else:
print('great math')
| 21.982143
| 88
| 0.5329
|
71a4b596d622e2b3fc75847a5b62e8a0a1e48ddd
| 17,896
|
py
|
Python
|
src/crunch.py
|
kawarimidoll/Crunch
|
3f1e8eee97c2e3cba34d95c05213513b5328291c
|
[
"Apache-2.0"
] | 3,047
|
2016-04-11T16:58:28.000Z
|
2022-03-29T19:51:25.000Z
|
src/crunch.py
|
Warlockk/Crunch
|
dee3cba5f1bfcf73263333c078373c298c3bba31
|
[
"Apache-2.0"
] | 79
|
2016-04-11T17:05:22.000Z
|
2022-03-21T07:46:41.000Z
|
src/crunch.py
|
Warlockk/Crunch
|
dee3cba5f1bfcf73263333c078373c298c3bba31
|
[
"Apache-2.0"
] | 143
|
2016-08-06T03:03:10.000Z
|
2022-03-18T00:27:22.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==================================================================
# crunch
# A PNG file optimization tool built on pngquant and zopflipng
#
# Copyright 2019 Christopher Simpkins
# MIT License
#
# Source Repository: https://github.com/chrissimpkins/Crunch
# ==================================================================
import sys
import os
import shutil
import struct
import subprocess
import time
from subprocess import CalledProcessError
from multiprocessing import Lock, Pool, cpu_count
# Locks
stdstream_lock = Lock()
logging_lock = Lock()
# Application Constants
VERSION = "4.0.0"
VERSION_STRING = "crunch v" + VERSION
# Processor Constant
# - Modify this to an integer value if you want to fix the number of
# processes spawned during execution. The process number is
# automatically defined during source execution when this is defined
# as a value of 0
PROCESSES = 0
# Dependency Path Constants for Command Line Executable
# - Redefine these path strings to use system-installed versions of
# pngquant and zopflipng (e.g. to "/usr/local/bin/[executable]")
PNGQUANT_CLI_PATH = os.path.join(os.path.expanduser("~"), "pngquant", "pngquant")
ZOPFLIPNG_CLI_PATH = os.path.join(os.path.expanduser("~"), "zopfli", "zopflipng")
# Crunch Directory (dot directory in $HOME)
CRUNCH_DOT_DIRECTORY = os.path.join(os.path.expanduser("~"), ".crunch")
# Log File Path Constants
LOGFILE_PATH = os.path.join(CRUNCH_DOT_DIRECTORY, "crunch.log")
HELP_STRING = """
==================================================
crunch
Copyright 2019 Christopher Simpkins
MIT License
Source: https://github.com/chrissimpkins/Crunch
==================================================
crunch is a command line executable that performs lossy optimization of one or more png image files with pngquant and zopflipng.
Usage:
$ crunch [image path 1]...[image path n]
Options:
--help, -h application help
--usage application usage
--version, -v application version
"""
USAGE = "$ crunch [image path 1]...[image path n]"
def main(argv):
# Create the Crunch dot directory in $HOME if it does not exist
# Only used for macOS GUI and macOS right-click menu service execution
if len(argv) > 0 and argv[0] in ("--gui", "--service"):
if not os.path.isdir(CRUNCH_DOT_DIRECTORY):
os.makedirs(CRUNCH_DOT_DIRECTORY)
# clear the text in the log file before every script execution
# logging is only maintained for the last execution of the script
open(LOGFILE_PATH, "w").close()
# ////////////////////////
# ANSI COLOR DEFINITIONS
# ////////////////////////
if not is_gui(sys.argv):
ERROR_STRING = "[ " + format_ansi_red("!") + " ]"
else:
ERROR_STRING = "[ ! ]"
# //////////////////////////////////
# CONFIRM ARGUMENT PRESENT
# //////////////////////////////////
if len(argv) == 0:
sys.stderr.write(
ERROR_STRING + " Please include one or more paths to PNG image files as "
"arguments to the script." + os.linesep
)
sys.exit(1)
# //////////////////////////////////////
# HELP, USAGE, VERSION option handling
# //////////////////////////////////////
if argv[0] in ("-v", "--version"):
print(VERSION_STRING)
sys.exit(0)
elif argv[0] in ("-h", "--help"):
print(HELP_STRING)
sys.exit(0)
elif argv[0] == "--usage":
print(USAGE)
sys.exit(0)
# ////////////////////////
# DEFINE DEPENDENCY PATHS
# ////////////////////////
PNGQUANT_EXE_PATH = get_pngquant_path()
ZOPFLIPNG_EXE_PATH = get_zopflipng_path()
# ////////////////////
# PARSE PNG_PATH_LIST
# ////////////////////
if is_gui(argv):
png_path_list = argv[1:]
else:
png_path_list = argv
# //////////////////////////////////
# COMMAND LINE ERROR HANDLING
# //////////////////////////////////
NOTPNG_ERROR_FOUND = False
for png_path in png_path_list:
# Not a file test
if not os.path.isfile(png_path): # is not an existing file
sys.stderr.write(
ERROR_STRING
+ " '"
+ png_path
+ "' does not appear to be a valid path to a PNG file"
+ os.linesep
)
sys.exit(1) # not a file, abort immediately
# PNG validity test
if not is_valid_png(png_path):
sys.stderr.write(
ERROR_STRING
+ " '"
+ png_path
+ "' is not a valid PNG file."
+ os.linesep
)
if is_gui(argv):
log_error(png_path + " is not a valid PNG file.")
NOTPNG_ERROR_FOUND = True
# Exit after checking all file requests and reporting on all invalid file paths (above)
if NOTPNG_ERROR_FOUND is True:
sys.stderr.write(
"The request was not executed successfully. Please try again with one or more valid PNG files."
+ os.linesep
)
if is_gui(argv):
log_error(
"The request was not executed successfully. Please try again with one or more valid PNG files."
)
sys.exit(1)
# Dependency error handling
if not os.path.exists(PNGQUANT_EXE_PATH):
sys.stderr.write(
ERROR_STRING
+ " pngquant executable was not identified on path '"
+ PNGQUANT_EXE_PATH
+ "'"
+ os.linesep
)
if is_gui(argv):
log_error(
"pngquant was not found on the expected path " + PNGQUANT_EXE_PATH
)
sys.exit(1)
elif not os.path.exists(ZOPFLIPNG_EXE_PATH):
sys.stderr.write(
ERROR_STRING
+ " zopflipng executable was not identified on path '"
+ ZOPFLIPNG_EXE_PATH
+ "'"
+ os.linesep
)
if is_gui(argv):
log_error(
"zopflipng was not found on the expected path " + ZOPFLIPNG_EXE_PATH
)
sys.exit(1)
# ////////////////////////////////////
# OPTIMIZATION PROCESSING
# ////////////////////////////////////
print("Crunching ...")
if len(png_path_list) == 1:
# there is only one PNG file, skip spawning of processes and just optimize it
optimize_png(png_path_list[0])
else:
processes = PROCESSES
# if not defined by user, start by defining spawned processes as number of available cores
if processes == 0:
processes = cpu_count()
# if total cores available is greater than number of files requested, limit to the latter number
if processes > len(png_path_list):
processes = len(png_path_list)
print(
"Spawning "
+ str(processes)
+ " processes to optimize "
+ str(len(png_path_list))
+ " image files..."
)
p = Pool(processes)
try:
p.map(optimize_png, png_path_list)
except Exception as e:
stdstream_lock.acquire()
sys.stderr.write("-----" + os.linesep)
sys.stderr.write(
ERROR_STRING
+ " Error detected during execution of the request."
+ os.linesep
)
sys.stderr.write(str(e) + os.linesep)
stdstream_lock.release()
if is_gui(argv):
log_error(str(e))
sys.exit(1)
# end of successful processing, exit code 0
if is_gui(argv):
log_info("Crunch execution ended.")
sys.exit(0)
# ///////////////////////
# FUNCTION DEFINITIONS
# ///////////////////////
def optimize_png(png_path):
img = ImageFile(png_path)
# define pngquant and zopflipng paths
PNGQUANT_EXE_PATH = get_pngquant_path()
ZOPFLIPNG_EXE_PATH = get_zopflipng_path()
# ////////////////////////
# ANSI COLOR DEFINITIONS
# ////////////////////////
if not is_gui(sys.argv):
ERROR_STRING = "[ " + format_ansi_red("!") + " ]"
else:
ERROR_STRING = "[ ! ]"
# --------------
# pngquant stage
# --------------
pngquant_options = (
" --quality=80-98 --skip-if-larger --force --strip --speed 1 --ext -crunch.png "
)
pngquant_command = (
PNGQUANT_EXE_PATH + pngquant_options + shellquote(img.pre_filepath)
)
try:
subprocess.check_output(pngquant_command, stderr=subprocess.STDOUT, shell=True)
except CalledProcessError as cpe:
if cpe.returncode == 98:
# this is the status code when file size increases with execution of pngquant.
# ignore at this stage, original file copied at beginning of zopflipng processing
# below if it is not present due to these errors
pass
elif cpe.returncode == 99:
# this is the status code when the image quality falls below the set min value
# ignore at this stage, original file copied at beginning of zopflipng processing
# below if it is not present due to these errors
pass
else:
stdstream_lock.acquire()
sys.stderr.write(
ERROR_STRING
+ " "
+ img.pre_filepath
+ " processing failed at the pngquant stage."
+ os.linesep
)
stdstream_lock.release()
if is_gui(sys.argv):
log_error(
img.pre_filepath
+ " processing failed at the pngquant stage. "
+ os.linesep
+ str(cpe)
)
return None
else:
raise cpe
except Exception as e:
if is_gui(sys.argv):
log_error(
img.pre_filepath
+ " processing failed at the pngquant stage. "
+ os.linesep
+ str(e)
)
return None
else:
raise e
# ---------------
# zopflipng stage
# ---------------
# use --filters=0 by default for quantized PNG files (based upon testing by CS)
zopflipng_options = " -y --filters=0 "
# confirm that a file with proper path was generated by pngquant
# pngquant does not write expected file path if the file was larger after processing
if not os.path.exists(img.post_filepath):
shutil.copy(img.pre_filepath, img.post_filepath)
# If pngquant did not quantize the file, permit zopflipng to attempt compression with multiple
# filters. This achieves better compression than the default approach for non-quantized PNG
# files, but takes significantly longer (based upon testing by CS)
zopflipng_options = " -y --lossy_transparent "
zopflipng_command = (
ZOPFLIPNG_EXE_PATH
+ zopflipng_options
+ shellquote(img.post_filepath)
+ " "
+ shellquote(img.post_filepath)
)
try:
subprocess.check_output(zopflipng_command, stderr=subprocess.STDOUT, shell=True)
except CalledProcessError as cpe:
stdstream_lock.acquire()
sys.stderr.write(
ERROR_STRING
+ " "
+ img.pre_filepath
+ " processing failed at the zopflipng stage."
+ os.linesep
)
stdstream_lock.release()
if is_gui(sys.argv):
log_error(
img.pre_filepath
+ " processing failed at the zopflipng stage. "
+ os.linesep
+ str(cpe)
)
return None
else:
raise cpe
except Exception as e:
if is_gui(sys.argv):
log_error(
img.pre_filepath
+ " processing failed at the pngquant stage. "
+ os.linesep
+ str(e)
)
return None
else:
raise e
# Check file size post-optimization and report comparison with pre-optimization file
img.get_post_filesize()
percent = img.get_compression_percent()
percent_string = "{0:.2f}%".format(percent)
# if compression occurred, color the percent string green
# otherwise, leave it default text color
if not is_gui(sys.argv) and percent < 100:
percent_string = format_ansi_green(percent_string)
# report percent original file size / post file path / size (bytes) to stdout (command line executable)
stdstream_lock.acquire()
print(
"[ "
+ percent_string
+ " ] "
+ img.post_filepath
+ " ("
+ str(img.post_size)
+ " bytes)"
)
stdstream_lock.release()
# report percent original file size / post file path / size (bytes) to log file (macOS GUI + right-click service)
if is_gui(sys.argv):
log_info(
"[ "
+ percent_string
+ " ] "
+ img.post_filepath
+ " ("
+ str(img.post_size)
+ " bytes)"
)
# -----------
# Utilities
# -----------
def fix_filepath_args(args):
arg_list = []
parsed_filepath = ""
for arg in args:
if arg[0] == "-":
# add command line options
arg_list.append(arg)
elif len(arg) > 2 and "." in arg[1:]:
# if format is `\w+\.\w+`, then this is a filename, not directory
# this is the end of a filepath string that may have had
# spaces in directories prior to this level. Let's recreate
# the entire original path
filepath = parsed_filepath + arg
arg_list.append(filepath)
# reset the temp string that is used to reconstruct the filepaths
parsed_filepath = ""
else:
# if the argument does not end with a .png, then there must have
# been a space in the directory paths, let's add it back
parsed_filepath = parsed_filepath + arg + " "
# return new argument list with fixed filepaths to calling code
return arg_list
def get_pngquant_path():
if sys.argv[1] == "--gui":
return "./pngquant"
elif sys.argv[1] == "--service":
return "/Applications/Crunch.app/Contents/Resources/pngquant"
else:
return PNGQUANT_CLI_PATH
def get_zopflipng_path():
if sys.argv[1] == "--gui":
return "./zopflipng"
elif sys.argv[1] == "--service":
return "/Applications/Crunch.app/Contents/Resources/zopflipng"
else:
return ZOPFLIPNG_CLI_PATH
def is_gui(arglist):
return "--gui" in arglist or "--service" in arglist
def is_valid_png(filepath):
# The PNG byte signature (https://www.w3.org/TR/PNG/#5PNG-file-signature)
expected_signature = struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10)
# open the file and read first 8 bytes
with open(filepath, "rb") as filer:
signature = filer.read(8)
# return boolean test result for first eight bytes == expected PNG byte signature
return signature == expected_signature
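# --- Illustrative check (added for clarity; not part of the original source).
# The eight bytes packed above equal the canonical PNG signature
# b"\x89PNG\r\n\x1a\n"; the file name below is an assumption for
# demonstration only:
#
#   struct.pack("8B", 137, 80, 78, 71, 13, 10, 26, 10) == b"\x89PNG\r\n\x1a\n"
#   is_valid_png("example.png")  # True only when the first 8 bytes match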
def log_error(errmsg):
current_time = time.strftime("%m-%d-%y %H:%M:%S")
logging_lock.acquire()
with open(LOGFILE_PATH, "a") as filewriter:
filewriter.write(current_time + "\tERROR\t" + errmsg + os.linesep)
filewriter.flush()
os.fsync(filewriter.fileno())
logging_lock.release()
def log_info(infomsg):
current_time = time.strftime("%m-%d-%y %H:%M:%S")
logging_lock.acquire()
with open(LOGFILE_PATH, "a") as filewriter:
filewriter.write(current_time + "\tINFO\t" + infomsg + os.linesep)
filewriter.flush()
os.fsync(filewriter.fileno())
logging_lock.release()
return None
def shellquote(filepath):
return "'" + filepath.replace("'", "'\\''") + "'"
def format_ansi_red(text):
if sys.stdout.isatty():
return "\033[0;31m" + text + "\033[0m"
else:
return text
def format_ansi_green(text):
if sys.stdout.isatty():
return "\033[0;32m" + text + "\033[0m"
else:
return text
# ///////////////////////
# OBJECT DEFINITIONS
# ///////////////////////
class ImageFile(object):
def __init__(self, filepath):
self.pre_filepath = filepath
self.post_filepath = self._get_post_filepath()
self.pre_size = self._get_filesize(self.pre_filepath)
self.post_size = 0
def _get_filesize(self, file_path):
return os.path.getsize(file_path)
def _get_post_filepath(self):
path, extension = os.path.splitext(self.pre_filepath)
return path + "-crunch" + extension
def get_post_filesize(self):
self.post_size = self._get_filesize(self.post_filepath)
def get_compression_percent(self):
ratio = float(self.post_size) / float(self.pre_size)
percent = ratio * 100
return percent
if __name__ == "__main__":
# bugfix for macOS GUI / right-click service filepath issue
# when spaces are included in the absolute path to the image
# file. https://github.com/chrissimpkins/Crunch/issues/30
# This workaround reconstructs the original filepaths
# that are split by the shell script into separate arguments
# when there are spaces in the macOS file path
if len(sys.argv) > 1 and sys.argv[1] in ("--gui", "--service"):
arg_list = fix_filepath_args(sys.argv[1:])
main(arg_list)
else:
# the command line executable assumes that users will appropriately quote
# or escape special characters (including spaces) on the command line,
# no need for the special parsing treatment above
main(sys.argv[1:])
| 32.18705
| 128
| 0.563925
|
8e3d6255f6c94abca48649d730309b0307ac7ef7
| 1,345
|
py
|
Python
|
scripts/artifacts/aggDictpasscode.py
|
xperylabhub/iLEAPP
|
fd1b301bf2094387f51ccdbd10ed233ce9abd687
|
[
"MIT"
] | null | null | null |
scripts/artifacts/aggDictpasscode.py
|
xperylabhub/iLEAPP
|
fd1b301bf2094387f51ccdbd10ed233ce9abd687
|
[
"MIT"
] | 1
|
2021-01-16T05:32:40.000Z
|
2021-01-16T05:32:40.000Z
|
scripts/artifacts/aggDictpasscode.py
|
xperylabhub/iLEAPP
|
fd1b301bf2094387f51ccdbd10ed233ce9abd687
|
[
"MIT"
] | null | null | null |
import glob
import os
import pathlib
import plistlib
import sqlite3
import json
from scripts.artifact_report import ArtifactHtmlReport
from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly
def get_aggDictpasscode(files_found, report_folder, seeker):
file_found = str(files_found[0])
db = open_sqlite_db_readonly(file_found)
cursor = db.cursor()
cursor.execute("""
select
date(dayssince1970*86400, 'unixepoch'),
key,
value
from
scalars
where key like 'com.apple.passcode.numpasscode%'
"""
)
all_rows = cursor.fetchall()
usageentries = len(all_rows)
data_list = []
if usageentries > 0:
for row in all_rows:
data_list.append((row[0], row[1], row[2]))
description = ''
report = ArtifactHtmlReport('Aggregate Dictionary Passcode State')
report.start_artifact_report(report_folder, 'Passcode State', description)
report.add_script()
data_headers = ('Day','Key','Value')
report.write_artifact_data_table(data_headers, data_list, file_found)
report.end_artifact_report()
tsvname = 'Agg Dict Dictionary Passcode State'
tsv(report_folder, data_headers, data_list, tsvname)
tlactivity = 'Aggregate Dictionary Passcode State'
timeline(report_folder, tlactivity, data_list, data_headers)
else:
logfunc("No Agg Dict Dictionary Data available")
| 26.9
| 98
| 0.762082
|
3f407d49b66279492130f5976db4929a4ef6459b
| 786
|
py
|
Python
|
problems/328.Odd_Even_Linked_List/AC_double_n.py
|
subramp-prep/leetcode
|
d125201d9021ab9b1eea5e5393c2db4edd84e740
|
[
"Unlicense"
] | null | null | null |
problems/328.Odd_Even_Linked_List/AC_double_n.py
|
subramp-prep/leetcode
|
d125201d9021ab9b1eea5e5393c2db4edd84e740
|
[
"Unlicense"
] | null | null | null |
problems/328.Odd_Even_Linked_List/AC_double_n.py
|
subramp-prep/leetcode
|
d125201d9021ab9b1eea5e5393c2db4edd84e740
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: illuz <iilluzen[at]gmail.com>
# File: AC_double_n.py
# Create Date: 2016-02-23 19:16:07
# Usage: AC_double_n.py
# Description:
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def oddEvenList(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
if not head:
return head
evenhead = head.next
odd, even = head, evenhead
while even and even.next:
odd.next = odd.next.next
even.next = even.next.next
odd, even = odd.next, even.next
odd.next = evenhead
return head
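# --- Illustrative trace (added for clarity; not part of the original source).
# Worked example of the in-place rewiring on the list 1->2->3->4->5:
#   start:  odd=1, even=evenhead=2
#   pass 1: 1->3..., 2->4..., then odd=3, even=4
#   pass 2: 3->5,    4->None, then odd=5, even=None
#   final:  odd.next = evenhead  =>  1->3->5->2->4
def _example_odd_even_list():
    nodes = [ListNode(i) for i in range(1, 6)]
    for first, second in zip(nodes, nodes[1:]):
        first.next = second
    head = Solution().oddEvenList(nodes[0])
    values = []
    while head:
        values.append(head.val)
        head = head.next
    assert values == [1, 3, 5, 2, 4]
    return values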
| 24.5625
| 44
| 0.55598
|
083d51c9cc420296deab3d711ae81b139f65138e
| 568
|
py
|
Python
|
randomId.py
|
koolshare/ngrok-1.7
|
09b64b9551940fd1ec26b387a21342802f4ec845
|
[
"Apache-2.0"
] | 87
|
2016-01-09T00:43:47.000Z
|
2021-06-22T06:06:21.000Z
|
randomId.py
|
koolshare/ngrok-1.7
|
09b64b9551940fd1ec26b387a21342802f4ec845
|
[
"Apache-2.0"
] | 2
|
2016-01-23T08:42:49.000Z
|
2018-10-02T13:18:43.000Z
|
randomId.py
|
koolshare/ngrok-1.7
|
09b64b9551940fd1ec26b387a21342802f4ec845
|
[
"Apache-2.0"
] | 70
|
2016-02-22T14:16:42.000Z
|
2021-09-23T09:47:50.000Z
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import random
import string
N=16
s = ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(N))
txt = '''
Server: etunnel.net
Port: 4443
Username: %s
Password: %s
Available subdomain: %s
Access methods:
http://<subdomain>.etunnel.net:8080
https://<subdomain>.etunnel.net
For example, access via http://%s.etunnel.net:8080
'''
#s2 = ['"' + s4 + '"' for s4 in 'web api jsr jsn jsv'.split()]
s2 = "route pi nas iio zeus".split()
rlt = str(s2).replace("'", '"')
print '{"userId":"%s","authId":"%s","dns":%s}' % (s2[0], s, rlt)
print txt % (s, s2[0], rlt, s2[1])
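# --- Illustrative output (added for clarity; not part of the original
# source). With the fixed word list above, the first print emits a line such
# as the following (the 16-character authId varies on every run):
#
#   {"userId":"route","authId":"<16 random chars>","dns":["route", "pi", "nas", "iio", "zeus"]}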
| 21.037037
| 99
| 0.609155
|
c3bf866fc15755d8e8d96722717dca6487fa9505
| 25,423
|
py
|
Python
|
src/GUI/tk.py
|
shmilee/gdpy3
|
2e007851fc87793c0038f7b1dacba729271e17a3
|
[
"MIT"
] | 4
|
2018-08-07T13:28:06.000Z
|
2021-03-08T04:31:20.000Z
|
src/GUI/tk.py
|
shmilee/gdpy3
|
2e007851fc87793c0038f7b1dacba729271e17a3
|
[
"MIT"
] | null | null | null |
src/GUI/tk.py
|
shmilee/gdpy3
|
2e007851fc87793c0038f7b1dacba729271e17a3
|
[
"MIT"
] | 3
|
2018-05-05T01:34:33.000Z
|
2022-03-07T15:57:10.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2021 shmilee
import os
import time
import numpy
import tempfile
import getpass
import tkinter
from tkinter import ttk, simpledialog, filedialog, messagebox
from tkinter.constants import *
from distutils.version import LooseVersion
from ..__about__ import __data_path__, __icon_name__, __gversion__
from ..glogger import getGLogger
from ..processors import get_processor, Processor_Names
from ..processors.lib import Processor_Lib
__all__ = ['GTkApp']
log = getGLogger('G')
class GTkApp(object):
'''
GUI(Graphical User Interface) made by tkinter.
'''
recent = os.path.join(
tempfile.gettempdir(), 'gdpy3-%s-recent' % getpass.getuser())
def __init__(self, path=None, ask_sftp=False, parallel='off'):
'''
Parameters
----------
path: str
case path, default ''
ask_sftp: bool
if no path given, ask for an sftp (not local) path, default False
parallel: str
'off', 'multiprocess' or 'mpi4py', default 'off'
'''
root = tkinter.Tk(className='gdpy3-gui')
img = tkinter.PhotoImage(file=os.path.join(
__data_path__, 'icon', '%s.gif' % __icon_name__))
root.tk.call('wm', 'iconphoto', root._w, "-default", img)
root.protocol("WM_DELETE_WINDOW", self.close_app)
style = ttk.Style()
font = ('Microsoft YaHei', 10)
width = 0
style.configure('.', font=font)
main = ttk.Frame(root, relief=RIDGE, borderwidth=2)
main.pack(fill=BOTH, expand=1)
log.debug('Main frame packed.')
# 1
w_frame_proc = ttk.Labelframe(main, text='1. Processor:', width=width)
w_str_path = tkinter.StringVar(value='') # default path ''
w_str_path.trace("w", self.save_case_path)
w_entry_path = ttk.Entry(
w_frame_proc, font=font, textvariable=w_str_path)
w_entry_path.grid(in_=w_frame_proc, row=0, column=0, padx=5, pady=5,
sticky=W+E)
w_path = ttk.Button(
w_frame_proc, text='Path', width=0, command=self.ask_case_path)
w_path.grid(in_=w_frame_proc, row=0, column=1, padx=5, pady=5)
w_str_proc = tkinter.StringVar()
names = ['%s%s' % (Processor_Lib[n][1][0], n) for n in Processor_Names]
w_select_proc = ttk.Combobox(
w_frame_proc, values=names, font=font,
textvariable=w_str_proc, state='readonly')
w_str_proc.set(names[0])
w_select_proc.grid(in_=w_frame_proc, row=1, column=0, padx=5, pady=5)
w_pick = ttk.Button(
w_frame_proc, text="Pick", width=0, command=self.after_pick)
w_pick.grid(in_=w_frame_proc, row=1, column=1, padx=5, pady=5)
w_frame_proc.grid(row=0, column=0, padx=10, pady=5, sticky=W+E)
# 2
w_frame_fig = ttk.Labelframe(main, text='2. Figure:', width=width)
w_str_filter = tkinter.StringVar(value='^.*/.*$')
w_entry_filter = ttk.Entry(
w_frame_fig, font=font, textvariable=w_str_filter)
w_entry_filter.grid(in_=w_frame_fig, row=0, column=0, padx=5, pady=5)
w_filter = ttk.Button(
w_frame_fig, text='Filter', width=0, command=self.after_filter)
w_filter.grid(in_=w_frame_fig, row=0, column=1, padx=5, pady=5)
w_list_fig = tkinter.Variable(value=[])
w_listbox_fig = tkinter.Listbox(
w_frame_fig, selectmode=SINGLE, exportselection=0, font=font,
listvariable=w_list_fig, state='normal')
w_scrollbar_fig = ttk.Scrollbar(
w_frame_fig, orient="vertical", command=w_listbox_fig.yview)
w_listbox_fig.config(yscrollcommand=w_scrollbar_fig.set)
w_listbox_fig.grid(in_=w_frame_fig, row=1, columnspan=2,
sticky=W+E, padx=5, pady=5)
w_scrollbar_fig.grid(in_=w_frame_fig, row=1, column=1,
sticky=E+N+S, padx=5, pady=5)
w_frame_fig.grid(row=1, column=0, padx=10, pady=5, sticky=W+E)
# 3
w_frame_panel = ttk.Labelframe(main, text='3. Panel:', width=width)
# 3 - VerticalScrolledFrame
w_kw_out_frame = ttk.Frame(w_frame_panel)
w_kw_scrollbar = ttk.Scrollbar(w_kw_out_frame, orient=VERTICAL)
w_kw_scrollbar.pack(fill=Y, side=RIGHT, expand=0)
w_kw_canvas = tkinter.Canvas(
w_kw_out_frame, bd=0, highlightthickness=0,
yscrollcommand=w_kw_scrollbar.set, width=0, height=160)
w_kw_canvas.pack(side=LEFT, fill=BOTH, anchor=W, expand=1)
w_kw_scrollbar.config(command=w_kw_canvas.yview)
w_kw_canvas.xview_moveto(0)
w_kw_canvas.yview_moveto(0)
w_kw_in_frame = ttk.Frame(w_kw_canvas)
w_kw_canvas.create_window(0, 0, window=w_kw_in_frame, anchor=NW)
def _configure_canvas_interiorframe(event):
w_kw_canvas.update_idletasks()
w_kw_canvas.configure(scrollregion=w_kw_canvas.bbox("all"))
w_kw_in_frame.bind('<Configure>', _configure_canvas_interiorframe)
def _on_mousewheel(event):
number = 0
# Linux wheel event: event.delta = 0, event.num = 4 or 5
# Windows wheel event: event.delta = -120 or 120 ?
if event.num == 5 or event.delta == -120:
number = 1 # down
if event.num == 4 or event.delta == 120:
number = -1 # up
log.debug('Wheel event: num %d, delta %d -> %d'
% (event.num, event.delta, number))
w_kw_canvas.yview_scroll(number, "units")
w_kw_canvas.bind("<MouseWheel>", _on_mousewheel)
w_kw_canvas.bind("<Button-4>", _on_mousewheel)
w_kw_canvas.bind("<Button-5>", _on_mousewheel)
w_kw_in_frame.bind("<MouseWheel>", _on_mousewheel)
w_kw_in_frame.bind("<Button-4>", _on_mousewheel)
w_kw_in_frame.bind("<Button-5>", _on_mousewheel)
w_kw_out_frame.pack(in_=w_frame_panel, side=TOP,
expand=1, fill=X, padx=5, pady=5)
w_plot = ttk.Button(
w_frame_panel, text='Plot', width=8, command=self.after_plot)
w_plot.pack(in_=w_frame_panel, side=BOTTOM, anchor=E, padx=5, pady=5)
w_frame_panel.grid(row=2, column=0, padx=10, pady=5, sticky=W+E)
# 4 - bottom
version_text = "Version %s" % __gversion__
w_info = tkinter.Label(
main, relief=RIDGE, borderwidth=1, anchor=CENTER,
font=(font[0], 8), text="%s\t© 2017-%s shmilee" % (
version_text, time.strftime('%Y')))
w_info.grid(row=3, column=0, sticky=W+E)
log.debug('Main frame filled.')
# X - for share
self.root = root
try:
import screeninfo
monitors = screeninfo.get_monitors()
monitor = sorted(monitors, key=lambda m: m.width, reverse=True)[0]
except Exception:
log.debug('No active monitors found!')
monitor = None
self.monitor = monitor
self.center(root)
self.img = img
self.processor_name = w_str_proc
self.figlabel_filter = w_str_filter
self.figlabels = w_list_fig
self.figlistbox = w_listbox_fig
self.figkwframe = w_kw_in_frame
self.pathlabel = w_str_path
self.ask_sftp = ask_sftp
self.parallel = parallel
# cache processor instances, key (type(processor).__name__, self.path)
self.cache_processors = {}
self.processor = None
# cache all figure kwargs widgets of different processors
# key [processor.name-processor.saltstr][figlabel]
self.cache_figkwslib = {}
self.figkws = {} # kwargs widgets mapped in panel
# cache all plotted figure windows of different processors
# key of window: [processor.name-processor.saltstr][accfiglabel]
self.cache_figwindows = {}
self.next_figwindow_index = 0
# X - events
w_select_proc.bind("<<ComboboxSelected>>", self.after_processor_name)
w_entry_filter.bind("<Return>", self.after_filter)
w_listbox_fig.bind("<<ListboxSelect>>", self.after_figlabel)
# X - start
if path:
self.path = path
self.save_case_path()
else:
if self.ask_sftp:
self.ask_case_path(N=2)
else:
self.ask_case_path(N=1)
self.root.title('gdpy3 - %s' % self.path)
if monitor:
log.info('Start Tk mainloop on monitor %s.' % monitor.name)
else:
log.info('Start Tk mainloop.')
self.root.mainloop()
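    # --- Illustrative usage sketch (added for clarity; not part of the
    # original source). Launching the GUI against a local case path; the
    # path below is an assumption chosen only for demonstration:
    #
    #   GTkApp(path='/tmp/case/gtc.out', parallel='off')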
def close_app(self):
# close and destroy all fig windows
for key in self.cache_figwindows.keys():
log.debug('Destroy figure windows of %s' % key)
for n, w in self.cache_figwindows[key].items():
log.debug('Destroy window: %s' % n)
w.destroy()
# close root window
log.debug('Destroy root window.')
self.root.destroy()
log.info('Quit, bye!')
self.root.quit()
def _get_path(self):
return self.pathlabel.get()
def _set_path(self, path):
self.pathlabel.set(path)
path = property(_get_path, _set_path)
def center(self, win):
win.update_idletasks()
width = win.winfo_width()
height = win.winfo_height()
if self.monitor:
x = self.monitor.x + (self.monitor.width // 2) - (width // 2)
y = self.monitor.y + (self.monitor.height // 2) - (height // 2)
else:
x = (win.winfo_screenwidth() // 2) - (width // 2)
y = (win.winfo_screenheight() // 2) - (height // 2)
win.geometry('{}x{}+{}+{}'.format(width, height, x, y))
def ask_case_path(self, N=1):
if self.ask_sftp:
for _ in range(N):
path = simpledialog.askstring(
"Input sftp path",
"Directory in SSH server, format: "
"'sftp://username@host[:port]##remote/path'",
initialvalue='sftp://',
parent=self.root)
if path:
self.path = path
return
else:
initialdir = None
if os.path.isfile(self.recent):
# read, get valid recent initialdir
try:
with open(self.recent, 'r', encoding='utf-8') as rf:
old_dir = os.path.dirname(rf.readline())
for _ in range(N * 2):
if os.path.isdir(old_dir):
initialdir = old_dir
break
else:
old_dir = os.path.dirname(old_dir)
except Exception:
log.debug('Error of getting initialdir.', exc_info=1)
for _ in range(N):
path = filedialog.askopenfilename(
parent=self.root, initialdir=initialdir)
if path:
self.path = path
self.save_case_path()
return
def save_case_path(self, *args):
if self.path and not self.path.startswith('sftp://'):
try:
with open(self.recent, 'w', encoding='utf-8') as rf:
rf.write(os.path.abspath(self.path))
except Exception:
log.debug('Error of saving recent path.', exc_info=1)
def reset_panel(self):
for n, w in self.figkws.items():
w.grid_forget()
w.pack_forget()
w.place_forget()
self.figkws = {}
def close_figwindows(self, processor):
key = '%s-%s' % (processor.name, processor.saltstr)
if key in self.cache_figwindows:
log.debug('Hide figure windows of %s' % key)
for n, w in self.cache_figwindows[key].items():
log.debug('Hide window: %s' % n)
w.wm_withdraw()
def after_pick(self):
if self.processor_name.get():
gdpcls = get_processor(
name=self.processor_name.get()[1:], parallel=self.parallel)
if self.path.startswith('sftp://'):
def tk_gepasswd(prompt):
return simpledialog.askstring(
"Input Password", prompt, show='*', parent=self.root)
from ..utils import GetPasswd
GetPasswd.set(tk_gepasswd)
if self.path.endswith(gdpcls.saltname):
self.path = self.path[:-len(gdpcls.saltname)]
# close and hide old fig windows
if self.processor:
self.close_figwindows(self.processor)
key = (gdpcls.__name__, self.path)
if key in self.cache_processors:
gdp = self.cache_processors[key]
else:
gdp = gdpcls(self.path)
self.root.title('gdpy3 - %s' % self.path)
if gdp.pckloader:
log.debug('Set processor for %s' % self.path)
self.processor = gdp
if key not in self.cache_processors:
self.cache_processors[key] = gdp
self.figlabel_filter.set('^.*/.*$')
self.figlabels.set(gdp.availablelabels)
self.figlistbox.selection_clear(0, END)
# reset panel, hide kw widgets
self.reset_panel()
else:
messagebox.showerror(message='Failed to get processor!')
else:
messagebox.showwarning(message='Select processor first!')
def after_filter(self, *args):
if self.processor:
self.figlabels.set(self.processor.refind(
self.figlabel_filter.get()))
self.figlistbox.selection_clear(0, END)
# reset panel
self.reset_panel()
else:
messagebox.showwarning(message='Pick processor first!')
def after_plot(self):
if not self.figlistbox.curselection():
messagebox.showwarning(message='Select a figure first!')
return
if not self.processor.visplter.name.startswith('mpl::'):
messagebox.showerror(message='%s not supported with Tk!'
% self.processor.plotter.name)
return
figlabel = self.figlabels.get()[self.figlistbox.curselection()[0]]
figkwargs = {k: v.value for k, v in self.figkws.items()}
log.debug('Collect figkwargs: %s' % figkwargs)
accfiglabel = self.processor.visplt(figlabel, show=False, **figkwargs)
if accfiglabel in self.processor.visplter.figures:
figure = self.processor.visplter.get_figure(accfiglabel)
else:
messagebox.showerror(message='Failed to get figure object!')
return
key = '%s-%s' % (self.processor.name, self.processor.saltstr)
if key not in self.cache_figwindows:
self.cache_figwindows[key] = {}
if accfiglabel in self.cache_figwindows[key]:
log.debug('Raise old figure window.')
self.cache_figwindows[key][accfiglabel].wm_deiconify()
else:
log.debug('Get new figure window.')
index = self.next_figwindow_index
self.next_figwindow_index += 1
self.cache_figwindows[key][accfiglabel] = MplFigWindow(
figure, accfiglabel, index, self, class_='gdpy3-gui')
def after_processor_name(self, event):
self.figlabel_filter.set('^.*/.*$')
self.figlabels.set([])
self.figlistbox.selection_clear(0, END)
# reset panel
self.reset_panel()
# close fig windows
if self.processor:
self.close_figwindows(self.processor)
def get_figkws_widgets(self, options):
controls = {}
for k, v in options.items():
if v['widget'] in (
'IntSlider', 'FloatSlider',
'IntRangeSlider', 'FloatRangeSlider'):
# width = 8 if v['widget'].startswith('Float') else 0
controls[k] = LabeledSpinBoxs(
self.figkwframe,
v['description'],
v['rangee'],
v['value'],
state='readonly', width=0)
elif v['widget'] in ('Dropdown', 'SelectMultiple'):
controls[k] = LabeledListbox(
self.figkwframe,
v['description'],
v['options'],
v['value'],
width=0, height=0)
elif v['widget'] in ('Checkbox',):
controls[k] = Checkbox(
self.figkwframe,
v['description'],
v['value'])
else:
pass
return controls
def after_figlabel(self, event):
if self.figlistbox.curselection():
figlabel = self.figlabels.get()[self.figlistbox.curselection()[0]]
# update panel
self.reset_panel()
key = '%s-%s' % (self.processor.name, self.processor.saltstr)
if key not in self.cache_figkwslib:
self.cache_figkwslib[key] = {}
if figlabel in self.cache_figkwslib[key]:
log.debug("Use old widgets")
self.figkws = self.cache_figkwslib[key][figlabel]
else:
log.debug("Gen new widgets")
result = self.processor.export(figlabel, what='options')
options = dict(**result['digoptions'], **result['visoptions'])
if options:
self.figkws = self.get_figkws_widgets(options)
else:
self.figkws = {}
self.cache_figkwslib[key][figlabel] = self.figkws
for n, w in self.figkws.items():
w.pack(anchor=W, padx=5, pady=5)
class LabeledSpinBoxs(ttk.Frame):
'''
Spinbox widgets with a Label widget indicating their description.
Parameters
----------
desc: str
description
rangee: tuple
(from_, to, step)
init_val: one or more int or float numbers
initial value, num or [num1, num2, ...]
If N>1 numbers given, N Spinboxs will be generated.
cnf, kw: options for Spinbox
'''
def __init__(self, master, desc, rangee, init_val=None, cnf={}, **kw):
super(LabeledSpinBoxs, self).__init__(master, borderwidth=1)
self.label = ttk.Label(self, text=desc)
from_, to, step = rangee
for _k in ['from_', 'to', 'textvariable']:
_ignore = kw.pop(_k, None)
if init_val is None:
init_val = from_
if isinstance(init_val, (int, float, numpy.number)):
init_val = [init_val]
self.variables = []
self.spinboxs = []
for i_val in init_val:
if (isinstance(step, (int, numpy.integer))
and isinstance(i_val, (int, numpy.integer))):
self.variables.append(tkinter.IntVar(self))
elif (isinstance(step, (float, numpy.floating))
and isinstance(i_val, (float, numpy.floating))):
self.variables.append(tkinter.DoubleVar(self))
else:
log.error('type error: var %s, step %s ' %
(type(i_val), type(step)))
raise ValueError("Only int, float number supported!")
self.variables[-1].set(i_val)
self.spinboxs.append(tkinter.Spinbox(
self, cnf=cnf,
from_=from_, to=to, increment=step,
textvariable=self.variables[-1], **kw))
# arrange in line
self.label.pack(side=LEFT, padx=2)
for sb in self.spinboxs:
sb.pack(side=LEFT, padx=2)
@property
def value(self):
if len(self.variables) == 1:
return self.variables[0].get()
else:
return [v.get() for v in self.variables]
class LabeledListbox(ttk.Frame):
'''
A Listbox widget with a Label widget indicating its description.
Parameters
----------
desc: str
description
items: list
items to select
init_val: initial value, default None
If init_val is list, selectmode of Listbox will be MULTIPLE,
otherwise, SINGLE.
cnf, kw: options for Listbox
'''
def __init__(self, master, desc, items, init_val=None, cnf={}, **kw):
super(LabeledListbox, self).__init__(master, borderwidth=1)
self.label = ttk.Label(self, text=desc)
self.label.pack(side=LEFT, anchor=NW, padx=2)
self._variable = tkinter.Variable(self, value=items)
for _k in ['listvariable', 'exportselection', 'selectmode']:
_ignore = kw.pop(_k, None)
if isinstance(init_val, list):
self._selectmode = MULTIPLE
else:
self._selectmode = SINGLE
self.listbox = tkinter.Listbox(
self, cnf={}, listvariable=self._variable,
exportselection=0, selectmode=self._selectmode, **kw)
self.listbox.selection_clear(0, END)
if init_val:
if not isinstance(init_val, list):
init_val = [init_val]
for i_val in init_val:
if i_val in items:
self.listbox.selection_set(items.index(i_val))
self.listbox.pack(side=LEFT, padx=2)
@property
def value(self):
items = self._variable.get()
selection = self.listbox.curselection()
if self._selectmode == MULTIPLE:
return [items[i] for i in selection]
else:
return items[selection[0]]
class Checkbox(ttk.Checkbutton):
'''Ttk Checkbutton widget, add w.value support.'''
def __init__(self, master, desc, init_val=False, **kw):
self._variable = tkinter.BooleanVar(master, value=init_val)
for _k in ['offvalue', 'onvalue', 'text', 'variable']:
_ignore = kw.pop(_k, None)
super(Checkbox, self).__init__(
master, offvalue=False, onvalue=True,
text=desc, variable=self._variable, **kw)
@property
def value(self):
return self._variable.get()
class MplFigWindow(tkinter.Toplevel):
'''Embed a Matplotlib figure to Tkinter GUI.'''
def __init__(self, fig, figlabel, index, app, cnf={}, **kw):
super(MplFigWindow, self).__init__(master=app.root, cnf=cnf, **kw)
self.title('%s - %d - %s' % (figlabel, index, app.path))
self.protocol("WM_DELETE_WINDOW", self.wm_withdraw)
import matplotlib
# matplotlib.use('TkAgg', warn=False, force=True)
import matplotlib.backends.backend_tkagg as tkagg
if LooseVersion(matplotlib.__version__) <= LooseVersion('2.1.2'):
log.debug('Recommend matplotlib>=2.2.0')
tkagg.NavigationToolbar2Tk = tkagg.NavigationToolbar2TkAgg
self.figure_label = figlabel
self.figure_backend = tkagg
self.figure_canvas = None
self.figure_toolbar = None
self.figure_update(fig)
self.left_right(app.monitor, right=index % 2)
def left_right(self, monitor, right=1):
if monitor:
width = int(0.45 * monitor.width)
height = int(0.8 * monitor.height)
x = monitor.x + int(0.05 * monitor.width) + right * width
y = monitor.y + int(0.1 * monitor.height)
else:
width = int(0.45 * self.winfo_screenwidth())
height = int(0.8 * self.winfo_screenheight())
x = int(0.05 * self.winfo_screenwidth()) + right * width
y = int(0.1 * self.winfo_screenheight())
self.geometry('{}x{}+{}+{}'.format(width, height, x, y))
def figure_on_key_event(self, event):
from matplotlib.backend_bases import key_press_handler
key_press_handler(event, self.figure_canvas, self.figure_toolbar)
def figure_update(self, fig):
if self.figure_canvas:
self.figure_canvas.get_tk_widget().destroy()
if self.figure_toolbar:
self.figure_toolbar.destroy()
if fig:
canvas = self.figure_backend.FigureCanvasTkAgg(fig, master=self)
canvas.draw()
toolbar = self.figure_backend.NavigationToolbar2Tk(canvas, self)
toolbar.update()
canvas.mpl_connect('key_press_event', self.figure_on_key_event)
canvas.get_tk_widget().pack(side=TOP, fill=BOTH, expand=1)
# toolbar.pack()
self.figure_canvas = canvas
self.figure_toolbar = toolbar
# monkey patch default filename
# see: FigureCanvasBase.get_default_filename()
# FigureCanvasBase.get_window_title(), 3.4 deprecated
label = self.figure_label.replace('/', '-').replace(':', '_')
tstr = time.strftime('%Y%m%d')
filetype = canvas.get_default_filetype()
name = '%s-%s.%s' % (label, tstr, filetype)
canvas.get_default_filename = lambda: name
else:
self.figure_canvas = None
self.figure_toolbar = None
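# --- Editor's usage sketch (not part of the original module) ---
# The widget classes above (LabeledSpinBoxs, LabeledListbox, Checkbox) each expose a
# read-only ``.value`` property, as their docstrings describe. This is a minimal,
# hypothetical illustration of composing them in a bare Tk window; the option names
# ('npoints', 'columns', 'grid') are invented and not options used by the GUI above.
if __name__ == '__main__':
    _root = tkinter.Tk()
    _npoints = LabeledSpinBoxs(_root, 'npoints', (0, 100, 1), 10)
    _columns = LabeledListbox(_root, 'columns', ['x', 'y', 'z'], ['x', 'y'])
    _grid = Checkbox(_root, 'grid', True)
    for _w in (_npoints, _columns, _grid):
        _w.pack(anchor=W, padx=5, pady=5)
    # _root.mainloop()  # uncomment to interact; current state is read via .value
    print(_npoints.value, _columns.value, _grid.value)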
| 41.13754
| 79
| 0.570389
|
80e77fc91f76e9514dd55f3228aa001a494990b7
| 969
|
py
|
Python
|
pygame_template.py
|
johneastman/pygame-template
|
bf87e1514cfb9d6aba39c341823605fd8d7b04aa
|
[
"MIT"
] | null | null | null |
pygame_template.py
|
johneastman/pygame-template
|
bf87e1514cfb9d6aba39c341823605fd8d7b04aa
|
[
"MIT"
] | null | null | null |
pygame_template.py
|
johneastman/pygame-template
|
bf87e1514cfb9d6aba39c341823605fd8d7b04aa
|
[
"MIT"
] | null | null | null |
# Pygame template - skeleton for a new pygame project
import pygame
import random
WIDTH = 360
HEIGHT = 480
FPS = 30
# define colors
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
# initialize pygame and create window
pygame.init()
pygame.mixer.init()
pygame.font.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("My Game")
clock = pygame.time.Clock()
all_sprites = pygame.sprite.Group()
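# --- Editor's example sprite (a minimal sketch, not part of the original template) ---
# Sprites added to `all_sprites` get updated once per frame by `all_sprites.update()`
# and drawn by `all_sprites.draw(screen)` in the loop below. The Player class here is
# a hypothetical illustration: a green box that drifts right and wraps around.
class Player(pygame.sprite.Sprite):
    def __init__(self):
        super().__init__()
        self.image = pygame.Surface((50, 40))
        self.image.fill(GREEN)
        self.rect = self.image.get_rect()
        self.rect.center = (WIDTH // 2, HEIGHT // 2)

    def update(self):
        # move right a little each frame and wrap around the screen edge
        self.rect.x += 5
        if self.rect.left > WIDTH:
            self.rect.right = 0

all_sprites.add(Player())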
# Game loop
running = True
while running:
# keep loop running at the right speed
clock.tick(FPS)
# Process input (events)
for event in pygame.event.get():
# check for closing window
if event.type == pygame.QUIT:
running = False
# Update
all_sprites.update()
# Draw / render
screen.fill(BLACK)
all_sprites.draw(screen)
# After drawing everything, update the screen
pygame.display.flip()
pygame.quit()
| 19.38
| 53
| 0.657379
|
975edf3b89633df244ec76d7154bcb38fb990213
| 20,191
|
py
|
Python
|
binary/util.py
|
alexb5dh/hazelcast-client-protocol
|
e3bb7920cb62707d701b382ac3859c2cb7d8ef21
|
[
"Apache-2.0"
] | null | null | null |
binary/util.py
|
alexb5dh/hazelcast-client-protocol
|
e3bb7920cb62707d701b382ac3859c2cb7d8ef21
|
[
"Apache-2.0"
] | null | null | null |
binary/util.py
|
alexb5dh/hazelcast-client-protocol
|
e3bb7920cb62707d701b382ac3859c2cb7d8ef21
|
[
"Apache-2.0"
] | null | null | null |
import struct
from os.path import exists, join
from binary import *
from binary.constants import *
from functools import partial
from util import *
from . import reference_objects
formats = {
'boolean': '<?',
'byte': '<B',
'int': '<I',
'long': '<q',
'short': '<H',
}
sizes = {
'boolean': BOOLEAN_SIZE_IN_BYTES,
'byte': BYTE_SIZE_IN_BYTES,
'int': INT_SIZE_IN_BYTES,
'long': LONG_SIZE_IN_BYTES,
'UUID': UUID_SIZE_IN_BYTES,
}
id_fmt = '0x%02x%02x%02x'
def read_definition(definition, protocol_defs_path):
file_path = join(protocol_defs_path, definition + '.yaml')
with open(file_path, 'r') as file:
return yaml.load(file, Loader=yaml.Loader)
def get_custom_type_definitions(protocol_defs_path):
custom_codec_defs_path = join(protocol_defs_path, 'custom')
if not exists(custom_codec_defs_path):
return {}
definitions = read_definition('Custom', custom_codec_defs_path)
result = {}
custom_types = definitions['customTypes']
for definition in custom_types:
result[definition['name']] = definition
return result
class Frame:
def __init__(self, content, flags=DEFAULT_FLAGS):
self.content = content
self.flags = flags
def encode_frame(self, is_final=False):
frame_length = SIZE_OF_FRAME_LENGTH_AND_FLAGS + len(self.content)
b = bytearray(frame_length)
struct.pack_into(formats['int'], b, FRAME_LENGTH_OFFSET, frame_length)
flags = self.flags | IS_FINAL_FLAG if is_final else self.flags
struct.pack_into(formats['short'], b, FLAGS_OFFSET, flags)
b[SIZE_OF_FRAME_LENGTH_AND_FLAGS:] = self.content
return b
NULL_FRAME = Frame(bytearray(0), IS_NULL_FLAG)
BEGIN_FRAME = Frame(bytearray(0), BEGIN_DATA_STRUCTURE_FLAG)
END_FRAME = Frame(bytearray(0), END_DATA_STRUCTURE_FLAG)
class ClientMessage:
def __init__(self):
self.frames = []
def add_frame(self, frame):
self.frames.append(frame)
def write(self, file):
n = len(self.frames)
for i in range(n):
frame = self.frames[i]
is_last_frame = i == (n - 1)
file.write(frame.encode_frame(is_last_frame))
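# --- Editor's illustrative sketch (not part of the original module) ---
# A ClientMessage is an ordered list of frames; each frame is serialized by
# encode_frame() as <frame length (int)> <flags (short)> <content>, and write()
# sets IS_FINAL_FLAG on the last frame. The payload below is invented.
if __name__ == '__main__':
    import io
    _msg = ClientMessage()
    _msg.add_frame(Frame(bytearray(b'\x01\x02\x03')))
    _msg.add_frame(NULL_FRAME)
    _buf = io.BytesIO()
    _msg.write(_buf)
    print(len(_buf.getvalue()), 'bytes of encoded frames')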
class Encoder:
def __init__(self, protocol_defs_path, version):
self.custom_type_definitions = get_custom_type_definitions(protocol_defs_path)
self.custom_type_encoder = CustomTypeEncoder(self, self.custom_type_definitions)
self.var_sized_encoder = VarSizedParamEncoder(self)
self.version = version
def encode(self, message_def, fix_sized_params_offset, set_partition_id=False, is_event=False, is_null_test=False):
params = filter_new_params(message_def.get('params', []), self.version)
fix_sized_params = fixed_params(params)
var_sized_params = var_size_params(params)
client_message = ClientMessage()
initial_frame = FixSizedParamEncoder.create_initial_frame(fix_sized_params, message_def['id'],
fix_sized_params_offset, set_partition_id,
is_null_test)
if is_event:
initial_frame.flags |= IS_EVENT_FLAG
client_message.add_frame(initial_frame)
self.var_sized_encoder.encode_var_sized_frames(var_sized_params, client_message, is_null_test)
return client_message
class FixSizedParamEncoder:
@staticmethod
def create_initial_frame(fix_sized_params, message_id, offset, set_partition_id, is_null_test=False):
content_size = sum([sizes[p['type']] for p in fix_sized_params])
content = bytearray(offset + content_size)
struct.pack_into(formats['int'], content, TYPE_FIELD_OFFSET, message_id)
if set_partition_id:
struct.pack_into(formats['int'], content, PARTITION_ID_FIELD_OFFSET, -1 & 0xffffffff)
for param in fix_sized_params:
value = reference_objects.objects.get(param['type'])
FixSizedParamEncoder.pack_into(content, offset, param['type'], value, is_null_test and param['nullable'])
offset += sizes[param['type']]
return Frame(content, UNFRAGMENTED_MESSAGE)
@staticmethod
def encode_fix_sized_entry_list_frame(client_message, key_type, value_type):
entry_size = sizes[key_type] + sizes[value_type]
obj = reference_objects.map_objects[key_type][value_type]
content = bytearray(entry_size * len(obj))
offset = 0
for key in obj:
FixSizedParamEncoder.pack_into(content, offset, key_type, key)
offset += sizes[key_type]
FixSizedParamEncoder.pack_into(content, offset, value_type, obj[key])
offset += sizes[value_type]
client_message.add_frame(Frame(content))
@staticmethod
def encode_fix_sized_list_frame(client_message, item_type):
obj = reference_objects.list_objects[item_type]
content = bytearray(sizes[item_type] * len(obj))
offset = 0
for item in obj:
FixSizedParamEncoder.pack_into(content, offset, item_type, item)
offset += sizes[item_type]
client_message.add_frame(Frame(content))
@staticmethod
def pack_into(buffer, offset, type, value, should_be_null=False):
if type == 'UUID':
struct.pack_into(formats['boolean'], buffer, offset, should_be_null)
if should_be_null:
return
offset += sizes['boolean']
struct.pack_into(formats['long'], buffer, offset, value.most_sig_bits)
offset += sizes['long']
struct.pack_into(formats['long'], buffer, offset, value.least_sig_bits)
else:
struct.pack_into(formats[type], buffer, offset, value)
class CustomTypeEncoder:
def __init__(self, encoder, custom_type_definitions):
self.encoder = encoder
self.custom_type_definitions = custom_type_definitions
def encode_custom_type(self, client_message, custom_type_name, is_null_test=False):
if is_null_test:
client_message.add_frame(NULL_FRAME)
return
definition = self.custom_type_definitions.get(custom_type_name)
params = filter_new_params(definition.get('params', []), self.encoder.version)
fix_sized_params = fixed_params(params)
fix_sized_new_params = new_params(definition['since'], fix_sized_params)
var_sized_params = var_size_params(params)
should_add_begin_frame = (len(fix_sized_params) > len(fix_sized_new_params)) or len(fix_sized_params) == 0
if should_add_begin_frame:
client_message.add_frame(BEGIN_FRAME)
initial_frame = self.create_initial_frame(custom_type_name, fix_sized_params)
if initial_frame is not None:
if not should_add_begin_frame:
initial_frame.flags |= BEGIN_DATA_STRUCTURE_FLAG
client_message.add_frame(initial_frame)
self.encoder.var_sized_encoder.encode_var_sized_frames(var_sized_params, client_message)
client_message.add_frame(END_FRAME)
@staticmethod
def create_initial_frame(custom_type_name, fix_sized_params):
content_size = sum([sizes[p['type']] for p in fix_sized_params])
if content_size == 0:
return None
content = bytearray(content_size)
offset = 0
specific_values = reference_objects.objects.get(custom_type_name, None)
for param in fix_sized_params:
specific_value = specific_values.get(param['name'], None) if specific_values is not None else None
value = specific_value if specific_value is not None else reference_objects.objects.get(param['type'])
FixSizedParamEncoder.pack_into(content, offset, param['type'], value)
offset += sizes[param['type']]
return Frame(content)
def encoder_for(self, param_type, is_null_test=False):
return lambda client_message: self.encode_custom_type(client_message, param_type, is_null_test)
class VarSizedParamEncoder:
def __init__(self, encoder):
self.encoder = encoder
self.var_sized_encoders = {
'byteArray': self.encode_byte_array_frame,
'longArray': self.encode_long_array_frame,
'String': self.encode_string_frame,
'Data': self.encode_data_frame,
'EntryList_Integer_UUID': partial(FixSizedParamEncoder.encode_fix_sized_entry_list_frame,
key_type='int', value_type='UUID'),
'EntryList_UUID_Long': partial(FixSizedParamEncoder.encode_fix_sized_entry_list_frame,
key_type='UUID', value_type='long'),
'EntryList_UUID_UUID': partial(FixSizedParamEncoder.encode_fix_sized_entry_list_frame,
key_type='UUID', value_type='UUID'),
'EntryList_Integer_Long': partial(FixSizedParamEncoder.encode_fix_sized_entry_list_frame,
key_type='int', value_type='long'),
'EntryList_Integer_Integer': partial(FixSizedParamEncoder.encode_fix_sized_entry_list_frame,
key_type='int', value_type='int'),
'EntryList_Long_byteArray': self.encode_long_byte_array_entry_list,
'EntryList_UUID_List_Integer': self.encode_uuid_integer_list_entry_list,
'List_Integer': partial(FixSizedParamEncoder.encode_fix_sized_list_frame, item_type='int'),
'List_Long': partial(FixSizedParamEncoder.encode_fix_sized_list_frame, item_type='long'),
'List_UUID': partial(FixSizedParamEncoder.encode_fix_sized_list_frame, item_type='UUID'),
'ListCN_Data': partial(self.encode_multi_frame_list, encoder=self.encode_data_frame),
'List_Data': partial(self.encode_multi_frame_list, encoder=self.encode_data_frame),
'List_ScheduledTaskHandler': partial(self.encode_multi_frame_list, encoder=self.encoder.custom_type_encoder
.encoder_for('ScheduledTaskHandler')),
'SqlPage': partial(self.encode_sqlpage)
}
def encode_var_sized_frames(self, var_sized_params, client_message, is_null_test=False):
for param in var_sized_params:
param_type = param['type']
self.encode_var_sized_frame(client_message, param_type, is_null_test and param['nullable'])
def encode_var_sized_frame(self, client_message, param_type, nullable=False):
if nullable:
client_message.add_frame(NULL_FRAME)
return
if is_var_sized_list(param_type) or is_var_sized_list_contains_nullable(param_type):
item_type = param_type.split('_', 1)[1]
self.encode_multi_frame_list(client_message, self.encoder_for(item_type))
elif is_var_sized_map(param_type) or is_var_sized_entry_list(param_type):
key_type = param_type.split('_', 2)[1]
value_type = param_type.split('_', 2)[2]
self.encode_multi_frame_map(client_message, self.encoder_for(key_type), self.encoder_for(value_type))
else:
self.encoder_for(param_type)(client_message)
@staticmethod
def encode_multi_frame_list(client_message, encoder):
client_message.add_frame(BEGIN_FRAME)
encoder(client_message)
client_message.add_frame(END_FRAME)
@staticmethod
def encode_multi_frame_map(client_message, key_encoder, value_encoder):
client_message.add_frame(BEGIN_FRAME)
key_encoder(client_message)
value_encoder(client_message)
client_message.add_frame(END_FRAME)
@staticmethod
def encode_byte_array_frame(client_message):
client_message.add_frame(Frame(reference_objects.BYTEARRAY))
@staticmethod
def encode_long_array_frame(client_message):
content = bytearray(len(reference_objects.LONGARRAY) * LONG_SIZE_IN_BYTES)
offset = 0
for item in reference_objects.LONGARRAY:
struct.pack_into(formats['long'], content, offset, item)
offset += LONG_SIZE_IN_BYTES
client_message.add_frame(Frame(content))
@staticmethod
def encode_string_frame(client_message, value=None):
if value is None:
value = reference_objects.STRING
client_message.add_frame(Frame(value.encode('utf-8')))
@staticmethod
def encode_data_frame(client_message):
client_message.add_frame(Frame(reference_objects.DATA))
@staticmethod
def encode_sqlpage(client_message):
client_message.add_frame(BEGIN_FRAME)
client_message.add_frame(Frame(bytearray([True])))
obj = [4]
content = bytearray(INT_SIZE_IN_BYTES)
offset = 0
for item in obj:
FixSizedParamEncoder.pack_into(content, offset, 'int', item)
offset += INT_SIZE_IN_BYTES
client_message.add_frame(Frame(content))
content = bytearray(5 + INT_SIZE_IN_BYTES * 4)
FixSizedParamEncoder.pack_into(content, 0, 'byte', 2)
FixSizedParamEncoder.pack_into(content, 1, 'int', 4)
obj = [1, 2, 3, 4]
offset = 5
for item in obj:
FixSizedParamEncoder.pack_into(content, offset, 'int', item)
offset += INT_SIZE_IN_BYTES
client_message.add_frame(Frame(content))
client_message.add_frame(END_FRAME)
@staticmethod
def encode_long_byte_array_entry_list(client_message):
client_message.add_frame(BEGIN_FRAME)
VarSizedParamEncoder.encode_byte_array_frame(client_message)
client_message.add_frame(END_FRAME)
FixSizedParamEncoder.encode_fix_sized_list_frame(client_message, 'long')
def encode_uuid_integer_list_entry_list(self, client_message):
client_message.add_frame(BEGIN_FRAME)
self.encode_var_sized_frame(client_message, 'List_Integer')
client_message.add_frame(END_FRAME)
FixSizedParamEncoder.encode_fix_sized_list_frame(client_message, 'UUID')
def encoder_for(self, param_type):
encoder = self.var_sized_encoders.get(param_type, None)
if encoder is not None:
return encoder
if (param_type in CustomTypes) or (param_type in CustomConfigTypes):
return self.encoder.custom_type_encoder.encoder_for(param_type)
test_output_directories = {
SupportedLanguages.JAVA: 'hazelcast/src/test/java/com/hazelcast/client/protocol/compatibility',
# SupportedLanguages.CPP: '',
# SupportedLanguages.CS: '',
# SupportedLanguages.PY: '',
# SupportedLanguages.TS: '',
# SupportedLanguages.GO: '',
}
binary_output_directories = {
SupportedLanguages.JAVA: 'hazelcast/src/test/resources',
# SupportedLanguages.CPP: '',
# SupportedLanguages.CS: '',
# SupportedLanguages.PY: '',
# SupportedLanguages.TS: '',
# SupportedLanguages.GO: '',
}
reference_objects_dict = {
'boolean': 'aBoolean',
'byte': 'aByte',
'int': 'anInt',
'long': 'aLong',
'UUID': 'aUUID',
'byteArray': 'aByteArray',
'longArray': 'aLongArray',
'String': 'aString',
'Data': 'aData',
'EntryList_Integer_UUID': 'aListOfIntegerToUUID',
'EntryList_UUID_Long': 'aListOfUuidToLong',
'EntryList_Integer_Long': 'aListOfIntegerToLong',
'EntryList_Integer_Integer': 'aListOfIntegerToInteger',
'List_Integer': 'aListOfIntegers',
'List_Long': 'aListOfLongs',
'List_UUID': 'aListOfUUIDs',
'Address': 'anAddress',
'CacheEventData': 'aCacheEventData',
'DistributedObjectInfo': 'aDistributedObjectInfo',
'QueryCacheEventData': 'aQueryCacheEventData',
'RaftGroupId': 'aRaftGroupId',
'ScheduledTaskHandler': 'aScheduledTaskHandler',
'SimpleEntryView': 'aSimpleEntryView',
'WanReplicationRef': 'aWanReplicationRef',
'Xid': 'anXid',
'ErrorHolder': 'anErrorHolder',
'StackTraceElement': 'aStackTraceElement',
'CacheSimpleEntryListenerConfig': 'aCacheSimpleEntryListenerConfig',
'EventJournalConfig': 'anEventJournalConfig',
'EvictionConfigHolder': 'anEvictionConfigHolder',
'HotRestartConfig': 'aHotRestartConfig',
'ListenerConfigHolder': 'aListenerConfigHolder',
'AttributeConfig': 'aAttributeConfig',
'IndexConfig': 'anIndexConfig',
'BitmapIndexOptions': 'aBitmapIndexOptions',
'MapStoreConfigHolder': 'aMapStoreConfigHolder',
'MerkleTreeConfig': 'aMerkleTreeConfig',
'NearCacheConfigHolder': 'aNearCacheConfigHolder',
'NearCachePreloaderConfig': 'aNearCachePreloaderConfig',
'PredicateConfigHolder': 'aPredicateConfigHolder',
'QueryCacheConfigHolder': 'aQueryCacheConfigHolder',
'QueueStoreConfigHolder': 'aQueueStoreConfigHolder',
'RingbufferStoreConfigHolder': 'aRingbufferStoreConfigHolder',
'TimedExpiryPolicyFactoryConfig': 'aTimedExpiryPolicyFactoryConfig',
'DurationConfig': 'aDurationConfig',
'ClientBwListEntry': 'aClientBwListEntry',
'MCEvent': 'aMCEvent',
'EntryList_String_String': 'aListOfStringToString',
'EntryList_String_byteArray': 'aListOfStringToByteArray',
'EntryList_Long_byteArray': 'aListOfLongToByteArray',
'EntryList_String_EntryList_Integer_Long': 'aListOfStringToListOfIntegerToLong',
'EntryList_UUID_UUID': 'aListOfUUIDToUUID',
'EntryList_UUID_List_Integer': 'aListOfUUIDToListOfIntegers',
'EntryList_Data_Data': 'aListOfDataToData',
'EntryList_Data_List_Data': 'aListOfDataToListOfData',
'Map_String_String': 'aMapOfStringToString',
'Map_EndpointQualifier_Address': 'aMapOfEndpointQualifierToAddress',
'List_byteArray': 'aListOfByteArrays',
'List_CacheEventData': 'aListOfCacheEventData',
'List_CacheSimpleEntryListenerConfig': 'aListOfCacheSimpleEntryListenerConfigs',
'List_Data': 'aListOfData',
'List_List_Data': 'aListOfListOfData',
'ListCN_Data': 'aListOfData',
'List_ListCN_Data': 'aListOfListOfData',
'List_DistributedObjectInfo': 'aListOfDistributedObjectInfo',
'List_ListenerConfigHolder': 'aListOfListenerConfigHolders',
'List_AttributeConfig': 'aListOfAttributeConfigs',
'List_IndexConfig': 'aListOfIndexConfigs',
'List_MemberInfo': 'aListOfMemberInfos',
'List_QueryCacheConfigHolder': 'aListOfQueryCacheConfigHolders',
'List_QueryCacheEventData': 'aListOfQueryCacheEventData',
'List_ScheduledTaskHandler': 'aListOfScheduledTaskHandler',
'List_String': 'aListOfStrings',
'List_Xid': 'aListOfXids',
'List_StackTraceElement': 'aListOfStackTraceElements',
'List_ClientBwListEntry': 'aListOfClientBwListEntries',
'List_MCEvent': 'aListOfMCEvents',
'List_SqlColumnMetadata': 'aListOfSqlColumnMetadata',
'MergePolicyConfig': 'aMergePolicyConfig',
'CacheConfigHolder': 'aCacheConfigHolder',
'AnchorDataListHolder': 'anAnchorDataListHolder',
'PagingPredicateHolder': 'aPagingPredicateHolder',
'SqlQueryId': 'anSqlQueryId',
'SqlError': 'anSqlError',
'SqlColumnMetadata': 'anSqlColumnMetadata',
'CPMember': 'aCpMember',
'List_CPMember': 'aListOfCpMembers',
'MigrationState': 'aMigrationState',
'SqlPage': 'aSqlPage'
}
def create_environment_for_binary_generator(lang):
env = Environment(loader=PackageLoader(lang.value + '.binary', '.'), extensions=['jinja2.ext.loopcontrols'])
env.trim_blocks = True
env.lstrip_blocks = True
env.keep_trailing_newline = False
env.filters['capital'] = capital
env.globals['lang_types_encode'] = language_specific_funcs['lang_types_encode'][lang]
env.globals['reference_objects_dict'] = reference_objects_dict
env.globals['get_version_as_number'] = get_version_as_number
env.globals['new_params'] = new_params
env.globals['filter_new_params'] = filter_new_params
return env
binary_test_names = {
SupportedLanguages.JAVA: lambda version: '{type}Compatibility{null}Test_' + '_'.join(version.split('.')) + '.java',
# SupportedLanguages.CPP: '',
# SupportedLanguages.CS: '',
# SupportedLanguages.PY: '',
# SupportedLanguages.TS: '',
# SupportedLanguages.GO: '',
}
| 41.803313
| 119
| 0.695706
|
3274152719814b53020602fee540527b2afc4a45
| 481
|
py
|
Python
|
intervention_herd/intervention_herd/make_rand_pop.py
|
molkjar/bachelor
|
a0591691b820c6c8a45d16f8d55f3a7e80ea384b
|
[
"MIT"
] | null | null | null |
intervention_herd/intervention_herd/make_rand_pop.py
|
molkjar/bachelor
|
a0591691b820c6c8a45d16f8d55f3a7e80ea384b
|
[
"MIT"
] | null | null | null |
intervention_herd/intervention_herd/make_rand_pop.py
|
molkjar/bachelor
|
a0591691b820c6c8a45d16f8d55f3a7e80ea384b
|
[
"MIT"
] | null | null | null |
import covasim as cv
import pandas as pd
import sciris as sc
import numpy as np
import population_random as pr
if __name__ == '__main__':
#Without dispersion
cv.set_seed(1)
people = pr.generate_people(n_people=200e3, n_contacts=20, dispersion=None)
sc.saveobj('randppl.pop', people)
# With dispersion
cv.set_seed(1)
peopleDisp = pr.generate_people(n_people=200e3, n_contacts=20, dispersion=1.5)
sc.saveobj('randppl_disp.pop', peopleDisp)
| 21.863636
| 82
| 0.721414
|
b701c4e350bdb8afea11e79c30b30a4a7776cfc8
| 1,072
|
py
|
Python
|
flaskr/models/Review.py
|
RemineralizedWater/E-Commerce_Platform_Backend_354thestars
|
649787cbbb88655f8424067ecac87e994d6feff2
|
[
"Zlib"
] | null | null | null |
flaskr/models/Review.py
|
RemineralizedWater/E-Commerce_Platform_Backend_354thestars
|
649787cbbb88655f8424067ecac87e994d6feff2
|
[
"Zlib"
] | null | null | null |
flaskr/models/Review.py
|
RemineralizedWater/E-Commerce_Platform_Backend_354thestars
|
649787cbbb88655f8424067ecac87e994d6feff2
|
[
"Zlib"
] | null | null | null |
from datetime import date
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import update
from sqlalchemy import Column, Integer, String, ForeignKey, Date, Sequence, Float
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import relationship
from flaskr.db import Base
from flaskr.models.User import User
from flaskr.models.Product import Product
class Review(Base):
__tablename__ = 'review'
user_id = Column(Integer, ForeignKey('user.id'), primary_key=True)
product_id = Column(Integer, ForeignKey('product.id'), primary_key=True)
comment = Column(String)
score = Column(Float)
reply = Column(String)
user = relationship('User')
product = relationship('Product')
def to_json(self):
"""Returns the instance of status as a JSON
Returns:
dict -- JSON representation of the order
"""
return {
'username': self.user.username,
'comment': self.comment,
'score': self.score,
'reply': self.reply
}
| 30.628571
| 81
| 0.682836
|
4b286a302d7a5a660d475493c91fcf11934900c9
| 645
|
py
|
Python
|
presidio-analyzer/tests/test_us_bank_recognizer.py
|
gitcarbs/presidio
|
b547eb154212aea43e398bf533b2f9ef435cc6bb
|
[
"MIT"
] | null | null | null |
presidio-analyzer/tests/test_us_bank_recognizer.py
|
gitcarbs/presidio
|
b547eb154212aea43e398bf533b2f9ef435cc6bb
|
[
"MIT"
] | 1
|
2018-07-31T08:26:23.000Z
|
2018-07-31T08:26:23.000Z
|
presidio-analyzer/tests/test_us_bank_recognizer.py
|
gitcarbs/presidio
|
b547eb154212aea43e398bf533b2f9ef435cc6bb
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from tests import assert_result
from presidio_analyzer.predefined_recognizers import UsBankRecognizer
us_bank_recognizer = UsBankRecognizer()
entities = ["US_BANK_NUMBER"]
class TestUsBankRecognizer(TestCase):
def test_us_bank_account_invalid_number(self):
num = '1234567'
results = us_bank_recognizer.analyze(num, entities)
assert len(results) == 0
def test_us_bank_account_no_context(self):
num = '945456787654'
results = us_bank_recognizer.analyze(num, entities)
assert len(results) == 1
assert_result(results[0], entities[0], 0, 12, 0.05)
| 26.875
| 69
| 0.727132
|
765c2c742defd07a20d902622a760796afbb7c5c
| 172
|
py
|
Python
|
chipy_org/apps/profiles/forms.py
|
smandekar1/chipy.org
|
d70c6936a389d3e8b7c3530360f08d01aa4fa7f3
|
[
"MIT"
] | null | null | null |
chipy_org/apps/profiles/forms.py
|
smandekar1/chipy.org
|
d70c6936a389d3e8b7c3530360f08d01aa4fa7f3
|
[
"MIT"
] | null | null | null |
chipy_org/apps/profiles/forms.py
|
smandekar1/chipy.org
|
d70c6936a389d3e8b7c3530360f08d01aa4fa7f3
|
[
"MIT"
] | null | null | null |
from django.forms import ModelForm
from .models import UserProfile
class ProfileForm(ModelForm):
class Meta:
model = UserProfile
exclude = ('user',)
| 17.2
| 34
| 0.686047
|
f8bff3898521b5c6d738582e51074bc5ff32a577
| 1,749
|
py
|
Python
|
setup.py
|
Learn-code-strategies/DEXBot
|
ed85b12d8ad8d6ec373fd216a98e55b72f90b860
|
[
"MIT"
] | null | null | null |
setup.py
|
Learn-code-strategies/DEXBot
|
ed85b12d8ad8d6ec373fd216a98e55b72f90b860
|
[
"MIT"
] | null | null | null |
setup.py
|
Learn-code-strategies/DEXBot
|
ed85b12d8ad8d6ec373fd216a98e55b72f90b860
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
from dexbot import VERSION, APP_NAME
from setuptools import setup, find_packages
from distutils.command import build as build_module
cmd_class = {}
console_scripts = ['dexbot-cli = dexbot.cli:main']
install_requires = [
'bitshares==0.2.1',
'uptick==0.2.0',
'click',
'sqlalchemy',
'ruamel.yaml>=0.15.37',
'sdnotify',
'appdirs>=1.4.3',
'pycryptodomex==3.6.4',
'websocket-client==0.53.0'
]
class BuildCommand(build_module.build):
def run(self):
self.run_command('build_ui')
build_module.build.run(self)
try:
from pyqt_distutils.build_ui import build_ui
cmd_class = {
'build_ui': build_ui,
'build': BuildCommand
}
console_scripts.append('dexbot-gui = dexbot.gui:main')
install_requires.extend(["pyqt-distutils"])
except BaseException as e:
print("GUI not available: {}".format(e))
setup(
name=APP_NAME,
version=VERSION,
description='Trading bot for the DEX (BitShares)',
long_description=open('README.md').read(),
author='Codaone Oy',
author_email='support@codaone.com',
maintainer='Codaone Oy',
maintainer_email='support@codaone.com',
url='http://www.github.com/codaone/dexbot',
keywords=['DEX', 'bot', 'trading', 'api', 'blockchain'],
packages=find_packages(),
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
],
cmdclass=cmd_class,
entry_points={
'console_scripts': console_scripts
},
install_requires=install_requires,
include_package_data=True,
)
| 26.5
| 60
| 0.652373
|
dd54076f9feac354131ed27a6a15682987386005
| 13,628
|
py
|
Python
|
miricoord/mrs/toolversions/mrs_pipetools_cdp8b.py
|
mengesser/miricoord
|
e4f7bef16f8a2e6f1d46c97a2b3d78cd50de7bec
|
[
"BSD-3-Clause"
] | null | null | null |
miricoord/mrs/toolversions/mrs_pipetools_cdp8b.py
|
mengesser/miricoord
|
e4f7bef16f8a2e6f1d46c97a2b3d78cd50de7bec
|
[
"BSD-3-Clause"
] | null | null | null |
miricoord/mrs/toolversions/mrs_pipetools_cdp8b.py
|
mengesser/miricoord
|
e4f7bef16f8a2e6f1d46c97a2b3d78cd50de7bec
|
[
"BSD-3-Clause"
] | null | null | null |
#
"""
Useful python tools for working with the MIRI MRS.
This contains cdp8b specific code.
This version of the tools uses the JWST pipeline implementation
of the distortion solution to do the transformations,
and hooks into offline versions of the CRDS reference
files contained within this github repository.
Convert JWST v2,v3 locations (in arcsec) to MIRI MRS SCA x,y pixel locations.
Note that the pipeline uses a 0-indexed detector pixel (1032x1024) convention while
SIAF uses a 1-indexed detector pixel convention. The CDP files define
the origin such that (0,0) is the middle of the lower-left pixel
(1032x1024)- note that this is a CHANGE of convention from earlier CDP!
Author: David R. Law (dlaw@stsci.edu)
REVISION HISTORY:
10-Oct-2018 Written by David Law (dlaw@stsci.edu)
"""
import os as os
import numpy as np
import pdb as pdb
from astropy.modeling import models
from asdf import AsdfFile
from jwst import datamodels
from jwst.assign_wcs import miri
#############################
# Return the tools version
def version():
return 'cdp8b'
#############################
# Set the relevant CRDS distortion file based on channel (e.g., '1A')
def get_fitsreffile(channel):
rootdir=os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
rootdir=os.path.join(rootdir,'data/crds/')
wavefile=rootdir+'jwst_miri_mrs_wavelengthrange_cdp8b.asdf'
# Channel should be of the form (e.g.) '1A', '3C', etc
# See https://jwst-crds.stsci.edu//display_result/52cef902-ad77-4792-9964-d26a0a8a96a8
if ((channel == '1A') or (channel == '2A')):
distfile=rootdir+'jwst_miri_mrs12A_distortion_cdp8b.asdf'
regfile=rootdir+'jwst_miri_mrs12A_regions_cdp8b.asdf'
specfile=rootdir+'jwst_miri_mrs12A_specwcs_cdp8b.asdf'
elif ((channel == '3A') or (channel == '4A')):
distfile=rootdir+'jwst_miri_mrs34A_distortion_cdp8b.asdf'
regfile=rootdir+'jwst_miri_mrs34A_regions_cdp8b.asdf'
specfile=rootdir+'jwst_miri_mrs34A_specwcs_cdp8b.asdf'
elif ((channel == '1B') or (channel == '2B')):
distfile=rootdir+'jwst_miri_mrs12B_distortion_cdp8b.asdf'
regfile=rootdir+'jwst_miri_mrs12B_regions_cdp8b.asdf'
specfile=rootdir+'jwst_miri_mrs12B_specwcs_cdp8b.asdf'
elif ((channel == '3B') or (channel == '4B')):
distfile=rootdir+'jwst_miri_mrs34B_distortion_cdp8b.asdf'
regfile=rootdir+'jwst_miri_mrs34B_regions_cdp8b.asdf'
specfile=rootdir+'jwst_miri_mrs34B_specwcs_cdp8b.asdf'
elif ((channel == '1C') or (channel == '2C')):
distfile=rootdir+'jwst_miri_mrs12C_distortion_cdp8b.asdf'
regfile=rootdir+'jwst_miri_mrs12C_regions_cdp8b.asdf'
specfile=rootdir+'jwst_miri_mrs12C_specwcs_cdp8b.asdf'
elif ((channel == '3C') or (channel == '4C')):
distfile=rootdir+'jwst_miri_mrs34C_distortion_cdp8b.asdf'
regfile=rootdir+'jwst_miri_mrs34C_regions_cdp8b.asdf'
specfile=rootdir+'jwst_miri_mrs34C_specwcs_cdp8b.asdf'
else:
print('Failure!')
refs={'distortion': distfile, 'regions':regfile, 'specwcs':specfile, 'wavelengthrange':wavefile}
return refs
#############################
# Convenience function to turn '1A' type name into '12' and 'SHORT' type names
def bandchan(channel):
# Channel should be of the form (e.g.) '1A', '3C', etc
if ((channel == '1A') or (channel == '2A')):
newband='SHORT'
newchannel='12'
elif ((channel == '3A') or (channel == '4A')):
newband='SHORT'
newchannel='34'
elif ((channel == '1B') or (channel == '2B')):
newband='MEDIUM'
newchannel='12'
elif ((channel == '3B') or (channel == '4B')):
newband='MEDIUM'
newchannel='34'
elif ((channel == '1C') or (channel == '2C')):
newband='LONG'
newchannel='12'
elif ((channel == '3C') or (channel == '4C')):
newband='LONG'
newchannel='34'
else:
newband='FAIL'
newchannel='FAIL'
return newband,newchannel
#############################
# Convenience function to turn '12A' type name into '1A' and '2A' type names
def channel(detband):
if (detband == '12A'):
ch1='1A'
ch2='2A'
elif (detband == '12B'):
ch1='1B'
ch2='2B'
elif (detband == '12C'):
ch1='1C'
ch2='2C'
elif (detband == '34A'):
ch1='3A'
ch2='4A'
elif (detband == '34B'):
ch1='3B'
ch2='4B'
elif (detband == '34C'):
ch1='3C'
ch2='4C'
else:
ch1='FAIL'
ch2='FAIL'
return ch1,ch2
#############################
# Convenience function to return the rough middle wavelength of a given channel
# Note that this ISNT exact, just some valid value
def midwave(channel):
if (channel == '1A'):
thewave=5.32
elif (channel == '1B'):
thewave=6.145
elif (channel == '1C'):
thewave=7.09
elif (channel == '2A'):
thewave=8.135
elif (channel == '2B'):
thewave=9.395
elif (channel == '2C'):
thewave=10.85
elif (channel == '3A'):
thewave=12.505
elif (channel == '3B'):
thewave=14.5
elif (channel == '3C'):
thewave=16.745
elif (channel == '4A'):
thewave=19.29
elif (channel == '4B'):
thewave=22.47
elif (channel == '4C'):
thewave=26.2
return thewave
#############################
# Convenience function to return model distortion object
# for the x,y to alpha,beta,lam transform
def xytoablmodel(channel,**kwargs):
# Construct the reference data model in general JWST imager type
input_model = datamodels.ImageModel()
# Convert input of type '1A' into the band and channel that pipeline needs
theband,thechan=bandchan(channel)
# Set the filter in the data model meta header
input_model.meta.instrument.band = theband
input_model.meta.instrument.channel = thechan
# If passed input refs keyword, unpack and use it
if ('refs' in kwargs):
therefs=kwargs['refs']
# Otherwise use default reference files
else:
therefs=get_fitsreffile(channel)
distortion = miri.detector_to_abl(input_model, therefs)
# Return the distortion object that can then be queried
return distortion
#############################
# Convenience function to return model distortion object
# for the alpha,beta to v2,v3 transform
def abtov2v3model(channel,**kwargs):
# Construct the reference data model in general JWST imager type
input_model = datamodels.ImageModel()
# Convert input of type '1A' into the band and channel that pipeline needs
theband,thechan=bandchan(channel)
# Set the filter in the data model meta header
input_model.meta.instrument.band = theband
input_model.meta.instrument.channel = thechan
# If passed input refs keyword, unpack and use it
if ('refs' in kwargs):
therefs=kwargs['refs']
# Otherwise use default reference files
else:
therefs=get_fitsreffile(channel)
# The pipeline transform actually uses the triple
# (alpha,beta,lambda) -> (v2,v3,lambda)
basedistortion = miri.abl_to_v2v3l(input_model, therefs)
distortion = basedistortion
# Therefore we need to hack a reasonable wavelength onto our input, run transform,
# then hack it back off again
thewave=midwave(channel)
# Duplicate the beta value at first, then replace with wavelength value
map=models.Mapping((0,1,1)) | models.Identity(1) & models.Identity(1) & models.Const1D(thewave)
map.inverse=models.Mapping((0,1),n_inputs=3)
allmap= map | distortion | map.inverse
allmap.inverse= map | distortion.inverse | map.inverse
# Return the distortion object that can then be queried
return allmap
#############################
# MRS test reference data
# Provided by Polychronis 5/9/19
mrs_ref_data = {
'1A': {'x': np.array([76.0,354.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([0.05765538365149925,-0.017032619150995743]),
'beta': np.array([-0.17721014379699995,-1.240471006579]),
'lam': np.array([5.348546577257886,5.5136420569934925]),
'v2': np.array([-503.57285226785064,-503.4979806620663]),
'v3': np.array([-318.5749892859028,-317.5090073056335]),
},
'1B': {'x': np.array([76.0,355.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.012990737471741731,0.10766447914943456]),
'beta': np.array([-0.17720417669099997,-1.240429236837]),
'lam': np.array([6.168310398808807,6.358007642348213]),
'v2': np.array([-503.643100332753,-503.37069816112813]),
'v3': np.array([-318.72773306477103,-317.6938248759762]),
},
'1C': {'x': np.array([78.0,356.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([0.02871804339196271,-0.028315822861031847]),
'beta': np.array([-0.17720218765499984,-1.240415313585]),
'lam': np.array([7.006608159574103,7.218455147089075]),
'v2': np.array([-503.5598371896608,-503.45975848303885]),
'v3': np.array([-318.4367657801553,-317.3779485524358]),
},
'2A': {'x': np.array([574.0,719.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([0.022862344416012093,0.024104763006107532]),
'beta': np.array([0.27971818633699996,-1.3985909316610001]),
'lam': np.array([8.139463800053713, 8.423879719165456]),
'v2': np.array([-503.65782416704644, -503.3907046961389]),
'v3': np.array([-319.3709764579651, -317.71318662530217]),
},
'2B': {'x': np.array([570.0,715.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.04101483043351095,-0.021964438108625473]),
'beta': np.array([0.27972605223,-1.39863026115]),
'lam': np.array([9.49091778668766, 9.826112199836349]),
'v2': np.array([-503.872441161987, -503.58468453126545]),
'v3': np.array([-319.6066193816802, -317.9526192173689]),
},
'2C': {'x': np.array([573.0,718.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.08065540123411097,-0.07196315905207484]),
'beta': np.array([0.2797221192789996, -1.3986105964070001]),
'lam': np.array([10.909558387414732,11.292658213110698]),
'v2': np.array([-503.7062367371822, -503.4292038385116]),
'v3': np.array([-319.54349206004053, -317.8886490566051]),
},
'3A': {'x': np.array([918.0,827.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.14902640584477922, -0.1394111481404252]),
'beta': np.array([0.5847206674920002, -1.7541620024759998]),
'lam': np.array([12.586085291551054, 12.171803779467552]),
'v2': np.array([-504.57532179184557, -504.3428404141017]),
'v3': np.array([-319.3596209726561, -317.0363338552647]),
},
'3B': {'x': np.array([919.0, 827.0]),
'y': np.array([512.0, 700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.18610616903060873, 0.05223448620927229]),
'beta': np.array([0.5847206674920002, -1.7541620024759998]),
'lam': np.array([14.60074101845329, 14.120353260795175]),
'v2': np.array([-504.29128783278026, -503.81513623681207]),
'v3': np.array([-319.5977726217362, -317.30169796071453]),
},
'3C': {'x': np.array([917.0,826.0]),
'y': np.array([512.0,700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.08917305254544772, -0.09924683542340063]),
'beta': np.array([0.5847206674920002, -1.7541620024759998]),
'lam': np.array([16.860616228418674, 16.305648049347006]),
'v2': np.array([-504.29179372150304, -504.06099473540036]),
'v3': np.array([-319.5864222556306, -317.26146053061063]),
},
'4A': {'x': np.array([195.0, 232.0]),
'y': np.array([512.0, 700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.18281231856817595, -0.10820926727846612]),
'beta': np.array([2.2961330928359995, -1.640095066308]),
'lam': np.array([19.42967253041467, 18.733785802367724]),
'v2': np.array([-503.73916258138155, -502.9287085654886]),
'v3': np.array([-321.7198475574414, -317.8596067111157]),
},
'4B': {'x': np.array([192.0, 229.0]),
'y': np.array([512.0, 700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.03596952007447607, -0.10259402857181654]),
'beta': np.array([2.2961365363689996, -1.640097525977]),
'lam': np.array([22.47574268879503, 21.67074830984225]),
'v2': np.array([-503.7051048327475, -502.9891450100565]),
'v3': np.array([-321.6637327196876, -317.78403487305536]),
},
'4C': {'x': np.array([194.0, 231.0]),
'y': np.array([512.0, 700.0]),
's': np.array([10,4]),
'alpha': np.array([-0.0661930805678849, -0.01176625661012924]),
'beta': np.array([2.296119318687, -1.640085227631]),
'lam': np.array([26.292379242285914, 25.350694577065074]),
'v2': np.array([-503.7171854824459, -502.9282547181127]),
'v3': np.array([-321.57006077329663, -317.7252303132135]),
}
}
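# --- Editor's rough check sketch (not part of the original module) ---
# The docstring above describes the chain detector (x, y) -> (alpha, beta, lam) ->
# (v2, v3). Assuming the objects returned by xytoablmodel()/abtov2v3model() are
# directly callable transforms (as the pipeline's assign_wcs models generally are)
# and the offline CRDS reference files in this repository are available, the
# reference table could be spot-checked roughly like this:
if __name__ == '__main__':
    band = '1A'
    ref = mrs_ref_data[band]
    xytoabl = xytoablmodel(band)
    abtov2v3 = abtov2v3model(band)
    alpha, beta, lam = xytoabl(ref['x'], ref['y'])
    v2, v3 = abtov2v3(alpha, beta)
    print('alpha residuals:', alpha - ref['alpha'])
    print('v2 residuals:', v2 - ref['v2'])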
| 39.387283
| 105
| 0.613663
|
d38b46ee92bc099a5b7e8558a7ef63a9015bb028
| 678
|
py
|
Python
|
confrontiv/core.py
|
correctiv/confrontiv
|
ddb34004c32bf20ff49dcb6fba7852593820d921
|
[
"MIT"
] | 2
|
2017-09-25T20:41:54.000Z
|
2017-10-04T07:34:44.000Z
|
confrontiv/core.py
|
correctiv/confrontiv
|
ddb34004c32bf20ff49dcb6fba7852593820d921
|
[
"MIT"
] | null | null | null |
confrontiv/core.py
|
correctiv/confrontiv
|
ddb34004c32bf20ff49dcb6fba7852593820d921
|
[
"MIT"
] | null | null | null |
import json
import unicodecsv
from .models import Recipient, InquiryRequest
def make_inquiry_requests_from_file(inquiry, file):
reader = unicodecsv.DictReader(file)
for lineno, line in enumerate(reader, 1):
try:
recipient = Recipient.objects.get(slug=line['recipient'])
except Recipient.DoesNotExist:
raise ValueError('Recipient on line %s not found' % lineno)
if not recipient.groups.filter(id=inquiry.group_id).exists():
raise ValueError('Recipient %s not in inquiry group' % recipient)
data = json.loads(line['data'])
InquiryRequest.objects.create_from_inquiry(inquiry, recipient, data)
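# --- Editor's note on the expected input (not part of the original module) ---
# make_inquiry_requests_from_file() expects a CSV with a 'recipient' column holding
# Recipient slugs and a 'data' column holding one JSON object per row. A hypothetical
# file (all values invented) would look like:
#
#     recipient,data
#     city-of-example,"{""topic"": ""budget"", ""year"": 2017}"
#     example-agency,"{""topic"": ""contracts""}"
#
# Each recipient must already belong to the inquiry's group; otherwise a ValueError
# is raised with the offending line number.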
| 35.684211
| 77
| 0.696165
|
8171ac5f0b589b09a50b4eb378795e05ca8e1a21
| 641
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Intro_to_Python/assignments/A08_callable_objects.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Intro_to_Python/assignments/A08_callable_objects.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Intro_to_Python/assignments/A08_callable_objects.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
""" Assignment 8
1. Write a function that will return True if the passed-in number is even,
and False if it is odd.
2. Write a second function that will call the first with values 0-6 and print
each result on a new line.
3. Invoke the second function.
The signature of the first function should be: `is_even(num: int) -> bool`
The signature of the second function should be: `test_is_even() -> None` """
def is_even(num: int) -> bool:
return num % 2 == 0
is_even_lambda = lambda num: num % 2 == 0
def test_is_even() -> None:
for i in range(7):
print(is_even(i))
if __name__ == "__main__":
test_is_even()
| 23.740741
| 77
| 0.687988
|
f83de2996a98d75efcbaa66480b7cd210869a886
| 4,124
|
py
|
Python
|
tools/wptrunner/wptrunner/browsers/webkit.py
|
jimmywarting/wpt
|
75d80fa43c763935dff59b3c6b21f4dffa9b03b7
|
[
"BSD-3-Clause"
] | 575
|
2015-06-18T23:58:20.000Z
|
2022-03-23T09:32:39.000Z
|
tools/wptrunner/wptrunner/browsers/webkit.py
|
jimmywarting/wpt
|
75d80fa43c763935dff59b3c6b21f4dffa9b03b7
|
[
"BSD-3-Clause"
] | 113
|
2015-05-04T09:58:14.000Z
|
2022-01-31T19:35:03.000Z
|
tools/wptrunner/wptrunner/browsers/webkit.py
|
almajlis/wpt
|
a1d4dd189a5bdca857845b374946b8002c41d199
|
[
"BSD-3-Clause"
] | 52
|
2015-07-14T10:40:50.000Z
|
2022-03-15T01:11:49.000Z
|
from .base import Browser, ExecutorBrowser, require_arg
from .base import get_timeout_multiplier, certificate_domain_list # noqa: F401
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executorwebdriver import (WebDriverTestharnessExecutor, # noqa: F401
WebDriverRefTestExecutor, # noqa: F401
WebDriverCrashtestExecutor) # noqa: F401
from ..executors.executorwebkit import WebKitDriverWdspecExecutor # noqa: F401
from ..webdriver_server import WebKitDriverServer
__wptrunner__ = {"product": "webkit",
"check_args": "check_args",
"browser": "WebKitBrowser",
"browser_kwargs": "browser_kwargs",
"executor": {"testharness": "WebDriverTestharnessExecutor",
"reftest": "WebDriverRefTestExecutor",
"wdspec": "WebKitDriverWdspecExecutor",
"crashtest": "WebDriverCrashtestExecutor"},
"executor_kwargs": "executor_kwargs",
"env_extras": "env_extras",
"env_options": "env_options",
"run_info_extras": "run_info_extras",
"timeout_multiplier": "get_timeout_multiplier"}
def check_args(**kwargs):
require_arg(kwargs, "binary")
require_arg(kwargs, "webdriver_binary")
require_arg(kwargs, "webkit_port")
def browser_kwargs(logger, test_type, run_info_data, config, **kwargs):
return {"binary": kwargs["binary"],
"webdriver_binary": kwargs["webdriver_binary"],
"webdriver_args": kwargs.get("webdriver_args")}
def capabilities_for_port(server_config, **kwargs):
port_name = kwargs["webkit_port"]
if port_name in ["gtk", "wpe"]:
port_key_map = {"gtk": "webkitgtk"}
browser_options_port = port_key_map.get(port_name, port_name)
browser_options_key = "%s:browserOptions" % browser_options_port
return {
"browserName": "MiniBrowser",
"browserVersion": "2.20",
"platformName": "ANY",
browser_options_key: {
"binary": kwargs["binary"],
"args": kwargs.get("binary_args", []),
"certificates": certificate_domain_list(server_config.domains_set, kwargs["host_cert_path"])}}
return {}
def executor_kwargs(logger, test_type, server_config, cache_manager, run_info_data,
**kwargs):
executor_kwargs = base_executor_kwargs(test_type, server_config,
cache_manager, run_info_data, **kwargs)
executor_kwargs["close_after_done"] = True
executor_kwargs["capabilities"] = capabilities_for_port(server_config,
**kwargs)
return executor_kwargs
def env_extras(**kwargs):
return []
def env_options():
return {}
def run_info_extras(**kwargs):
return {"webkit_port": kwargs["webkit_port"]}
class WebKitBrowser(Browser):
"""Generic WebKit browser is backed by WebKit's WebDriver implementation,
which is supplied through ``wptrunner.webdriver.WebKitDriverServer``.
"""
def __init__(self, logger, binary, webdriver_binary=None,
webdriver_args=None, **kwargs):
Browser.__init__(self, logger)
self.binary = binary
self.server = WebKitDriverServer(self.logger, binary=webdriver_binary,
args=webdriver_args)
def start(self, **kwargs):
self.server.start(block=False)
def stop(self, force=False):
self.server.stop(force=force)
def pid(self):
return self.server.pid
def is_alive(self):
# TODO(ato): This only indicates the driver is alive,
# and doesn't say anything about whether a browser session
# is active.
return self.server.is_alive()
def cleanup(self):
self.stop()
def executor_browser(self):
return ExecutorBrowser, {"webdriver_url": self.server.url}
| 37.153153
| 110
| 0.619059
|
09fd755d6235f35f80d394d4f6057dcd57280768
| 4,146
|
py
|
Python
|
awx/main/models/schedules.py
|
whitelighter18/ansible_awx
|
77a4f1dd86c0e63ea2040156d63d69e78bd4e8e5
|
[
"Apache-2.0"
] | null | null | null |
awx/main/models/schedules.py
|
whitelighter18/ansible_awx
|
77a4f1dd86c0e63ea2040156d63d69e78bd4e8e5
|
[
"Apache-2.0"
] | null | null | null |
awx/main/models/schedules.py
|
whitelighter18/ansible_awx
|
77a4f1dd86c0e63ea2040156d63d69e78bd4e8e5
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import re
import logging
import datetime
import dateutil.rrule
# Django
from django.db import models
from django.db.models.query import QuerySet
from django.utils.timezone import now, make_aware, get_default_timezone
from django.utils.translation import ugettext_lazy as _
# AWX
from awx.api.versioning import reverse
from awx.main.models.base import * # noqa
from awx.main.models.jobs import LaunchTimeConfig
from awx.main.utils import ignore_inventory_computed_fields
from awx.main.consumers import emit_channel_notification
logger = logging.getLogger('awx.main.models.schedule')
__all__ = ['Schedule']
class ScheduleFilterMethods(object):
def enabled(self, enabled=True):
return self.filter(enabled=enabled)
def before(self, dt):
return self.filter(next_run__lt=dt)
def after(self, dt):
return self.filter(next_run__gt=dt)
def between(self, begin, end):
return self.after(begin).before(end)
class ScheduleQuerySet(ScheduleFilterMethods, QuerySet):
pass
class ScheduleManager(ScheduleFilterMethods, models.Manager):
use_for_related_objects = True
def get_queryset(self):
return ScheduleQuerySet(self.model, using=self._db)
class Schedule(CommonModel, LaunchTimeConfig):
class Meta:
app_label = 'main'
ordering = ['-next_run']
objects = ScheduleManager()
unified_job_template = models.ForeignKey(
'UnifiedJobTemplate',
related_name='schedules',
on_delete=models.CASCADE,
)
enabled = models.BooleanField(
default=True,
help_text=_("Enables processing of this schedule.")
)
dtstart = models.DateTimeField(
null=True,
default=None,
editable=False,
help_text=_("The first occurrence of the schedule occurs on or after this time.")
)
dtend = models.DateTimeField(
null=True,
default=None,
editable=False,
help_text=_("The last occurrence of the schedule occurs before this time, aftewards the schedule expires.")
)
rrule = models.CharField(
max_length=255,
help_text=_("A value representing the schedules iCal recurrence rule.")
)
next_run = models.DateTimeField(
null=True,
default=None,
editable=False,
help_text=_("The next time that the scheduled action will run.")
)
def __unicode__(self):
return u'%s_t%s_%s_%s' % (self.name, self.unified_job_template.id, self.id, self.next_run)
def get_absolute_url(self, request=None):
return reverse('api:schedule_detail', kwargs={'pk': self.pk}, request=request)
def get_job_kwargs(self):
config_data = self.prompts_dict()
job_kwargs, rejected, errors = self.unified_job_template._accept_or_ignore_job_kwargs(**config_data)
if errors:
logger.info('Errors creating scheduled job: {}'.format(errors))
job_kwargs['_eager_fields'] = {'launch_type': 'scheduled', 'schedule': self}
return job_kwargs
def update_computed_fields(self):
future_rs = dateutil.rrule.rrulestr(self.rrule, forceset=True)
next_run_actual = future_rs.after(now())
self.next_run = next_run_actual
try:
self.dtstart = future_rs[0]
except IndexError:
self.dtstart = None
self.dtend = None
if 'until' in self.rrule.lower():
match_until = re.match(r".*?(UNTIL=[0-9]+T[0-9]+Z)", self.rrule)
until_date = match_until.groups()[0].split("=")[1]
self.dtend = make_aware(datetime.datetime.strptime(until_date, "%Y%m%dT%H%M%SZ"), get_default_timezone())
if 'count' in self.rrule.lower():
self.dtend = future_rs[-1]
emit_channel_notification('schedules-changed', dict(id=self.id, group_name='schedules'))
with ignore_inventory_computed_fields():
self.unified_job_template.update_computed_fields()
def save(self, *args, **kwargs):
self.update_computed_fields()
super(Schedule, self).save(*args, **kwargs)
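# --- Editor's illustrative sketch (not part of the original module) ---
# update_computed_fields() leans on dateutil to parse the stored iCal string:
# index [0] of the rule set is the first occurrence, .after(now()) the next one,
# and a COUNT/UNTIL clause bounds the last. The RRULE below is an invented example.
#
# >>> import dateutil.rrule
# >>> rs = dateutil.rrule.rrulestr(
# ...     "DTSTART:20300101T120000Z\nRRULE:FREQ=DAILY;COUNT=3", forceset=True)
# >>> rs[0]    # first occurrence -> 2030-01-01 12:00:00+00:00
# >>> rs[-1]   # last occurrence when COUNT is given -> 2030-01-03 12:00:00+00:00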
| 31.409091
| 117
| 0.676556
|
4c1dbbabecfb71eee93ce7498e203e9de3b66353
| 3,521
|
py
|
Python
|
mirage/libs/wireless_utils/packetQueue.py
|
Cabalist/mirage
|
22553d22da4e87ffb99da8d19f8b552986df0965
|
[
"MIT"
] | null | null | null |
mirage/libs/wireless_utils/packetQueue.py
|
Cabalist/mirage
|
22553d22da4e87ffb99da8d19f8b552986df0965
|
[
"MIT"
] | null | null | null |
mirage/libs/wireless_utils/packetQueue.py
|
Cabalist/mirage
|
22553d22da4e87ffb99da8d19f8b552986df0965
|
[
"MIT"
] | null | null | null |
import threading
import time
from queue import Queue
class StoppableThread(threading.Thread):
'''
This class is just a simplistic implementation of a stoppable thread.
The target parameter allows providing a specific function to run continuously in the background.
If the stop method is called, the thread is interrupted.
'''
def __init__(self,target=None):
super().__init__(target=target)
self.daemon = True
self.signal = True
def run(self):
try:
while self.signal:
self._target(*(self._args))
except (KeyboardInterrupt,EOFError):
pass
def stop(self):
'''
This method stops the thread.
'''
self.signal = False
class PacketQueue:
'''
This class implements a Packet (``mirage.libs.wireless_utils.packets.Packet``) queue, and provides an API to manipulate it.
The Emitter class (``mirage.libs.wireless.Emitter``) and the Receiver class (``mirage.libs.wireless.Receiver``) inherit from it.
The private method _task implements a watchdog, allowing packets to be put into or taken from the queue and manipulated. This watchdog is called continuously by a StoppableThread (``mirage.libs.wireless_utils.packetQueue.StoppableThread``).
Some parameters may be passed to the constructor :
* waitEmpty : indicates if the queue should wait until it is empty before stopping
* autoStart : indicates if the queue should start immediately after the instantiation of the class
'''
def __init__(self, waitEmpty = False, autoStart = True):
self.waitEmpty = waitEmpty
self.autoStart = autoStart
self.queue = Queue()
self.isStarted = False
if self.isDeviceUp():
self.device.subscribe(self)
self.daemonThread = None
if autoStart:
self.start()
def isDeviceUp(self):
'''
This method allows checking if the Device (``mirage.libs.wireless_utils.device.Device``) linked to this Packet Queue is up and running.
'''
return hasattr(self,"device") and self.device is not None and self.device.isUp()
def _createDaemonThread(self):
self.daemonThread = StoppableThread(target = self._task)
'''
def __del__(self):
self.stop()
'''
def start(self):
'''
This method starts the associated stoppable thread in order to continuously call the watchdog function (_task).
'''
if self.daemonThread is None:
self._createDaemonThread()
if not self.isStarted:
self.daemonThread.start()
self.isStarted = True
def stop(self):
'''
This method stops the associated stoppable thread.
'''
if hasattr(self,"isStarted") and self.isStarted:
if self.waitEmpty:
while not self.isEmpty():
time.sleep(0.05) # necessary ?
self.daemonThread.stop()
self.daemonThread = None
self.isStarted = False
def restart(self):
'''
This method restarts the associated stoppable thread.
'''
self.stop()
self.start()
def isBusy(self):
'''
This method indicates if the queue contains data.
:return: boolean indicating if the queue contains data
:rtype: bool
'''
return not self.isEmpty()
def isEmpty(self):
'''
This method indicates if the queue is empty.
:return: boolean indicating if the queue is empty
:rtype: bool
'''
return self.queue.empty()
def clear(self):
while not self.isEmpty():
self.queue.get(False)
def _task(self):
pass
def __getattr__(self, name):
if (name != "device" and hasattr(self.device, name) and
(name in self.device.__class__.sharedMethods or name == "hasCapabilities")):
return getattr(self.device,name)
else:
raise AttributeError
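# --- Editor's usage sketch (not part of the original module) ---
# As the docstrings above describe, subclasses override _task() as the watchdog body
# that the StoppableThread calls in a loop, while waitEmpty/autoStart control how the
# queue stops and starts. CountingQueue below is a made-up example with no device
# attached, so isDeviceUp() is False and no subscription happens.
if __name__ == '__main__':
    class CountingQueue(PacketQueue):
        def _task(self):
            # watchdog body: push a token, pop it again, then pause briefly
            self.queue.put("tick")
            self.queue.get()
            time.sleep(0.1)

    q = CountingQueue(waitEmpty=True, autoStart=True)
    time.sleep(0.5)  # let the watchdog run a few times
    q.stop()         # waits for the queue to drain (waitEmpty), then stops the thread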
| 28.168
| 246
| 0.719966
|
f62dd0cf30aa7d06251f0d35b712d555c8aed514
| 1,024
|
py
|
Python
|
tests/modeltests/get_object_or_404/models.py
|
yarko/django
|
90b6240c8753ece3e52cafc37e1088b0646b843f
|
[
"BSD-3-Clause"
] | 1
|
2015-05-14T11:23:36.000Z
|
2015-05-14T11:23:36.000Z
|
tests/modeltests/get_object_or_404/models.py
|
yarko/django
|
90b6240c8753ece3e52cafc37e1088b0646b843f
|
[
"BSD-3-Clause"
] | null | null | null |
tests/modeltests/get_object_or_404/models.py
|
yarko/django
|
90b6240c8753ece3e52cafc37e1088b0646b843f
|
[
"BSD-3-Clause"
] | null | null | null |
"""
35. DB-API Shortcuts
``get_object_or_404()`` is a shortcut function to be used in view functions for
performing a ``get()`` lookup and raising a ``Http404`` exception if a
``DoesNotExist`` exception was raised during the ``get()`` call.
``get_list_or_404()`` is a shortcut function to be used in view functions for
performing a ``filter()`` lookup and raising a ``Http404`` exception if the
resulting list is empty (``filter()`` itself never raises ``DoesNotExist``).
"""
from django.db import models
class Author(models.Model):
name = models.CharField(max_length=50)
def __unicode__(self):
return self.name
class ArticleManager(models.Manager):
def get_query_set(self):
return super(ArticleManager, self).get_query_set().filter(authors__name__icontains='sir')
class Article(models.Model):
authors = models.ManyToManyField(Author)
title = models.CharField(max_length=50)
objects = models.Manager()
by_a_sir = ArticleManager()
def __unicode__(self):
return self.title
| 31.030303
| 97
| 0.71875
|
feb2c972773a701c92b537d4004e0e97c1be5203
| 1,363
|
py
|
Python
|
pipelines/rss.py
|
d3vzer0/vulnerabilities-pipeline
|
a6df7a233eaf66a8cb7c81aed69b377274ca3cf7
|
[
"MIT"
] | 1
|
2021-11-22T09:57:20.000Z
|
2021-11-22T09:57:20.000Z
|
pipelines/rss.py
|
d3vzer0/vulnerabilities-pipeline
|
a6df7a233eaf66a8cb7c81aed69b377274ca3cf7
|
[
"MIT"
] | 1
|
2021-08-03T21:56:03.000Z
|
2021-08-09T15:05:40.000Z
|
pipelines/rss.py
|
d3vzer0/vulnerabilities-pipeline
|
a6df7a233eaf66a8cb7c81aed69b377274ca3cf7
|
[
"MIT"
] | null | null | null |
from dagster import pipeline, composite_solid, PresetDefinition, ModeDefinition
from dagster.utils import file_relative_path
from solids.rss.main import (get_latest_entries,
format_entries, update_entries, get_all_entries)
from solids.summarize.main import summarize_feeds
from solids.elastic.main import elastic_upsert
from solids.elastic.resource import es_resource
from solids.rss.resource import miniflux_rss
FEEDS = [
'microsoft',
'redhat',
'ncsc',
'f5',
'cisa',
'qualys'
]
@composite_solid
def composite_rss():
unread_entries = get_latest_entries()
unread_formatted = format_entries(unread_entries)
historic_entries = get_all_entries()
historic_formatted = format_entries(historic_entries)
feed_summary = summarize_feeds(unread_formatted, historic_formatted)
elastic_upsert(feed_summary)
update_entries(unread_formatted)
@pipeline(
mode_defs=[
ModeDefinition(
'prod', resource_defs={'rss': miniflux_rss, 'es': es_resource}
)
],
preset_defs=[
PresetDefinition.from_files(
'prod',
config_files=[
file_relative_path(__file__, 'presets/prod_rss.yaml')
],
mode='prod',
),
]
)
def sync_new_rss():
for feed in FEEDS:
get_rss = composite_rss.alias(feed)
get_rss()
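# Illustrative only: with the legacy (pre-1.0) Dagster Python API this pipeline
# can be launched programmatically using the 'prod' preset defined above,
# assuming the preset's YAML config file is present:
#
#     from dagster import execute_pipeline
#     result = execute_pipeline(sync_new_rss, preset='prod')
#     assert result.success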
| 27.816327
| 79
| 0.694057
|
2567caae7253284a9a344b99ea33a15dbcca05d6
| 39,803
|
py
|
Python
|
ozpcenter/api/listing/views.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1
|
2018-10-05T17:03:01.000Z
|
2018-10-05T17:03:01.000Z
|
ozpcenter/api/listing/views.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 1
|
2017-01-06T19:20:32.000Z
|
2017-01-06T19:20:32.000Z
|
ozpcenter/api/listing/views.py
|
emosher/ozp-backend
|
d31d00bb8a28a8d0c999813f616b398f41516244
|
[
"Apache-2.0"
] | 7
|
2016-12-16T15:42:05.000Z
|
2020-09-05T01:11:27.000Z
|
"""
Listing Views
"""
import logging
import operator
from django.shortcuts import get_object_or_404
from django.db.models import Min
from django.db.models.functions import Lower
from rest_framework import filters
from rest_framework import status
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework.decorators import list_route
from ozpcenter import errors
from ozpcenter import pagination
from ozpcenter import permissions
from ozpcenter.pipe import pipes
from ozpcenter.pipe import pipeline
from ozpcenter.recommend import recommend_utils
import ozpcenter.api.listing.model_access as model_access
import ozpcenter.api.listing.serializers as serializers
import ozpcenter.model_access as generic_model_access
import ozpcenter.api.listing.model_access_es as model_access_es
logger = logging.getLogger('ozp-center.' + str(__name__))
class DocUrlViewSet(viewsets.ModelViewSet):
"""
TODO: Remove?
"""
permission_classes = (permissions.IsUser,)
queryset = model_access.get_all_doc_urls()
serializer_class = serializers.DocUrlSerializer
class ReviewViewSet(viewsets.ModelViewSet):
"""
Reviews for a given listing
The unique_together constraints on models.Review make it difficult to
use the standard Serializer classes (see the Note here:
http://www.django-rest-framework.org/api-guide/serializers/#specifying-read-only-fields)
Primarily for that reason, we forgo using Serializers for POST and PUT
actions
ModelViewSet for getting all Reviews for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/listing/{pk}/review
Summary:
Find a Review Entry by ID
Response:
200 - Successful operation - ReviewSerializer
DELETE /api/listing/{pk}/review
Summary:
Delete a Review Entry by ID
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ReviewSerializer
filter_backends = (filters.OrderingFilter,)
pagination_class = pagination.ReviewLimitOffsetPagination
ordering_fields = ('id', 'listing', 'text', 'rate', 'edited_date', 'created_date')
ordering = ('-created_date')
def get_queryset(self):
return model_access.get_reviews(self.request.user.username)
def list(self, request, listing_pk=None):
queryset = self.get_queryset().filter(listing=listing_pk, review_parent__isnull=True)
queryset = self.filter_queryset(queryset)
# it appears that because we override the queryset here, we must
# manually invoke the pagination methods
page = self.paginate_queryset(queryset)
if page is not None:
serializer = serializers.ReviewSerializer(page, context={'request': request}, many=True)
return self.get_paginated_response(serializer.data)
serializer = serializers.ReviewSerializer(queryset, many=True, context={'request': request})
return Response(serializer.data)
def retrieve(self, request, pk=None, listing_pk=None):
queryset = self.get_queryset().get(pk=pk, listing=listing_pk)
serializer = serializers.ReviewSerializer(queryset, context={'request': request})
return Response(serializer.data)
def create(self, request, listing_pk=None):
"""
Create a new review
"""
listing = model_access.get_listing_by_id(request.user.username, listing_pk, True)
serializer = serializers.ReviewSerializer(data=request.data, context={'request': request, 'listing': listing}, partial=True)
if not serializer.is_valid():
logger.error('{0!s}'.format(serializer.errors), extra={'request': request})
raise errors.ValidationException('{0}'.format(serializer.errors))
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
def update(self, request, pk=None, listing_pk=None):
"""
Update an existing review
"""
listing = model_access.get_listing_by_id(request.user.username, listing_pk, True)
review = model_access.get_review_by_id(pk)
serializer = serializers.ReviewSerializer(review, data=request.data, context={'request': request, 'listing': listing}, partial=True)
if not serializer.is_valid():
logger.error('{0!s}'.format(serializer.errors), extra={'request': request})
raise errors.ValidationException('{0}'.format(serializer.errors))
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, pk=None, listing_pk=None):
queryset = self.get_queryset()
review = get_object_or_404(queryset, pk=pk)
model_access.delete_listing_review(request.user.username, review)
return Response(status=status.HTTP_204_NO_CONTENT)
class SimilarViewSet(viewsets.ModelViewSet):
"""
Similar Apps for a given listing
# TODO (Rivera 2017-2-22) Implement Similar Listing Algorithm
ModelViewSet for getting all Similar Apps for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/listing/{pk}/similar
Summary:
Find a Similar App Entry by ID
Response:
200 - Successful operation - ListingSerializer
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ListingSerializer
# pagination_class = pagination.StandardPagination
def get_queryset(self, listing_pk):
approval_status = self.request.query_params.get('approval_status', None)
# org = self.request.query_params.get('org', None)
orgs = self.request.query_params.getlist('org', False)
enabled = self.request.query_params.get('enabled', None)
ordering = self.request.query_params.getlist('ordering', None)
if enabled:
enabled = enabled.lower()
if enabled in ['true', '1']:
enabled = True
else:
enabled = False
listings = model_access.get_similar_listings(self.request.user.username, listing_pk)
if approval_status:
listings = listings.filter(approval_status=approval_status)
if orgs:
listings = listings.filter(agency__title__in=orgs)
if enabled is not None:
listings = listings.filter(is_enabled=enabled)
# have to handle this case manually because the ordering includes an app multiple times
# if there are multiple owners. We instead do sorting by case insensitive compare of the
# app owner that comes first alphabetically
param = [s for s in ordering if 'owners__display_name' == s or '-owners__display_name' == s]
if ordering is not None and param:
orderby = 'min'
if param[0].startswith('-'):
orderby = '-min'
listings = listings.annotate(min=Min(Lower('owners__display_name'))).order_by(orderby)
self.ordering = None
return listings
def list(self, request, listing_pk=None):
queryset = self.filter_queryset(self.get_queryset(listing_pk))
serializer = serializers.ListingSerializer(queryset, context={'request': request}, many=True)
similar_listings = pipeline.Pipeline(recommend_utils.ListIterator(serializer.data),
[pipes.ListingDictPostSecurityMarkingCheckPipe(self.request.user.username),
pipes.LimitPipe(10)]).to_list()
return Response(similar_listings)
class RecommendationFeedbackViewSet(viewsets.ModelViewSet):
"""
Recommendation Feedback for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/listing/{pk}/feedback
Summary:
Find a feedback Entry by ID
Response:
200 - Successful operation - ListingSerializer
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.RecommendationFeedbackSerializer
# pagination_class = pagination.StandardPagination
def get_queryset(self, listing):
recommendation_feedback_query = model_access.get_recommendation_feedback(self.request.user.username, listing)
return recommendation_feedback_query
def list(self, request, listing_pk=None):
listing = model_access.get_listing_by_id(request.user.username, listing_pk, True)
queryset = self.get_queryset(listing)
if not queryset:
return Response({'feedback': 0}, status=status.HTTP_404_NOT_FOUND)
serializer = serializers.RecommendationFeedbackSerializer(queryset, context={'request': request, 'listing': listing})
data = serializer.data
return Response(data)
def create(self, request, listing_pk=None):
listing = model_access.get_listing_by_id(request.user.username, listing_pk, True)
serializer = serializers.RecommendationFeedbackSerializer(data=request.data, context={'request': request, 'listing': listing})
if not serializer.is_valid():
logger.error('{0!s}'.format(serializer.errors), extra={'request': request})
raise errors.ValidationException('{0}'.format(serializer.errors))
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
def destroy(self, request, listing_pk=None, pk=None):
listing = model_access.get_listing_by_id(request.user.username, listing_pk, True)
feedback = model_access.get_recommendation_feedback(request.user.username, listing)
if feedback is None:
return Response(status=status.HTTP_404_NOT_FOUND)
model_access.delete_recommendation_feedback(listing, feedback)
return Response(status=status.HTTP_204_NO_CONTENT)
class ListingTypeViewSet(viewsets.ModelViewSet):
"""
Listing Types
ModelViewSet for getting all Listing Types for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/listingtype
Summary:
Get a list of all system-wide ListingType entries
Response:
200 - Successful operation - [ListingTypeSerializer]
POST /api/listingtype
Summary:
Add a ListingType
Request:
data: ListingTypeSerializer Schema
Response:
200 - Successful operation - ListingTypeSerializer
GET /api/listingtype/{pk}
Summary:
Find a ListingType Entry by ID
Response:
200 - Successful operation - ListingTypeSerializer
PUT /api/listingtype/{pk}
Summary:
Update a ListingType Entry by ID
PATCH /api/listingtype/{pk}
Summary:
Update (Partial) a ListingType Entry by ID
DELETE /api/listingtype/{pk}
Summary:
Delete a ListingType Entry by ID
"""
permission_classes = (permissions.IsUser,)
queryset = model_access.get_all_listing_types()
serializer_class = serializers.ListingTypeSerializer
class ListingUserActivitiesViewSet(viewsets.ModelViewSet):
"""
ListingUserActivitiesViewSet endpoints are read-only
ModelViewSet for getting all Listing User Activities for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/self/listings/activity
Summary:
Get a list of all system-wide ListingUserActivities entries
Response:
200 - Successful operation - [ListingActivitySerializer]
GET /api/self/listings/activity/{pk}
Summary:
Find a Listing User Activity Entry by ID
Response:
200 - Successful operation - ListingActivitySerializer
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ListingActivitySerializer
def get_queryset(self):
return model_access.get_listing_activities_for_user(
self.request.user.username)
def list(self, request):
queryset = self.get_queryset()
page = self.paginate_queryset(queryset)
if page is not None:
serializer = serializers.ListingActivitySerializer(page,
context={'request': request}, many=True)
return self.get_paginated_response(serializer.data)
serializer = serializers.ListingActivitySerializer(queryset,
context={'request': request}, many=True)
return Response(serializer.data)
class ListingActivitiesViewSet(viewsets.ModelViewSet):
"""
ListingActivity endpoints are read-only
ModelViewSet for getting all Listing Activities for a given listing
Access Control
===============
- AppsMallSteward can view
URIs
======
GET /api/listings/activity
Summary:
Get a list of all system-wide ListingActivities entries
Response:
200 - Successful operation - [ListingActivitySerializer]
GET /api/listings/activity/{pk}
Summary:
Find a Listing User Activity Entry by ID
Response:
200 - Successful operation - ListingActivitySerializer
"""
permission_classes = (permissions.IsOrgSteward,)
serializer_class = serializers.ListingActivitySerializer
def get_queryset(self):
return model_access.get_all_listing_activities(
self.request.user.username).order_by('-activity_date')
def list(self, request):
queryset = self.get_queryset()
# it appears that because we override the queryset here, we must
# manually invoke the pagination methods
page = self.paginate_queryset(queryset)
if page is not None:
serializer = serializers.ListingActivitySerializer(page,
context={'request': request}, many=True)
return self.get_paginated_response(serializer.data)
serializer = serializers.ListingActivitySerializer(queryset,
context={'request': request}, many=True)
return Response(serializer.data)
class ListingActivityViewSet(viewsets.ModelViewSet):
"""
ListingActivity endpoints are read-only
ModelViewSet for getting all Listing Activities for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/listing/{pk}/activity
Summary:
Find a Listing Activity Entry by ID
Response:
200 - Successful operation - ListingActivitySerializer
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ListingActivitySerializer
def get_queryset(self):
return model_access.get_all_listing_activities(
self.request.user.username).order_by('-activity_date')
def list(self, request, listing_pk=None):
queryset = self.get_queryset().filter(listing=listing_pk)
# it appears that because we override the queryset here, we must
# manually invoke the pagination methods
page = self.paginate_queryset(queryset)
if page is not None:
serializer = serializers.ListingActivitySerializer(page,
context={'request': request}, many=True)
return self.get_paginated_response(serializer.data)
serializer = serializers.ListingActivitySerializer(queryset,
context={'request': request}, many=True)
return Response(serializer.data)
def retrieve(self, request, pk=None, listing_pk=None):
queryset = self.get_queryset().get(pk=pk, listing=listing_pk)
serializer = serializers.ListingActivitySerializer(queryset,
context={'request': request})
return Response(serializer.data)
class ListingPendingDeletionViewSet(viewsets.ModelViewSet):
"""
ModelViewSet for getting all Listing Pending Deletions
Access Control
===============
- All users can view
URIs
======
POST /api/listing/{pk}/pendingdeletion
Summary:
Add a ListingPendingDeletion
Request:
data: ListingPendingDeletionSerializer Schema
Response:
200 - Successful operation - ListingActivitySerializer
GET /api/listing/{pk}/pendingdeletion
Summary:
Find a ListingPendingDeletion Entry by ID
Response:
200 - Successful operation - ListingActivitySerializer
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ListingActivitySerializer
def get_queryset(self):
queryset = model_access.get_pending_deletion_listings(
self.request.user.username)
return queryset
def list(self, request, listing_pk=None):
queryset = self.get_queryset().filter(listing__id=listing_pk)
serializer = serializers.ListingActivitySerializer(queryset,
context={'request': request}, many=True)
return Response(serializer.data)
def create(self, request, listing_pk=None):
try:
user = generic_model_access.get_profile(request.user.username)
listing = model_access.get_listing_by_id(request.user.username,
listing_pk)
description = request.data['description'] if 'description' in request.data else None
if not description:
raise errors.InvalidInput('Description is required when pending a listing for deletion')
listing = model_access.pending_delete_listing(user, listing, description)
return Response(data={"listing": {"id": listing.id}},
status=status.HTTP_201_CREATED)
except Exception as e:
logger.error('Exception: {}'.format(e), extra={'request': request})
raise errors.RequestException('Error pending listing for deletion')
class ListingRejectionViewSet(viewsets.ModelViewSet):
"""
ModelViewSet for getting all Listing Rejections
Access Control
===============
- AppsMallSteward can view
URIs
======
POST /api/listing/{pk}/rejection
Summary:
Add a ListingRejection
Request:
data: ListingRejectionSerializer Schema
Response:
200 - Successful operation - ListingActivitySerializer
GET /api/listing/{pk}/rejection
Summary:
Find a ListingRejection Entry by ID
Response:
200 - Successful operation - ListingActivitySerializer
"""
permission_classes = (permissions.IsOrgStewardOrReadOnly,)
serializer_class = serializers.ListingActivitySerializer
def get_queryset(self):
queryset = model_access.get_rejection_listings(
self.request.user.username)
return queryset
def list(self, request, listing_pk=None):
queryset = self.get_queryset().filter(listing__id=listing_pk)
serializer = serializers.ListingActivitySerializer(queryset,
context={'request': request}, many=True)
return Response(serializer.data)
def create(self, request, listing_pk=None):
try:
user = generic_model_access.get_profile(request.user.username)
listing = model_access.get_listing_by_id(request.user.username,
listing_pk)
rejection_description = request.data['description']
listing = model_access.reject_listing(user, listing,
rejection_description)
return Response(data={"listing": {"id": listing.id}},
status=status.HTTP_201_CREATED)
except Exception as e:
logger.error('Exception: {}'.format(e), extra={'request': request})
raise errors.RequestException('Error rejecting listing')
class ScreenshotViewSet(viewsets.ModelViewSet):
"""
Screenshots
ModelViewSet for getting all Screenshots for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/screenshot/
Summary:
Get a list of all system-wide Screenshot entries
Response:
200 - Successful operation - [ScreenshotSerializer]
POST /api/screenshot/
Summary:
Add a Screenshot
Request:
data: ScreenshotSerializer Schema
Response:
200 - Successful operation - ScreenshotSerializer
GET /api/screenshot/{pk}
Summary:
Find a Screenshot Entry by ID
Response:
200 - Successful operation - ScreenshotSerializer
PUT /api/screenshot/{pk}
Summary:
Update a Screenshot Entry by ID
PATCH /api/screenshot/{pk}
Summary:
Update (Partial) a Screenshot Entry by ID
DELETE /api/screenshot/{pk}
Summary:
Delete a Screenshot Entry by ID
"""
permission_classes = (permissions.IsUser,)
queryset = model_access.get_all_screenshots()
serializer_class = serializers.ScreenshotSerializer
class TagViewSet(viewsets.ModelViewSet):
"""
Tags
ModelViewSet for getting all Tags for a given listing
Access Control
===============
- All users can view
URIs
======
GET /api/tag/
Summary:
Get a list of all system-wide Tag entries
Response:
200 - Successful operation - [TagSerializer]
POST /api/tag/
Summary:
Add a Tag
Request:
data: TagSerializer Schema
Response:
200 - Successful operation - TagSerializer
GET /api/tag/{pk}
Summary:
Find a Tag Entry by ID
Response:
200 - Successful operation - TagSerializer
PUT /api/tag/{pk}
Summary:
Update a Tag Entry by ID
PATCH /api/tag/{pk}
Summary:
Update (Partial) a Tag Entry by ID
DELETE /api/tag/{pk}
Summary:
Delete a Tag Entry by ID
"""
permission_classes = (permissions.IsUser,)
queryset = model_access.get_all_tags()
serializer_class = serializers.TagSerializer
class ListingViewSet(viewsets.ModelViewSet):
"""
Get all listings this user can see
ModelViewSet for getting all Listings
Access Control
===============
- All users can view
URIs
======
GET /api/listing
Summary:
Get a list of all system-wide Listings
Response:
200 - Successful operation - [ListingSerializer]
POST /api/listing/
Summary:
Add a Listing
Request:
data: ListingSerializer Schema
Response:
200 - Successful operation - ListingSerializer
GET /api/listing/{pk}
Summary:
Find a Listing Entry by ID
Response:
200 - Successful operation - ListingSerializer
PUT /api/listing/{pk}
Summary:
Update a Listing Entry by ID
PATCH /api/listing/{pk}
Summary:
Update (Partial) a Listing Entry by ID
DELETE /api/listing/{pk}
Summary:
Delete a Listing Entry by ID
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ListingSerializer
filter_backends = (filters.SearchFilter, filters.OrderingFilter)
search_fields = ('title', 'id', 'owners__display_name', 'agency__title', 'agency__short_name',)
ordering_fields = ('id', 'agency__title', 'agency__short_name', 'is_enabled', 'is_featured',
'edited_date', 'security_marking', 'is_private', 'approval_status', 'approved_date',
'avg_rate', 'total_votes')
case_insensitive_ordering_fields = ('title',)
ordering = ('is_deleted', '-edited_date')
def get_queryset(self):
approval_status = self.request.query_params.get('approval_status', None)
# org = self.request.query_params.get('org', None)
orgs = self.request.query_params.getlist('org', False)
enabled = self.request.query_params.get('enabled', None)
ordering = self.request.query_params.get('ordering', None)
owners_id = self.request.query_params.get('owners_id', None)
if enabled:
enabled = enabled.lower()
if enabled in ['true', '1']:
enabled = True
else:
enabled = False
if ordering:
ordering = [s.strip() for s in ordering.split(',')]
else:
# always default to last modified for consistency
ordering = ['-edited_date']
listings = model_access.get_listings(self.request.user.username)
if owners_id:
listings = listings.filter(owners__id=owners_id)
if approval_status:
listings = listings.filter(approval_status=approval_status)
if orgs:
listings = listings.filter(agency__short_name__in=orgs)
if enabled is not None:
listings = listings.filter(is_enabled=enabled)
# have to handle this case manually because the ordering includes an app multiple times
# if there are multiple owners. We instead do sorting by case insensitive compare of the
# app owner that comes first alphabetically
param = [s for s in ordering if 'owners__display_name' == s or '-owners__display_name' == s]
if ordering is not None and param:
orderby = 'min'
if param[0].startswith('-'):
orderby = '-min'
listings = listings.annotate(min=Min(Lower('owners__display_name'))).order_by(orderby)
self.ordering = None
# Django REST filters are case sensitive by default, so we handle case_insensitive fields
# manually. May want to abstract this functionality in an OrderingFilter sub-class
case_insensitive_ordering = [s for s in ordering if s in self.case_insensitive_ordering_fields or
s.startswith('-') and s[1:] in self.case_insensitive_ordering_fields]
if ordering is not None and case_insensitive_ordering:
for field in case_insensitive_ordering:
if field.startswith('-'):
listings = listings.order_by(Lower(field[1:])).reverse()
else:
listings = listings.order_by(Lower(field))
self.ordering = None
return listings
def list(self, request):
queryset = serializers.ListingSerializer.setup_eager_loading(self.get_queryset())
queryset = self.filter_queryset(queryset)
counts_data = model_access.put_counts_in_listings_endpoint(queryset)
# it appears that because we override the queryset here, we must
# manually invoke the pagination methods
page = self.paginate_queryset(queryset)
if page is not None:
serializer = serializers.ListingSerializer(page,
context={'request': request}, many=True)
r = self.get_paginated_response(serializer.data)
# add counts to response
r.data['counts'] = counts_data
return r
serializer = serializers.ListingSerializer(queryset,
context={'request': request}, many=True)
r = Response(serializer.data)
# add counts to response
counts = {'counts': counts_data}
r.data.append(counts)
return r
def create(self, request):
"""
Save a new Listing - only title is required
Sample Payload:
{
"title":"My Test App",
"description":"This is the full description of my app",
"descriptionShort":"short app description",
"contacts":[
{
"type":"Technical Support",
"name":"Tech Support Contact",
"organization":"ABC Inc",
"email":"tsc@gmail.com",
"securePhone":"555-555-5555",
"unsecurePhone":"111-222-3454"
}
],
"tags":[
"tag1",
"tag2"
],
"type":"Web Application",
"usage_requirements":"None",
"system_requirements":"None",
"versionName":"1.0.0",
"launchUrl":"http://www.google.com/myApp",
"whatIsNew":"Nothing is new",
"owners":[
{
"username":"alan"
}
],
"agency":"Test Organization",
"categories":[
"Entertainment",
"Media and Video"
],
"intents":[
"application/json/edit",
"application/json/view"
],
"docUrls":[
{
"name":"wiki",
"url":"http://www.wikipedia.com/myApp"
}
],
"smallIconId":"b0b54993-0668-4419-98e8-787e4c3a2dc2",
"largeIconId":"e94128ab-d32d-4241-8820-bd2c69a64a87",
"bannerIconId":"ecf79771-79a0-4884-a36d-5820c79c6d72",
"featuredBannerIconId":"c3e6a369-4773-485e-b369-5cebaa331b69",
"changeLogs":[
],
"screenshots":[
{
"smallImageId":"0b8db892-b669-4e86-af23-d899cb4d4d91",
"largeImageId":"80957d25-f34b-48bc-b860-b353cfd9e101"
}
]
}
---
parameters:
- name: body
required: true
paramType: body
parameters_strategy:
form: replace
query: replace
omit_serializer: true
"""
# logger.debug('inside ListingViewSet.create', extra={'request': request})
serializer = serializers.ListingSerializer(data=request.data,
context={'request': request}, partial=True)
if not serializer.is_valid():
logger.error('{0!s}'.format(serializer.errors), extra={'request': request})
raise errors.ValidationException('{0}'.format(serializer.errors))
serializer.save()
return Response(serializer.data, status=status.HTTP_201_CREATED)
def retrieve(self, request, pk=None):
"""
Get a Listing by id
"""
queryset = self.get_queryset().get(pk=pk)
serializer = serializers.ListingSerializer(queryset,
context={'request': request})
# TODO: Refactor in future to use django ordering (mlee)
temp = serializer.data.get('screenshots')
temp.sort(key=operator.itemgetter('order'))
return Response(serializer.data)
def destroy(self, request, pk=None):
"""
Delete a listing
"""
queryset = self.get_queryset()
listing = get_object_or_404(queryset, pk=pk)
description = request.data['description'] if 'description' in request.data else None
if not description:
raise errors.InvalidInput('Description is required when deleting a listing')
model_access.delete_listing(request.user.username, listing, description)
return Response(status=status.HTTP_204_NO_CONTENT)
def update(self, request, pk=None):
"""
Update a Listing
Sample payload:
{
"id":45,
"title":"My Test App",
"description":"This is the full description of my app",
"descriptionShort":"short app description",
"contacts":[
{
"securePhone":"555-555-5555",
"unsecurePhone":"111-222-3454",
"email":"tsc@gmail.com",
"organization":"ABC Inc",
"name":"Tech Support Contact",
"type":"Technical Support"
}
],
"totalReviews":0,
"avgRate":0,
"totalRate1":0,
"totalRate2":0,
"totalRate3":0,
"totalRate4":0,
"height":null,
"width":null,
"totalRate5":0,
"totalVotes":0,
"tags":[
"tag2",
"tag1"
],
"type":"Web Application",
"uuid":"e378c427-bba6-470c-b2f3-e550b9129504",
"usage_requirements":"None",
"system_requirements":"None",
"iframe_compatible":false,
"versionName":"1.0.0",
"launchUrl":"http://www.google.com/myApp",
"whatIsNew":"Nothing is new",
"owners":[
{
"displayName":"kevink",
"username":"kevink",
"id":5
}
],
"agency":"Test Organization",
"agencyShort":"TO",
"currentRejection":null,
"isEnabled":true,
"categories":[
"Media and Video",
"Entertainment"
],
"editedDate":"2015-08-12T10:53:47.036+0000",
"intents":[
"application/json/edit",
"application/json/view"
],
"docUrls":[
{
"url":"http://www.wikipedia.com/myApp",
"name":"wiki"
}
],
"approvalStatus":"IN_PROGRESS",
"isFeatured":false,
"smallIconId":"b0b54993-0668-4419-98e8-787e4c3a2dc2",
"largeIconId":"e94128ab-d32d-4241-8820-bd2c69a64a87",
"bannerIconId":"ecf79771-79a0-4884-a36d-5820c79c6d72",
"featuredBannerIconId":"c3e6a369-4773-485e-b369-5cebaa331b69",
"changeLogs":[
],
"screenshots":[
{
"largeImageId":"80957d25-f34b-48bc-b860-b353cfd9e101",
"smallImageId":"0b8db892-b669-4e86-af23-d899cb4d4d91"
}
]
}
"""
# logger.debug('inside ListingViewSet.update', extra={'request': request})
instance = self.get_queryset().get(pk=pk)
serializer = serializers.ListingSerializer(instance, data=request.data, context={'request': request}, partial=True)
# logger.debug('created ListingSerializer', extra={'request': request})
if not serializer.is_valid():
logger.error('{0!s}'.format(serializer.errors), extra={'request': request})
raise errors.ValidationException('{0}'.format(serializer.errors))
serializer.save()
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, pk=None):
"""
TODO: Probably don't use this (PATCH)
"""
pass
class ListingUserViewSet(viewsets.ModelViewSet):
"""
Get all listings owned by this user
ModelViewSet for getting all ListingUserViewSets
Access Control
===============
- All users can view
URIs
======
GET /api/self/listing
Summary:
Get a list of all system-wide Listing User entries
Response:
200 - Successful operation - [ListingSerializer]
GET /api/self/listing/{pk}
Summary:
Find a ListingUserViewSet Entry by ID
Response:
200 - Successful operation - ListingSerializer
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ListingSerializer
def get_queryset(self):
return model_access.get_self_listings(self.request.user.username)
def list(self, request):
return super(ListingUserViewSet, self).list(self, request)
class ListingSearchViewSet(viewsets.ModelViewSet):
"""
Search for listings
ModelViewSet for getting all Listing Searches
Access Control
===============
- All users can view
URIs
======
GET /api/listings/search
Summary:
Get a list of all system-wide Listing Search entries
Response:
200 - Successful operation - [ListingSerializer]
GET /api/listings/search/{pk}
Summary:
Find a ListingSearchViewSet Entry by ID
Response:
200 - Successful operation - ListingSerializer
"""
permission_classes = (permissions.IsUser,)
serializer_class = serializers.ListingSerializer
filter_backends = (filters.SearchFilter, )
search_fields = ('title', 'description', 'description_short', 'tags__name')
def get_queryset(self):
filter_params = {}
categories = self.request.query_params.getlist('category', False)
agencies = self.request.query_params.getlist('agency', False)
listing_types = self.request.query_params.getlist('type', False)
if categories:
filter_params['categories'] = categories
if agencies:
filter_params['agencies'] = agencies
if listing_types:
filter_params['listing_types'] = listing_types
return model_access.filter_listings(self.request.user.username,
filter_params)
def list(self, request):
"""
---
# YAML (must be separated by `---`)
omit_serializer: false
parameters:
- name: search
description: Text to search
paramType: query
- name: category
description: List of category names (AND logic)
required: false
paramType: query
allowMultiple: true
- name: agency
description: List of agencies
paramType: query
- name: type
description: List of application types
paramType: query
- name: limit
description: Max number of listings to retrieve
paramType: query
- name: offset
description: Offset
paramType: query
responseMessages:
- code: 401
message: Not authenticated
"""
return super(ListingSearchViewSet, self).list(self, request)
class ElasticsearchListingSearchViewSet(viewsets.ViewSet):
"""
Elasticsearch Listing Search Viewset
It must support pagination. offset, limit
GET /api/listings/essearch/?search=6&offset=0&limit=24 HTTP/1.1
GET api/listings/essearch/?search=6&offset=0&category=Education&limit=24&type=web+application&agency=Minitrue&agency=Miniluv&minscore=0.4
ModelViewSet for searching all Listings with Elasticsearch
Access Control
===============
- All users can view
URIs
======
GET /api/listings/essearch
"""
permission_classes = (permissions.IsUser,)
def list(self, request):
current_request_username = request.user.username
params_obj = model_access_es.SearchParamParser(request)
results = model_access_es.search(current_request_username, params_obj)
return Response(results, status=status.HTTP_200_OK)
@list_route(methods=['get'], permission_classes=[permissions.IsUser])
def suggest(self, request):
current_request_username = request.user.username
params_obj = model_access_es.SearchParamParser(self.request)
results = model_access_es.suggest(current_request_username, params_obj)
return Response(results, status=status.HTTP_200_OK)
def create(self, request):
"""
This method is not supported
"""
raise errors.NotImplemented('HTTP Verb Not Supported')
def retrieve(self, request, pk=None):
"""
This method is not supported
"""
raise errors.NotImplemented('HTTP Verb Not Supported')
def update(self, request, pk=None):
"""
This method is not supported
"""
raise errors.NotImplemented('HTTP Verb Not Supported')
def partial_update(self, request, pk=None):
"""
This method is not supported
"""
raise errors.NotImplemented('HTTP Verb Not Supported')
def destroy(self, request, pk=None):
"""
This method is not supported
"""
raise errors.NotImplemented('HTTP Verb Not Supported')
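# Illustrative only (hypothetical wiring; the project's real URL configuration
# lives elsewhere): these viewsets are typically exposed through DRF routers,
# with nested routes for the per-listing resources described in the docstrings.
# A sketch using the older DRF router API that this code base targets:
#
#     from rest_framework import routers
#     router = routers.DefaultRouter()
#     router.register(r'listing', ListingViewSet, base_name='listing')
#     router.register(r'listingtype', ListingTypeViewSet, base_name='listingtype')
#     # nested paths such as /api/listing/{pk}/review resolve to ReviewViewSet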
| 33.447899
| 141
| 0.634726
|