hexsha
stringlengths 40
40
| size
int64 5
2.06M
| ext
stringclasses 10
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
248
| max_stars_repo_name
stringlengths 5
125
| max_stars_repo_head_hexsha
stringlengths 40
78
| max_stars_repo_licenses
listlengths 1
10
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
248
| max_issues_repo_name
stringlengths 5
125
| max_issues_repo_head_hexsha
stringlengths 40
78
| max_issues_repo_licenses
listlengths 1
10
| max_issues_count
int64 1
67k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
248
| max_forks_repo_name
stringlengths 5
125
| max_forks_repo_head_hexsha
stringlengths 40
78
| max_forks_repo_licenses
listlengths 1
10
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 5
2.06M
| avg_line_length
float64 1
1.02M
| max_line_length
int64 3
1.03M
| alphanum_fraction
float64 0
1
| count_classes
int64 0
1.6M
| score_classes
float64 0
1
| count_generators
int64 0
651k
| score_generators
float64 0
1
| count_decorators
int64 0
990k
| score_decorators
float64 0
1
| count_async_functions
int64 0
235k
| score_async_functions
float64 0
1
| count_documentation
int64 0
1.04M
| score_documentation
float64 0
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
548be68a4be4ce8e389208606dd772dad630cd84
| 4,947
|
py
|
Python
|
kanka-manager/test.py
|
davidbradlycurtis/kanka-manager
|
f44f814c6d9433a40cb1edc558baac12f26b31ad
|
[
"MIT"
] | null | null | null |
kanka-manager/test.py
|
davidbradlycurtis/kanka-manager
|
f44f814c6d9433a40cb1edc558baac12f26b31ad
|
[
"MIT"
] | null | null | null |
kanka-manager/test.py
|
davidbradlycurtis/kanka-manager
|
f44f814c6d9433a40cb1edc558baac12f26b31ad
|
[
"MIT"
] | null | null | null |
import requests
import yaml
import json
import os
import sys
import logging
from kankaclient.client import KankaClient
# Configure the root logger once at import time; every record gets a timestamp.
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s')
# Module-level logger shared by the helpers below.
LOGGER = logging.getLogger('KankaManagement')
class SpaceDumper(yaml.SafeDumper):
    # HACK: insert blank lines between top-level objects
    # inspired by https://stackoverflow.com/a/44284819/3786245
    def write_line_break(self, data=None):
        # Emit the normal line break first, then — when the emitter is back at
        # the document's top level (only one entry on the indent stack) —
        # emit an extra break carrying a full-width '# ====' divider, so each
        # top-level entry is visually separated in the output file.
        # NOTE(review): this relies on PyYAML's Emitter writing the `data`
        # argument verbatim before the newline — confirm against the PyYAML
        # version in use.
        super().write_line_break(data)
        if len(self.indents) == 1:
            super().write_line_break('# ============================================================================================\n')
def write_data(file, data):
    """Serialize ``data`` to YAML and write it to ``file``.

    The file is created if it does not already exist (the previous
    ``os.path.isfile`` guard made it impossible to write a brand-new file,
    even though mode 'w' creates one).

    Args:
        file: path of the output YAML file.
        data: any structure serializable by ``yaml.dump``.

    Returns:
        bool: True when the file was written successfully, False otherwise.
    """
    success = False
    try:
        with open(file, 'w') as output_yaml:
            output_yaml.write(yaml.dump(data, Dumper=SpaceDumper, sort_keys=False))
        success = True
    except OSError as ex:
        # OSError covers a missing parent directory (FileNotFoundError),
        # permission problems and disk errors; log instead of silently passing.
        LOGGER.error('Unable to write %s: %s', file, ex)
    return success
def read_data(file):
    """Load and parse a YAML file.

    Args:
        file: path of the YAML file to read.

    Returns:
        The parsed document, or None when the file is missing or unreadable.
    """
    data = None
    if os.path.isfile(file):
        try:
            with open(file, 'r') as input_yaml:
                data = yaml.safe_load(input_yaml.read())
        except OSError as ex:
            # Previously this silently passed with a "#LOG ERROR" TODO;
            # surface read failures (permissions, races) via the logger.
            LOGGER.error('Unable to read %s: %s', file, ex)
    return data
def test_characters(client):
    """Smoke-test the character endpoints: list, fetch (by name and id),
    create, update, and delete a throwaway character."""
    listing = client.characters.get_all()
    fetched_by_name = client.characters.get('Vincent Von Hess')
    fetched_by_id = client.characters.get(677748)
    created = client.characters.create({"name": "test_character"})
    created['name'] = 'test_character_updated'
    created = client.characters.update({"name": "test_character_updated", "id": created.get("id")})
    removed = client.characters.delete(created.get('id'))
# SECURITY: a live Kanka API bearer token was previously hard-coded on this
# line and committed to version control. That token must be treated as
# compromised and revoked. Read the credential from the environment instead.
token = 'Bearer {}'.format(os.environ.get('KANKA_TOKEN', ''))
campaign = 'Journey to Morrivir'
kanka_client = KankaClient(token=token, campaign=campaign, verbose=True)
test_characters(kanka_client)
print()
# camp_id = 107538
# base_url = 'https://kanka.io/api/1.0/campaigns'
# char_url = '%s/%s/characters' % (base_url, camp_id)
# header = {'Authorization': token, 'Content-type': 'application/json'}
# result = requests.get(url=char_url, headers=header)
# if result.reason == 'OK':
# _characters = json.loads(result.text)['data']
# characters = list()
# for char in _characters:
# character = {
# "id" : char.get('id', None),
# "name" : char.get('name', None),
# "entry" : char.get('entry', None),
# "entry_parsed" : char.get('entry_parsed', None),
# "image" : char.get('image', None),
# "image_full" : char.get('image_full', None),
# "image_thumb" : char.get('image_thumb', None),
# "is_private" : char.get('is_private', None),
# "tags" : char.get('tags', []),
# "title" : char.get('title', None),
# "age" : char.get('age', None),
# "pronouns" : char.get('pronouns', None),
# "type" : char.get('type', None),
# "family_id" : char.get('family_id', None),
# "location_id" : char.get('location_id', None),
# "races" : char.get('races', []),
# "is_dead" : char.get('is_dead', None),
# "image_url" : char.get('image_url', None),
# "personality_name" : char.get('personality_name', []),
# "personality_entry" : char.get('personality_entry', []),
# "appearance_name" : char.get('appearance_name', []),
# "appearance_entry" : char.get('appearance_entry', []),
# "is_personality_visible" : char.get('is_personality_visible', None),
# }
# # Prep character for dump
# for field in character.copy():
# if character[field] == None or character[field] == []:
# del character[field]
# del character['id']
# characters.append(character)
# file = 'C:\\Users\\quazn\\Documents\\dev\\kanka-manager\\morrivir\\characters.yaml'
# code = write_data(file, characters)
# file_characters = read_data(file)
#print(file_characters)
| 46.233645
| 1,002
| 0.686275
| 410
| 0.082879
| 0
| 0
| 0
| 0
| 0
| 0
| 3,410
| 0.689307
|
548e7df7f685de5e09edd46875612218fa28a72f
| 1,788
|
py
|
Python
|
setup.py
|
m-aciek/python-sdk
|
ab447b58ae5f45ce2d5beb4bfc4d7063e42b4311
|
[
"MIT"
] | null | null | null |
setup.py
|
m-aciek/python-sdk
|
ab447b58ae5f45ce2d5beb4bfc4d7063e42b4311
|
[
"MIT"
] | null | null | null |
setup.py
|
m-aciek/python-sdk
|
ab447b58ae5f45ce2d5beb4bfc4d7063e42b4311
|
[
"MIT"
] | 2
|
2018-03-30T10:10:56.000Z
|
2018-05-25T09:27:36.000Z
|
#!/usr/bin/env python
import os
import re
import codecs
from setuptools import setup, find_packages
# Absolute directory containing this setup.py; used to resolve bundled files.
ground = os.path.abspath(os.path.dirname(__file__))
def read(filename):
    """Return the UTF-8 decoded text of *filename*, resolved relative to setup.py."""
    path = os.path.join(ground, filename)
    with codecs.open(path, 'rb', 'utf-8') as handle:
        return handle.read()
# Raw text of the package's __init__.py, scanned below for __version__ et al.
metadata = read(os.path.join(ground, 'hyperwallet', '__init__.py'))
def extract_metaitem(meta, source=None):
    """Extract a ``__meta__``-style dunder assignment value from *source*.

    Args:
        meta: bare name of the metadata item, e.g. ``'version'`` for
            ``__version__``.
        source: text to search. Defaults to the module-level ``metadata``
            (the contents of ``hyperwallet/__init__.py``); the parameter
            makes the function reusable and unit-testable.

    Returns:
        str: the quoted value of the assignment.

    Raises:
        RuntimeError: when no matching assignment is found.
    """
    if source is None:
        source = metadata
    meta_match = re.search(
        r"""^__{meta}__\s+=\s+['\"]([^'\"]*)['\"]""".format(meta=meta),
        source, re.MULTILINE)
    if meta_match:
        return meta_match.group(1)
    raise RuntimeError('Unable to find __{meta}__ string.'.format(meta=meta))
# All metadata fields are read out of hyperwallet/__init__.py via
# extract_metaitem() so version/author/etc. are maintained in one place.
setup(
    name = 'hyperwallet-sdk',
    url = extract_metaitem('url'),
    author = extract_metaitem('author'),
    author_email = extract_metaitem('email'),
    version = extract_metaitem('version'),
    license = extract_metaitem('license'),
    description = extract_metaitem('description'),
    long_description = (read('README.rst') + '\n\n' +
                        read('CHANGELOG.rst')),
    maintainer = extract_metaitem('author'),
    maintainer_email = extract_metaitem('email'),
    packages = find_packages(exclude = ('tests', 'doc')),
    install_requires = ['requests', 'requests-toolbelt', 'jwcrypto', 'python-jose'],
    test_suite = 'nose.collector',
    tests_require = [ 'mock', 'nose'],
    keywords='hyperwallet api',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: Sphinx',
        'Intended Audience :: Developers',
        'Natural Language :: English',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
| 33.735849
| 114
| 0.636465
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 666
| 0.372483
|
548fac7398ada6cb536131133e9e9aa0af68eb01
| 7,850
|
py
|
Python
|
big-picture-spectra/big-picture-spectra.py
|
aibhleog/plotting-playground
|
84c19698e659de97c263362c7440faa3f873476e
|
[
"MIT"
] | null | null | null |
big-picture-spectra/big-picture-spectra.py
|
aibhleog/plotting-playground
|
84c19698e659de97c263362c7440faa3f873476e
|
[
"MIT"
] | null | null | null |
big-picture-spectra/big-picture-spectra.py
|
aibhleog/plotting-playground
|
84c19698e659de97c263362c7440faa3f873476e
|
[
"MIT"
] | null | null | null |
'''
This script makes an image very similar to Figure 2 of Hutchison et al. 2019 (https://arxiv.org/pdf/1905.08812.pdf). Undoubtedly, there are likely simpler ways to make this figure -- this is how I chose to code it up.
Because the figure in the paper uses some proprietary data, the code below will generate fake data to be plotted.
Credit: Taylor Hutchison
aibhleog@tamu.edu
Texas A&M University
'''
_author_ = 'Taylor Hutchison'
import numpy as np
import matplotlib.pyplot as plt
import astropy.io.fits as fits
import matplotlib.gridspec as gridspec
from matplotlib.patches import Polygon
import matplotlib.patheffects as PathEffects
# mpl_toolkits.axes_grid was deprecated and removed from matplotlib;
# axes_grid1 provides the same inset_locator API.
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from matplotlib.lines import Line2D
from matplotlib import patches
# -- Generating fake data -- #
# -------------------------- #
np.random.seed(seed=3) # fixing the random seed so we can get the same result
gauss2d = np.loadtxt('gaussian2D_sig2_kernel7.txt') # 2D gaussian kernel (used for the fake 2D emission line / galaxy)
gauss1d = np.loadtxt('gaussian1D_sig2_kernel7.txt') # 1D gaussian kernel (used for the fake 1D emission line)
# 1D & 2D gaussian pulled from here (because it's faster for this exercise):
# http://dev.theomader.com/gaussian-kernel-calculator/
noise1d = np.random.uniform(-1,1,250) # noise for 1D spectrum
noise2d = np.random.uniform(-1,1,(250,70)) # noise for 2D spectrum
shape = noise2d.shape
xcen, ycen = int(shape[0]/2), int(shape[1]/2) # center pixel of the 2D noise frame
galspec2d_line1 = noise2d.copy()
galspec2d_line1[xcen-3:xcen+4,ycen-3:ycen+4] += gauss2d * 35 # 2D emission line
galspec1d_line1 = noise1d.copy()
galspec1d_line1[xcen-3:xcen+4] += gauss1d * 15 # Lya 1D emission line
galspec2d_line2 = galspec2d_line1.copy()
galspec2d_line2[xcen+17:xcen+24,ycen-3:ycen+4] += gauss2d * 35 # 2D emission line
galspec1d_line2 = galspec1d_line1.copy()
galspec1d_line2[xcen+17:xcen+24] += gauss1d * 10 # CIII] 1D doublet emission line
noisegal = np.random.uniform(-1,1,(50,35)) # noise for photometry of 'galaxy'
galaxy = noisegal.copy()
galaxy[22:29,13:20] += gauss2d * 25 # add signal for galaxy shape
galaxy[24:31,16:23] += gauss2d * 25 # add signal for galaxy shape
wavelength = np.arange(len(galspec1d_line1)) # fake wavelength range
# fake errors
np.random.seed(seed=13) # fixing the random seed so we can get the same result
error1d = np.random.random(len(noise1d)) + 0.4
# ---------------------------#
# -- Initializing the image -- #
# ---------------------------- #
f = plt.figure(figsize=(10.5,9))
gs0 = gridspec.GridSpec(2,1,height_ratios=[1,0.9],hspace=0.1) # the main subplots
# ------------- #
# -- TOP ROW -- #
# ------------- #
gs01 = gridspec.GridSpecFromSubplotSpec(1,2,subplot_spec=gs0[0], # the top panel's subplots
                width_ratios=[1.2,2],wspace=0.22)
# --> RIGHT SIDE: the Lya spectrum
line = 'lya'
band = 'Y'
# The subplot gs001 is made up of 3 subplots where the top and bottom are just used to
# center the middle one more accurately -- they aren't necessary if you don't care THAT much :)
gs001 = gridspec.GridSpecFromSubplotSpec(3,1,subplot_spec=gs01[1],
                height_ratios=[0.05,1,0.12],hspace=0.0)
# This is the real subplot for the data (the middle one from gs001), split into 2 subplots
# so that we can have the 2D spectrum on top and the 1D on the bottom
gs011 = gridspec.GridSpecFromSubplotSpec(2,1,subplot_spec=gs001[1],
                height_ratios=[1.25,2],hspace=0.0)
# 2D spectrum
ax01 = plt.Subplot(f, gs011[0])
ax01.imshow(galspec2d_line1[75:175,28:42].T, # zooming in for the sake of the example
        aspect='auto',origin='lower',cmap='gray',clim=(-1.5,2.3))
# removing the tickmarks and labels for the 2D spectrum
ax01.xaxis.set_ticks_position('none')
ax01.yaxis.set_ticks_position('none')
ax01.set_yticklabels([])
ax01.set_xticklabels([])
# white text with black outline
txt = ax01.text(0.023,0.73,'%s-band'%(band), size=20.5, color='w',transform=ax01.transAxes)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground='k')])
f.add_subplot(ax01) # adds the subplot to the image
# 1D spectrum
ax02 = plt.Subplot(f, gs011[1])
ax02.step(wavelength,galspec1d_line1,where='mid',lw=2.3)
ax02.fill_between(wavelength,error1d,error1d*-1,alpha=0.2)
# x-range matches the slice plotted in the 2D panel above
ax02.set_xlim(wavelength[74],wavelength[174])
ax02.set_ylabel(r'F$_{\lambda}$ [10$^{-18}$ erg/s/cm$^2$/$\AA$]',fontsize=16)
ax02.set_xlabel('observed wavelength [microns]',labelpad=5,fontsize=16)
f.add_subplot(ax02) # adds the subplot to the image
# --> LEFT SIDE: F160W STAMP
gs002 = gridspec.GridSpecFromSubplotSpec(1,1,subplot_spec=gs01[0])
ax002 = plt.Subplot(f, gs002[0]) # no need to add extra tiny subplots for padding here!
ax002.imshow(galaxy,aspect='auto',origin='upper',cmap='gray',clim=(-1,2))
# removing the tickmarks and labels for the 2D spectrum
ax002.xaxis.set_ticks_position('none')
ax002.yaxis.set_ticks_position('none')
ax002.set_yticklabels([])
ax002.set_xticklabels([])
# white text with black outline
txt = ax002.text(0.03,0.90,'F160W',ha='left',size=22.5, color='w',transform=ax002.transAxes)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground='k')])
# adding years for the slit layouts, using the set_path_effects to "bold" the text
txt = ax002.text(0.04,0.13,'2016',size=19.5, color='#CF6060',transform=ax002.transAxes)
txt.set_path_effects([PathEffects.withStroke(linewidth=1.18, foreground='#CF6060')])
txt = ax002.text(0.04,0.22,'2014',size=19.5, color='#F4D03F',transform=ax002.transAxes)
txt.set_path_effects([PathEffects.withStroke(linewidth=1.18, foreground='#F4D03F')])
txt = ax002.text(0.04,0.04,'2017',size=19.5, color='#70B5E3',transform=ax002.transAxes)
txt.set_path_effects([PathEffects.withStroke(linewidth=1.18, foreground='#70B5E3')])
# plotting slits over the regions in the image
# loc: 2, 3, 4, 1
ax002.add_patch(Polygon([[7,7],[22,45],[25.5,43],[11,5]], # 2016 slit
            zorder=3,facecolor='none',lw=1.8,edgecolor='#CF6060'))
ax002.add_patch(Polygon([[15,5],[15,45],[20,45],[20,5]], # 2014 slit
            zorder=3,facecolor='none',lw=1.8,edgecolor='#F4D03F'))
ax002.add_patch(Polygon([[5,23],[5,28],[28,28],[28,23]], # 2017 slit
            zorder=3,facecolor='none',lw=1.8,edgecolor='#70B5E3'))
f.add_subplot(ax002) # adds the subplot to the figure
# ------------------------------------------------------------------------- #
# ---------------- #
# -- BOTTOM ROW -- #
# ---------------- #
# --> the CIII] spectrum
line = 'ciii'
band = 'H'
# similar padding process done as with the Lya spectrum (where only the middle one matters)
gs02 = gridspec.GridSpecFromSubplotSpec(1,3,subplot_spec=gs0[1],width_ratios=[0.28,2,0.13],wspace=0.0)
# splitting the middle subplot from above into two, so that we can have 2D on top and 1D on bottom
gs003 = gridspec.GridSpecFromSubplotSpec(2,1,subplot_spec=gs02[1],height_ratios=[1.75,2],hspace=0.0)
# 2D spectrum
ax21 = plt.Subplot(f, gs003[0])
ax21.imshow(galspec2d_line2[:,15:55].T,aspect='auto',origin='lower',cmap='gray',clim=(-1.5,2.2))
# removing the tickmarks and labels for the 2D spectrum
ax21.xaxis.set_ticks_position('none')
ax21.yaxis.set_ticks_position('none')
ax21.set_yticklabels([])
ax21.set_xticklabels([])
# white text with black outline
txt = ax21.text(0.02,0.75,'%s-band'%(band), size=16+8.5, color='w',transform=ax21.transAxes)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground='k')])
f.add_subplot(ax21) # adds subplot to the figure
# 1D spectrum
ax22 = plt.Subplot(f, gs003[1])
ax22.step(wavelength,galspec1d_line2,where='mid',lw=2.7)
ax22.fill_between(wavelength,error1d,error1d*-1,alpha=0.2)
ax22.set_xlim(wavelength[0],wavelength[-1])
ax22.set_ylabel(r'F$_{\lambda}$ [10$^{-19}$ erg/s/cm$^{2}$/$\AA$]',fontsize=16)
ax22.set_xlabel('observed wavelength [microns]',fontsize=16)
f.add_subplot(ax22) # adds subplot to the figure
# saving figure
plt.savefig('figure.pdf')
#plt.show()
plt.close('all')
| 39.25
| 217
| 0.707771
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 3,276
| 0.417325
|
54902b07fce1f2bf2bcf246ab039ab703861aaf3
| 8,517
|
py
|
Python
|
pesummary/core/plots/corner.py
|
pesummary/pesummary
|
99e3c450ecbcaf5a23564d329bdf6e0080f6f2a8
|
[
"MIT"
] | 1
|
2021-08-03T05:58:20.000Z
|
2021-08-03T05:58:20.000Z
|
pesummary/core/plots/corner.py
|
pesummary/pesummary
|
99e3c450ecbcaf5a23564d329bdf6e0080f6f2a8
|
[
"MIT"
] | 1
|
2020-06-13T13:29:35.000Z
|
2020-06-15T12:45:04.000Z
|
pesummary/core/plots/corner.py
|
pesummary/pesummary
|
99e3c450ecbcaf5a23564d329bdf6e0080f6f2a8
|
[
"MIT"
] | 3
|
2021-07-08T08:31:28.000Z
|
2022-03-31T14:08:58.000Z
|
# Licensed under an MIT style license -- see LICENSE.md
import numpy as np
from scipy.stats import gaussian_kde
from matplotlib.colors import LinearSegmentedColormap, colorConverter
import corner
__author__ = ["Charlie Hoy <charlie.hoy@ligo.org>"]
def _set_xlim(new_fig, ax, new_xlim):
if new_fig:
return ax.set_xlim(new_xlim)
xlim = ax.get_xlim()
return ax.set_xlim([min(xlim[0], new_xlim[0]), max(xlim[1], new_xlim[1])])
def _set_ylim(new_fig, ax, new_ylim):
if new_fig:
return ax.set_ylim(new_ylim)
ylim = ax.get_ylim()
return ax.set_ylim([min(ylim[0], new_ylim[0]), max(ylim[1], new_ylim[1])])
def hist2d(
    x, y, bins=20, range=None, weights=None, levels=None, smooth=None, ax=None,
    color=None, quiet=False, plot_datapoints=True, plot_density=True,
    plot_contours=True, no_fill_contours=False, fill_contours=False,
    contour_kwargs=None, contourf_kwargs=None, data_kwargs=None,
    pcolor_kwargs=None, new_fig=True, kde=None, kde_kwargs={},
    density_cmap=None, label=None, grid=True, **kwargs
):
    """Extension of the corner.hist2d function. Allows the user to specify the
    kde used when estimating the 2d probability density
    Parameters
    ----------
    x : array_like[nsamples,]
        The samples.
    y : array_like[nsamples,]
        The samples.
    quiet : bool
        If true, suppress warnings for small datasets.
    levels : array_like
        The contour levels to draw.
    ax : matplotlib.Axes
        A axes instance on which to add the 2-D histogram.
    plot_datapoints : bool
        Draw the individual data points.
    plot_density : bool
        Draw the density colormap.
    plot_contours : bool
        Draw the contours.
    no_fill_contours : bool
        Add no filling at all to the contours (unlike setting
        ``fill_contours=False``, which still adds a white fill at the densest
        points).
    fill_contours : bool
        Fill the contours.
    contour_kwargs : dict
        Any additional keyword arguments to pass to the `contour` method.
    contourf_kwargs : dict
        Any additional keyword arguments to pass to the `contourf` method.
    data_kwargs : dict
        Any additional keyword arguments to pass to the `plot` method when
        adding the individual data points.
    pcolor_kwargs : dict
        Any additional keyword arguments to pass to the `pcolor` method when
        adding the density colormap.
    kde: func, optional
        KDE you wish to use to work out the 2d probability density
    kde_kwargs: dict, optional
        kwargs passed directly to kde
    """
    # NOTE(review): kde_kwargs has a mutable default ({}); it is read-only in
    # this function so no cross-call state leaks, but a None default with an
    # early `kde_kwargs = kde_kwargs or {}` would be safer.
    x = np.asarray(x)
    y = np.asarray(y)
    if kde is None:
        kde = gaussian_kde
    if ax is None:
        raise ValueError("Please provide an axis to plot")
    # Set the default range based on the data range if not provided.
    if range is None:
        range = [[x.min(), x.max()], [y.min(), y.max()]]
    # Set up the default plotting arguments.
    if color is None:
        color = "k"
    # Choose the default "sigma" contour levels.
    if levels is None:
        levels = 1.0 - np.exp(-0.5 * np.arange(0.5, 2.1, 0.5) ** 2)
    # This is the color map for the density plot, over-plotted to indicate the
    # density of the points near the center.
    if density_cmap is None:
        density_cmap = LinearSegmentedColormap.from_list(
            "density_cmap", [color, (1, 1, 1, 0)]
        )
    elif isinstance(density_cmap, str):
        from matplotlib import cm
        density_cmap = cm.get_cmap(density_cmap)
    # This color map is used to hide the points at the high density areas.
    white_cmap = LinearSegmentedColormap.from_list(
        "white_cmap", [(1, 1, 1), (1, 1, 1)], N=2
    )
    # This "color map" is the list of colors for the contour levels if the
    # contours are filled.
    rgba_color = colorConverter.to_rgba(color)
    contour_cmap = [list(rgba_color) for l in levels] + [rgba_color]
    for i, l in enumerate(levels):
        # Fade the alpha channel so inner (denser) contours are more opaque.
        contour_cmap[i][-1] *= float(i) / (len(levels) + 1)
    # We'll make the 2D histogram to directly estimate the density.
    # Only the bin edges X, Y are kept; the counts themselves are replaced by
    # a KDE evaluation below.
    try:
        _, X, Y = np.histogram2d(
            x.flatten(),
            y.flatten(),
            bins=bins,
            range=list(map(np.sort, range)),
            weights=weights,
        )
    except ValueError:
        raise ValueError(
            "It looks like at least one of your sample columns "
            "have no dynamic range. You could try using the "
            "'range' argument."
        )
    # Evaluate the (possibly user-supplied) KDE on the histogram grid.
    values = np.vstack([x.flatten(), y.flatten()])
    kernel = kde(values, **kde_kwargs)
    xmin, xmax = np.min(x.flatten()), np.max(x.flatten())
    ymin, ymax = np.min(y.flatten()), np.max(y.flatten())
    X, Y = np.meshgrid(X, Y)
    pts = np.vstack([X.ravel(), Y.ravel()])
    z = kernel(pts)
    H = z.reshape(X.shape)
    if smooth is not None:
        if kde_kwargs.get("transform", None) is not None:
            from pesummary.utils.utils import logger
            logger.warning(
                "Smoothing PDF. This may give unwanted effects especially near "
                "any boundaries"
            )
        try:
            from scipy.ndimage import gaussian_filter
        except ImportError:
            raise ImportError("Please install scipy for smoothing")
        H = gaussian_filter(H, smooth)
    # NOTE(review): dead code — this branch does nothing.
    if plot_contours or plot_density:
        pass
    # NOTE(review): this None-check comes too late (kde_kwargs was already
    # used above) and with the {} default it can never fire.
    if kde_kwargs is None:
        kde_kwargs = dict()
    if contour_kwargs is None:
        contour_kwargs = dict()
    if plot_datapoints:
        if data_kwargs is None:
            data_kwargs = dict()
        data_kwargs["color"] = data_kwargs.get("color", color)
        data_kwargs["ms"] = data_kwargs.get("ms", 2.0)
        data_kwargs["mec"] = data_kwargs.get("mec", "none")
        data_kwargs["alpha"] = data_kwargs.get("alpha", 0.1)
        ax.plot(x, y, "o", zorder=-1, rasterized=True, **data_kwargs)
    # Plot the base fill to hide the densest data points.
    # alpha=0 means these contours are invisible; they are only computed so
    # their paths can be extracted, clipped and re-plotted below.
    cs = ax.contour(
        X, Y, H, levels=(1 - np.array(levels)) * np.max(H), alpha=0.
    )
    contour_set = []
    for _contour in cs.collections:
        _contour_set = []
        for _path in _contour.get_paths():
            data = _path.vertices
            transpose = data.T
            # Clamp the contour vertices to any 'xlow'/'xhigh'/'ylow'/'yhigh'
            # bounds supplied via kde_kwargs (in-place on the path arrays).
            for idx, axis in enumerate(["x", "y"]):
                limits = [
                    kde_kwargs.get("{}low".format(axis), -np.inf),
                    kde_kwargs.get("{}high".format(axis), np.inf)
                ]
                if kde_kwargs.get("transform", None) is None:
                    if limits[0] is not None:
                        transpose[idx][
                            np.argwhere(transpose[idx] < limits[0])
                        ] = limits[0]
                    if limits[1] is not None:
                        transpose[idx][
                            np.argwhere(transpose[idx] > limits[1])
                        ] = limits[1]
                else:
                    # NOTE(review): _transform is computed but never used —
                    # the untransformed vertices are appended below; confirm
                    # whether the transformed path was intended here.
                    _transform = kde_kwargs["transform"](transpose)
            _contour_set.append(transpose)
        contour_set.append(_contour_set)
    # Plot the density map. This can't be plotted at the same time as the
    # contour fills.
    if plot_density:
        if pcolor_kwargs is None:
            pcolor_kwargs = dict()
        pcolor_kwargs["shading"] = "auto"
        ax.pcolor(X, Y, np.max(H) - H, cmap=density_cmap, **pcolor_kwargs)
    # Plot the contour edge colors.
    if plot_contours:
        colors = contour_kwargs.pop("colors", color)
        linestyles = kwargs.pop("linestyles", "-")
        # Normalize colors/linestyles to one entry per contour level.
        _list = [colors, linestyles]
        for num, (prop, default) in enumerate(zip(_list, ['k', '-'])):
            if prop is None:
                _list[num] = default * len(contour_set)
            elif isinstance(prop, str):
                _list[num] = [prop] * len(contour_set)
            elif len(prop) < len(contour_set):
                raise ValueError(
                    "Please provide a color/linestyle for each contour"
                )
        for idx, _contour in enumerate(contour_set):
            for _idx, _path in enumerate(_contour):
                # Only the very first path carries the legend label.
                if idx == 0 and _idx == 0:
                    _label = label
                else:
                    _label = None
                ax.plot(
                    *_path, color=_list[0][idx], label=_label,
                    linestyle=_list[1][idx]
                )
    _set_xlim(new_fig, ax, range[0])
    _set_ylim(new_fig, ax, range[1])
| 36.242553
| 80
| 0.589996
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,791
| 0.327698
|
549070123669b37704f083b9611ce10258a9d787
| 2,240
|
py
|
Python
|
tests/test_tokenizer.py
|
mkartawijaya/dango
|
9cc9d498c4eac851d6baa96ced528c1d91a87216
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_tokenizer.py
|
mkartawijaya/dango
|
9cc9d498c4eac851d6baa96ced528c1d91a87216
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_tokenizer.py
|
mkartawijaya/dango
|
9cc9d498c4eac851d6baa96ced528c1d91a87216
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import List
import pytest
import dango
def test_empty_phrase():
    """Tokenizing the empty string must yield an empty token list."""
    tokens = dango.tokenize('')
    assert tokens == [], 'an empty phrase contains no tokens'
# Each case lists the expected token surfaces; the phrase under test is their
# concatenation, so the assertion is a round-trip check.
@pytest.mark.parametrize('expected', [
    # inflected verbs should be kept as one word
    ['昨日', '映画', 'を', '見ました'],
    ['私', 'は', '本', 'を', '読む'],
    ['私', 'は', '本', 'を', '読まない'],
    ['私', 'は', '本', 'を', '読んだ'],
    ['私', 'は', '本', 'を', '読まなかった'],
    ['私', 'は', '本', 'を', '読みます'],
    ['私', 'は', '本', 'を', '読みました'],
    ['私', 'は', '本', 'を', '読みません'],
    ['私', 'は', '本', 'を', '読みませんでした'],
    ['東京', 'に', '住んでいる'],
    ['東京', 'に', '住んでる'],
    ['東京', 'に', '住んでいます'],
    ['東京', 'に', '住んでます'],
    ['この', '店', 'は', 'まだ', '開いていない'],
    ['この', '店', 'は', 'まだ', '開いてない'],
    ['この', '店', 'は', 'まだ', '開いていません'],
    ['この', '店', 'は', 'まだ', '開いてません'],
    ['ラーメン', 'を', '作ってみた'],
    # inflected adjectives should be kept as one word as well
    ['この', 'ビル', 'は', '高い'],
    ['この', 'ビル', 'は', '高くない'],
    ['この', 'ビル', 'は', '高かった'],
    ['この', 'ビル', 'は', '高くなかった'],
    # seems/looks-like suffixes should be kept with their verb/adjective
    ['その', 'ケーキ', 'は', 'おいしそう'],
    ['明日', '雨', 'が', '降りそう']
], ids=lambda e: ''.join(e))
def test_tokenize(expected: List[str]):
    """Joining the expected tokens and re-tokenizing must reproduce them."""
    assert [w.surface for w in dango.tokenize(''.join(expected))] == expected
# Since extracting the reading of the dictionary form depends on knowledge
# of the internal workings of SudachiPy we treat this functionality as a
# black box and just perform a smoke test if we get some plausible output.
# This test could break depending on the dictionary used as the readings
# for the words might change.
@pytest.mark.parametrize(['phrase', 'expected'], [
    ('昨日映画を見ました', ['きのう', 'えいが', 'を', 'みる']),
    ('私はその本を読んだ', ['わたくし', 'は', 'その', 'ほん', 'を', 'よむ']),
    ('東京に住んでいます', ['とうきょう', 'に', 'すむ']),
    ('この店はまだ開いてない', ['この', 'みせ', 'は', 'まだ', 'ひらく']),
    ('ラーメンを作ってみた', ['らーめん', 'を', 'つくる']),
    ('このビルは高くなかった', ['この', 'びる', 'は', 'たかい']),
    ('そのケーキはおいしそう', ['その', 'けーき', 'は', 'おいしい']),
    ('明日雨が降りそう', ['あす', 'あめ', 'が', 'ふる'])
], ids=lambda e: ''.join(e))
def test_dictionary_form_reading(phrase: str, expected: List[str]):
    """Smoke test: dictionary-form readings per token match the expected kana."""
    assert [w.dictionary_form_reading for w in dango.tokenize(phrase)] == expected
| 36.129032
| 82
| 0.525446
| 0
| 0
| 0
| 0
| 2,500
| 0.83724
| 0
| 0
| 1,969
| 0.659411
|
5490a142b6dfe4a57805f7133f0d2ea9a4a1539c
| 2,829
|
py
|
Python
|
neutron_lib/db/sqlalchemytypes.py
|
rolaya/neutron-lib
|
41a2226dfb93a0e6138de260f5126fa7c954178c
|
[
"Apache-2.0"
] | null | null | null |
neutron_lib/db/sqlalchemytypes.py
|
rolaya/neutron-lib
|
41a2226dfb93a0e6138de260f5126fa7c954178c
|
[
"Apache-2.0"
] | null | null | null |
neutron_lib/db/sqlalchemytypes.py
|
rolaya/neutron-lib
|
41a2226dfb93a0e6138de260f5126fa7c954178c
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Custom SQLAlchemy types."""
import netaddr
from sqlalchemy import types
from neutron_lib._i18n import _
class IPAddress(types.TypeDecorator):
    """Store a ``netaddr.IPAddress`` in a ``String(64)`` column."""

    impl = types.String(64)

    def process_result_value(self, value, dialect):
        # NULL columns come back as None; netaddr.IPAddress(None) would raise,
        # so pass NULL through untouched.
        if value is None:
            return None
        return netaddr.IPAddress(value)

    def process_bind_param(self, value, dialect):
        # Allow NULL to round-trip for nullable columns.
        if value is None:
            return None
        if not isinstance(value, netaddr.IPAddress):
            raise AttributeError(_("Received type '%(type)s' and value "
                                   "'%(value)s'. Expecting netaddr.IPAddress "
                                   "type.") % {'type': type(value),
                                               'value': value})
        return str(value)
class CIDR(types.TypeDecorator):
    """Store a ``netaddr.IPNetwork`` (CIDR) in a ``String(64)`` column."""

    impl = types.String(64)

    def process_result_value(self, value, dialect):
        # NULL columns come back as None; netaddr.IPNetwork(None) would raise,
        # so pass NULL through untouched.
        if value is None:
            return None
        return netaddr.IPNetwork(value)

    def process_bind_param(self, value, dialect):
        # Allow NULL to round-trip for nullable columns.
        if value is None:
            return None
        if not isinstance(value, netaddr.IPNetwork):
            raise AttributeError(_("Received type '%(type)s' and value "
                                   "'%(value)s'. Expecting netaddr.IPNetwork "
                                   "type.") % {'type': type(value),
                                               'value': value})
        return str(value)
class MACAddress(types.TypeDecorator):
    """Store a ``netaddr.EUI`` (MAC address) in a ``String(64)`` column."""

    impl = types.String(64)

    def process_result_value(self, value, dialect):
        # NULL columns come back as None; netaddr.EUI(None) would raise,
        # so pass NULL through untouched.
        if value is None:
            return None
        return netaddr.EUI(value)

    def process_bind_param(self, value, dialect):
        # Allow NULL to round-trip for nullable columns.
        if value is None:
            return None
        if not isinstance(value, netaddr.EUI):
            raise AttributeError(_("Received type '%(type)s' and value "
                                   "'%(value)s'. Expecting netaddr.EUI "
                                   "type.") % {'type': type(value),
                                               'value': value})
        return str(value)
class TruncatedDateTime(types.TypeDecorator):
    """Truncates microseconds.

    Use this for datetime fields so we don't have to worry about DB-specific
    behavior when it comes to rounding/truncating microseconds off of
    timestamps.
    """

    impl = types.DateTime

    def process_bind_param(self, value, dialect):
        # datetime objects are always truthy, so this effectively guards
        # against None (and any other falsy placeholder) before truncating.
        if not value:
            return value
        return value.replace(microsecond=0)

    # Apply the same truncation when reading values back out.
    process_result_value = process_bind_param
| 33.678571
| 78
| 0.607282
| 2,135
| 0.754684
| 0
| 0
| 0
| 0
| 0
| 0
| 1,083
| 0.382821
|
5491d3f5c105c58d0e54d67614d6a8faed7a1e75
| 256
|
py
|
Python
|
Algorithm/Array/217. Contains Duplicate.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
Algorithm/Array/217. Contains Duplicate.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
Algorithm/Array/217. Contains Duplicate.py
|
smsubham/Data-Structure-Algorithms-Questions
|
45da68231907068ef4e4a0444ffdac69b337fa7c
|
[
"Apache-2.0"
] | null | null | null |
# https://leetcode.com/problems/contains-duplicate/
# We are forming whole set always which isn't optimal though time complexity is O(n).
class Solution:
    def containsDuplicate(self, nums: List[int]) -> bool:
        """Return True when *nums* contains at least one repeated value."""
        seen = set()
        for value in nums:
            if value in seen:
                return True
            seen.add(value)
        return False
| 36.571429
| 85
| 0.710938
| 116
| 0.453125
| 0
| 0
| 0
| 0
| 0
| 0
| 136
| 0.53125
|
54940d248d43c1725fcc0fa869fadb3c0a38e2a1
| 1,488
|
py
|
Python
|
script/check_conf_whitelist.py
|
Kaiyuan-Zhang/Gravel-public
|
ff3f7dc7d5ac63d91e26f03ae4e49a7451c6cb22
|
[
"MIT"
] | 4
|
2020-04-11T19:11:25.000Z
|
2021-02-06T10:46:39.000Z
|
script/check_conf_whitelist.py
|
Kaiyuan-Zhang/Gravel-public
|
ff3f7dc7d5ac63d91e26f03ae4e49a7451c6cb22
|
[
"MIT"
] | 1
|
2021-11-01T20:19:23.000Z
|
2021-11-01T20:19:43.000Z
|
script/check_conf_whitelist.py
|
Kaiyuan-Zhang/Gravel-public
|
ff3f7dc7d5ac63d91e26f03ae4e49a7451c6cb22
|
[
"MIT"
] | 1
|
2020-04-18T03:36:03.000Z
|
2020-04-18T03:36:03.000Z
|
import sys
import os
if __name__ == '__main__':
    # Need at least the conf-list file and the directory holding the confs.
    if len(sys.argv) < 3:
        print("Usage: {} <conf-list> <conf-dir> [white-list-files]".format(sys.argv[0]))
        sys.exit(-1)
    conf_list_file = sys.argv[1]
    conf_dir = sys.argv[2]
    white_list_files = sys.argv[3:]
    # Union of the element names allowed by every whitelist file.
    ele_white_list = set()
    for wl_path in white_list_files:
        with open(wl_path, 'r') as wl_file:
            for line in wl_file.readlines():
                ele_white_list.add(line.rstrip())
    # Map each conf entry (keyed exactly as listed, trailing newline and all)
    # to the element names that conf file contains.
    conf_list = {}
    with open(conf_list_file, 'r') as list_file:
        for entry in list_file.readlines():
            conf_path = os.path.join(conf_dir, entry.rstrip())
            with open(conf_path, 'r') as conf_file:
                conf_list[entry] = [e.rstrip() for e in conf_file.readlines()]
    # Count how often each non-whitelisted element blocks a conf, and collect
    # the confs whose elements are all whitelisted.
    offensive = {}
    supported = []
    for conf, eles in conf_list.items():
        blockers = [e for e in eles if e not in ele_white_list]
        for e in blockers:
            offensive[e] = offensive.get(e, 0) + 1
        if not blockers:
            supported.append(conf)
    ratio = float(len(supported)) / float(len(conf_list.keys())) * 100.0
    print("Support {} / {} ({}%) Confs".format(len(supported), len(conf_list.keys()), ratio))
    # Most-offending elements first (ascending sort, then reversed, to keep
    # the original tie ordering).
    ranked = sorted(offensive.items(), key=lambda pair: pair[1])
    for name, count in ranked[::-1]:
        print(name, count)
| 31.659574
| 93
| 0.536962
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 101
| 0.067876
|
54944c0a9b4c84df76cbc3d9fc9c516394ab50a2
| 4,383
|
py
|
Python
|
models/joint_inference_model.py
|
pnsuau/neurips18_hierchical_image_manipulation
|
712ff8008f8d4c38626bd556fc44adfbcde8fa28
|
[
"MIT"
] | null | null | null |
models/joint_inference_model.py
|
pnsuau/neurips18_hierchical_image_manipulation
|
712ff8008f8d4c38626bd556fc44adfbcde8fa28
|
[
"MIT"
] | null | null | null |
models/joint_inference_model.py
|
pnsuau/neurips18_hierchical_image_manipulation
|
712ff8008f8d4c38626bd556fc44adfbcde8fa28
|
[
"MIT"
] | null | null | null |
import torch
from torch.autograd import Variable
from util.util import *
from util.data_util import *
import numpy as np
from PIL import Image
from data.base_dataset import get_transform_params, get_raw_transform_fn, \
get_transform_fn, get_soft_bbox, get_masked_image
from util.data_util import crop_canvas, paste_canvas
class JointInference():
    """Two-stage joint inference pipeline for hierarchical image manipulation.

    Stage 1 (G_box2mask) turns a sampled bounding box into a semantic layout
    mask; stage 2 (G_mask2img) renders an image from that layout. Each stage
    is configured by its own saved option script.
    """
    def __init__(self, joint_opt):
        ###########################
        # Argument Parsing
        ###########################
        from options.box2mask_test_options import BoxToMaskTestOptions as MaskGenTestOption
        from options.mask2image_test_options import MaskToImageTestOptions as ImgGenTestOption
        # Each sub-model reads its configuration from a separate script file.
        self.opt_maskgen = load_script_to_opt(joint_opt.maskgen_script, MaskGenTestOption)
        self.opt_imggen = load_script_to_opt(joint_opt.imggen_script, ImgGenTestOption)
        # TODO(sh): make this part less hacky
        self.opt_maskgen.gpu_ids = self.opt_imggen.gpu_ids = joint_opt.gpu_ids
        ###########################
        # Model Initialization
        ###########################
        from .models import create_model
        self.G_box2mask = create_model(self.opt_maskgen)  # box -> layout generator
        self.G_mask2img = create_model(self.opt_imggen)   # layout -> image generator
    def sample_bbox(self, bbox_originals, opt, random=False):
        """Pick one bbox annotation dict from bbox_originals.

        Keeps only boxes whose longer side is >= opt.min_box_size, then
        samples uniformly from those; when random=True (or no box passes the
        size filter) samples uniformly over all boxes instead.
        NOTE(review): `cls` and `min_axis` are computed but unused here.
        """
        candidate_list = []
        # sample object based on size
        for bbox in bbox_originals:
            cls = bbox['cls']
            xmin = bbox['bbox'][0]
            ymin = bbox['bbox'][1]
            xmax = bbox['bbox'][2]
            ymax = bbox['bbox'][3]
            box_w, box_h = xmax - xmin, ymax - ymin
            min_axis = min(box_w, box_h)
            max_axis = max(box_w, box_h)
            if max_axis < opt.min_box_size:
                continue
            candidate_list.append(bbox)
        if not random and len(candidate_list) > 0:
            # Sample from bbox within size limit
            return np.random.choice(candidate_list)
        else:
            # Random sample
            return np.random.choice(bbox_originals)
    def sample_window(self, img, label, bbox_sampled):
        """Placeholder: window sampling is not implemented."""
        pass
    def normalize_input(self, img, label, normalize_image=False):
        """Convert image/label to tensors; label values are scaled back to [0, 255]."""
        tnfm_image_raw = get_raw_transform_fn(normalize=normalize_image)
        tnfm_label_raw = get_raw_transform_fn(normalize=False)
        return tnfm_image_raw(img), tnfm_label_raw(label) * 255.0
    def gen_layout(self, bbox_sampled, label_original, opt):
        """Generate a layout mask for the sampled box and paste it back.

        Returns (full-size label canvas, the crop input dict, the raw
        generated label crop).
        """
        # crop canvas
        input_dict = crop_canvas(bbox_sampled, label_original, opt)
        # generate layout (inference only, so gradients are disabled)
        with torch.no_grad():
            label_generated = self.G_box2mask.evaluate({
                'label_map': Variable(input_dict['label']),
                'mask_ctx_in': Variable(input_dict['mask_ctx_in']),
                'mask_out': Variable(input_dict['mask_out']),
                'mask_in': Variable(input_dict['mask_in']),
                'cls': Variable(input_dict['cls']),
                'label_map_orig': Variable(input_dict['label_orig']),
                'mask_ctx_in_orig': Variable(input_dict['mask_ctx_in_orig']),
                'mask_out_orig': Variable(input_dict['mask_out_orig'])
            }, target_size=(input_dict['label_orig'].size()[2:4]))
        # paste canvas
        label_canvas = paste_canvas(label_original, label_generated.data, \
                input_dict, resize=False)
        return label_canvas, input_dict, label_generated.data
    def gen_image(self, bbox_sampled, img_original, label_generated, opt):
        """Render an image crop from the generated layout and paste it back.

        Returns (full-size image canvas, the crop input dict, the raw
        generated image crop).
        """
        # crop canvas
        input_dict = crop_canvas(bbox_sampled, label_generated, opt, \
                img_original=img_original, transform_img=True)
        # generate image (inference only, so gradients are disabled)
        with torch.no_grad():
            img_generated = self.G_mask2img.inference(
                Variable(input_dict['label']),
                Variable(torch.zeros_like(input_dict['label'])),
                Variable(input_dict['image']),
                Variable(input_dict['mask_in']),
                Variable(input_dict['mask_out'])
            )
        # paste canvas; (x+1)/2 rescales the generator output from [-1, 1]
        # to [0, 1] -- presumably a tanh head, TODO confirm against the model
        img_canvas = paste_canvas(img_original, (img_generated.data+1)/2, \
                input_dict, method=Image.BICUBIC, is_img=True)
        return img_canvas, input_dict, img_generated.data
| 42.553398
| 94
| 0.620123
| 4,029
| 0.919233
| 0
| 0
| 0
| 0
| 0
| 0
| 689
| 0.157198
|
549626fa07a7cc95e2aa2428a235bbc1adf539d5
| 2,102
|
py
|
Python
|
solutions/051_n_queens.py
|
abawchen/leetcode
|
41d3b172a7694a46a860fbcb0565a3acccd000f2
|
[
"MIT"
] | null | null | null |
solutions/051_n_queens.py
|
abawchen/leetcode
|
41d3b172a7694a46a860fbcb0565a3acccd000f2
|
[
"MIT"
] | null | null | null |
solutions/051_n_queens.py
|
abawchen/leetcode
|
41d3b172a7694a46a860fbcb0565a3acccd000f2
|
[
"MIT"
] | null | null | null |
class Solution:
    # @return a list of lists of string
    # NOTE: Python 2 only -- relies on xrange, list-returning map(), and the
    # tuple-parameter syntax `def mark(self, board, n, (x, y))`, which is a
    # SyntaxError under Python 3.
    def solveNQueens(self, n):
        # board[r][c]: 1 = free, -1 = attacked, 0 = queen placed.
        board = [[1 for i in xrange(n)] for i in xrange(n)]
        rs = range(n)
        self.queens = []
        # Right-hand diagonal offsets for each step distance 1..n-1:
        # (up-right, down-right).
        self.directions = [[(-i, i), (i, i)] for i in xrange(1, n)]
        self.recursive(board, n, 0, rs)
        return self.queens
    def recursive(self, wb, n, c, rs):
        # Try to place a queen in every still-free row r of column c,
        # recursing on the next column with that row removed.
        for r in rs:
            if wb[r][c] == 1:
                wb, marks = self.mark(wb, n, (r, c))
                if c == n-1:
                    # Last column: serialize the board into 'Q'/'.' strings.
                    self.queens.append(map(lambda q: ''.join(map(lambda x: 'Q' if x == 0 else '.', q)), wb))
                else:
                    nrs = rs[:]
                    nrs.remove(r)
                    self.recursive(wb, n, c+1, nrs)
                # Backtrack: restore the squares this placement attacked.
                wb = self.unmark(wb, marks)
    def mark(self, board, n, (x, y)):
        # Mark every square attacked by a queen at (x, y) -- the rest of its
        # row to the right plus both right-hand diagonals -- with -1,
        # recording which squares changed so the move can be undone.
        marks = []
        for (a, b) in [(x, c) for c in range(y, n)]:
            if board[a][b] != -1:
                board[a][b] = -1
                marks.append((a, b))
        for d in self.directions[:len(self.directions)-y]:
            for (a, b) in map(lambda s: (x+s[0], y+s[1]), d):
                if a >= 0 and a < n and b >= 0 and b < n and board[a][b] != -1:
                    board[a][b] = -1
                    marks.append((a, b))
        board[x][y] = 0
        return board, marks
    def unmark(self, board, marks):
        # Undo mark(): restore the recorded squares to free.
        for (x, y) in marks:
            board[x][y] = 1
        return board
import time
start_time = time.time()
s = Solution()
# Smoke-run the solver for n = 1..11 and time the whole sweep.
# Python 2 print statements throughout.
print s.solveNQueens(1)
print s.solveNQueens(2)
print s.solveNQueens(3)
print (4, s.solveNQueens(4))
print (5, len(s.solveNQueens(5)))
print (6, len(s.solveNQueens(6)))
print (7, len(s.solveNQueens(7)))
print (8, len(s.solveNQueens(8)))
print (9, len(s.solveNQueens(9)))
print (10, len(s.solveNQueens(10)))
print (11, len(s.solveNQueens(11)))
print("--- %s seconds ---" % (time.time() - start_time))
| 28.794521
| 108
| 0.471456
| 1,479
| 0.703616
| 0
| 0
| 0
| 0
| 0
| 0
| 220
| 0.104662
|
5497a6164438dad00ba23076949d1e3d84fd4868
| 3,812
|
py
|
Python
|
tests/v2/parties/test_parties.py
|
jama5262/Politico
|
7292f604723cf115004851b9767688cf1a956bb1
|
[
"MIT"
] | null | null | null |
tests/v2/parties/test_parties.py
|
jama5262/Politico
|
7292f604723cf115004851b9767688cf1a956bb1
|
[
"MIT"
] | 2
|
2019-02-19T12:43:32.000Z
|
2019-03-04T16:15:38.000Z
|
tests/v2/parties/test_parties.py
|
jama5262/Politico
|
7292f604723cf115004851b9767688cf1a956bb1
|
[
"MIT"
] | null | null | null |
import unittest
import json
from app import createApp
from app.api.database.migrations.migrations import migrate
class TestParties(unittest.TestCase):
    """End-to-end tests for the /api/v2/parties endpoints."""

    def setUp(self):
        # Fresh application and authenticated test client for every test.
        self.app = createApp("testing")
        self.client = self.app.test_client()
        self.endpoint = "/api/v2/parties"
        self.partyID = 3
        self.data = dict(
            name="Party Name",
            abbr="Party Abbreviation",
            logo_url="http://logo/url",
            hq_address="Party HQ",
        )
        self.dataUpdate = dict(
            name="Updated Party Name",
            abbr="Updated Party Abbreviation",
            logo_url="http://logo/url",
            hq_address="Updated Party HQ",
        )
        # Same payload as dataUpdate but with the required "name" key missing.
        self.dataNoNameProperty = {
            key: value for key, value in self.dataUpdate.items() if key != "name"
        }
        # Every field present but blank.
        self.dataEmptyValues = {
            key: "" for key in ("name", "abbr", "logo_url", "hq_address")
        }
        self.loginData = dict(email="admin@gmail.com", password="adminpass")

    def tearDown(self):
        # Reset the database between tests.
        migrate()

    def loginUser(self):
        """Log in as admin and build the Authorization header for requests."""
        response = self.client.post(
            path="/api/v2/auth/login",
            data=json.dumps(self.loginData),
            content_type='application/json')
        token = response.json["data"]["token"]
        return {"Authorization": "Bearer " + token}

    def post(self, path, data):
        return self.client.post(
            path=path, data=json.dumps(data),
            content_type='application/json', headers=self.loginUser())

    def get(self, path):
        return self.client.get(
            path=path, content_type='application/json', headers=self.loginUser())

    def patch(self, path, data):
        return self.client.patch(
            path=path, data=json.dumps(data),
            content_type='application/json', headers=self.loginUser())

    def delete(self, path):
        return self.client.delete(
            path=path, content_type='application/json', headers=self.loginUser())

    def test_create_party(self):
        response = self.post(self.endpoint, self.data)
        self.assertEqual(response.status_code, 200, response)

    def test_get_all_parties(self):
        self.assertEqual(self.get(self.endpoint).status_code, 200)

    def test_get_specific_party(self):
        self.post(self.endpoint, self.data)
        response = self.get("{}/{}".format(self.endpoint, self.partyID))
        self.assertEqual(response.status_code, 200)

    def test_get_specific_party_not_found(self):
        self.assertEqual(self.get(self.endpoint + "/2000").status_code, 404)

    def test_edit_specific_party(self):
        self.post(self.endpoint, self.data)
        response = self.patch("{}/{}".format(self.endpoint, self.partyID), self.dataUpdate)
        self.assertEqual(response.status_code, 200)

    def test_edit_specific_party_not_found(self):
        response = self.patch(self.endpoint + "/2000", self.dataUpdate)
        self.assertEqual(response.status_code, 404)

    def test_delete_specific_party(self):
        self.post(self.endpoint, self.data)
        response = self.delete("{}/{}".format(self.endpoint, self.partyID))
        self.assertEqual(response.status_code, 200)

    def test_delete_specific_party_not_found(self):
        self.assertEqual(self.delete(self.endpoint + "/2000").status_code, 404)

    def test_with_empty_values(self):
        response = self.post(self.endpoint, self.dataEmptyValues)
        self.assertEqual(response.status_code, 400)

    def test_with_no_name_property(self):
        response = self.post(self.endpoint, self.dataNoNameProperty)
        self.assertEqual(response.status_code, 400)
| 36.304762
| 128
| 0.635887
| 3,696
| 0.96957
| 0
| 0
| 0
| 0
| 0
| 0
| 591
| 0.155037
|
5497dc6a086f32d3001f4b0c68ed070534942148
| 179
|
py
|
Python
|
tests/_compat.py
|
lanius/hunk
|
bba04d9fb7f37c378ea41bc934c3a02401e34fe6
|
[
"MIT"
] | 1
|
2015-04-03T08:35:41.000Z
|
2015-04-03T08:35:41.000Z
|
tests/_compat.py
|
lanius/hunk
|
bba04d9fb7f37c378ea41bc934c3a02401e34fe6
|
[
"MIT"
] | null | null | null |
tests/_compat.py
|
lanius/hunk
|
bba04d9fb7f37c378ea41bc934c3a02401e34fe6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys

PY2 = sys.version_info[0] == 2

if PY2:
    def json_text(rv):
        """Return the response body; on PY2 `data` is already a str."""
        return rv.data
else:
    def json_text(rv):
        """Decode the response body using the response's charset."""
        return rv.data.decode(rv.charset)
| 12.785714
| 53
| 0.625698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 23
| 0.128492
|
549905ffeca6d09d599080cd848b9e365ea51dd3
| 763
|
py
|
Python
|
oriskami/test/resources/test_router_data.py
|
oriskami/oriskami-python
|
2b0d81f713a9149977907183c67eec136d49ee8c
|
[
"MIT"
] | 4
|
2017-05-28T19:37:31.000Z
|
2017-06-13T11:34:26.000Z
|
oriskami/test/resources/test_router_data.py
|
ubivar/ubivar-python
|
2b0d81f713a9149977907183c67eec136d49ee8c
|
[
"MIT"
] | null | null | null |
oriskami/test/resources/test_router_data.py
|
ubivar/ubivar-python
|
2b0d81f713a9149977907183c67eec136d49ee8c
|
[
"MIT"
] | null | null | null |
import os
import oriskami
import warnings
from oriskami.test.helper import (OriskamiTestCase)
class OriskamiAPIResourcesTests(OriskamiTestCase):
    """Exercise the RouterData list and update API resources."""

    def test_router_data_update(self):
        # Enable routing on entry "0" and confirm the API echoes the change.
        enabled = oriskami.RouterData.update("0", is_active="true")
        self.assertTrue(hasattr(enabled.data, "__iter__"))
        self.assertEqual(enabled.data[0].is_active, "true")
        # Disable it again so the fixture is left in its original state.
        disabled = oriskami.RouterData.update("0", is_active="false")
        self.assertEqual(disabled.data[0].is_active, "false")

    def test_router_data_list(self):
        response = oriskami.RouterData.list()
        self.assertTrue(hasattr(response.data, "__iter__"))
        # NOTE(review): assertTrue(len(...), 1) treats the 1 as the failure
        # message, so this only asserts the list is non-empty -- probably
        # assertEqual was intended; behavior preserved as-is.
        self.assertTrue(len(response.data), 1)
        self.assertTrue(hasattr(response.data[0], "is_active"))
| 38.15
| 69
| 0.714286
| 667
| 0.874181
| 0
| 0
| 0
| 0
| 0
| 0
| 63
| 0.082569
|
54990a8312bff53b0e8f90e7a2361334c451c834
| 1,625
|
py
|
Python
|
osbot_aws/helpers/IAM_Policy.py
|
artem7902/OSBot-AWS
|
4b676b8323f18d3d9809d41263f3a71745ec2828
|
[
"Apache-2.0"
] | null | null | null |
osbot_aws/helpers/IAM_Policy.py
|
artem7902/OSBot-AWS
|
4b676b8323f18d3d9809d41263f3a71745ec2828
|
[
"Apache-2.0"
] | null | null | null |
osbot_aws/helpers/IAM_Policy.py
|
artem7902/OSBot-AWS
|
4b676b8323f18d3d9809d41263f3a71745ec2828
|
[
"Apache-2.0"
] | null | null | null |
from osbot_aws.apis.IAM import IAM
class IAM_Policy:
    """Builder for a single AWS IAM policy document.

    Statements accumulate through the add_* methods; statement() assembles
    the final document, and create()/delete() push the result to AWS via
    the IAM helper.
    """

    def __init__(self, policy_name=None, policy_path=None):
        self.iam = IAM()
        self.policy_name = policy_name
        self.policy_path = policy_path
        self.version = "2012-10-17"      # IAM policy-language version
        self.statements = []             # accumulated statement dicts
        self.account_id = self.iam.account_id()

    def add_cloud_watch(self, resource_arn):
        """Allow log group/stream creation and event writes on resource_arn."""
        cloudwatch_actions = ["logs:CreateLogGroup",
                              "logs:CreateLogStream",
                              "logs:PutLogEvents"]
        return self.add_statement_allow(cloudwatch_actions, [resource_arn])

    def add_statement(self, effect, actions, resources):
        """Append one statement; returns self so calls can be chained."""
        self.statements.append({"Effect": effect,
                                "Action": actions,
                                "Resource": resources})
        return self

    def add_statement_allow(self, actions, resources):
        return self.add_statement('Allow', actions, resources)

    def create(self, delete_before_create=False):
        """Create the policy in AWS; returns an error dict when unnamed."""
        if self.policy_name is None:
            return {'status': 'error', 'data': 'policy name is None'}
        return self.iam.policy_create(self.policy_name, self.statement(),
                                      delete_before_create=delete_before_create)

    def delete(self):
        return self.iam.policy_delete(self.policy_arn())

    def exists(self):
        return self.iam.policy_exists(self.policy_arn())

    def policy_arn(self):
        return self.iam.policy_arn(self.policy_name, self.policy_path,
                                   self.account_id)

    def statement(self):
        """Assemble the policy document from the accumulated statements."""
        return {'Version': self.version, 'Statement': self.statements}

    def statement_from_aws(self):
        """Fetch the statement of the deployed policy from AWS."""
        return self.iam.policy_statement(self.policy_arn())
| 36.111111
| 123
| 0.649846
| 1,586
| 0.976
| 0
| 0
| 0
| 0
| 0
| 0
| 169
| 0.104
|
5499335d4a53f32fd4ee6cd0b97b91f92adeec0e
| 3,959
|
py
|
Python
|
data_visualization.py
|
vashineyu/Common_tools
|
b933660e007ae104910c975d074523012bb7b58e
|
[
"Apache-2.0"
] | 1
|
2018-10-26T09:33:26.000Z
|
2018-10-26T09:33:26.000Z
|
data_visualization.py
|
vashineyu/Common_tools
|
b933660e007ae104910c975d074523012bb7b58e
|
[
"Apache-2.0"
] | null | null | null |
data_visualization.py
|
vashineyu/Common_tools
|
b933660e007ae104910c975d074523012bb7b58e
|
[
"Apache-2.0"
] | null | null | null |
# Visualization function
import numpy as np
import matplotlib.pyplot as plt
from math import ceil
from PIL import Image
from scipy.ndimage.filters import gaussian_filter
def img_combine(img, ncols=5, size=1, path=False):
    """Draw a batch of images on an ncols-wide grid.

    Args:
        img: image array of shape (n, im_w, im_h, 3)
        ncols: int, number of grid columns
        size: figure inches per grid cell
        path: str or False; when a str, the figure is also saved there

    Fixes vs. the original: returns before creating a figure when there is
    nothing to draw, and passes squeeze=False so `axes` is always a 2-D
    array -- the old per-shape branches crashed for a 1x1 grid, where
    plt.subplots returns a bare (non-iterable) Axes object.
    """
    nimg = img.shape[0]
    nrows = int(ceil(nimg / ncols))
    if nrows == 0:
        # Empty batch: nothing to draw, avoid building an empty figure.
        return
    fig, axes = plt.subplots(nrows=nrows, ncols=ncols, sharex=True, sharey=True,
                             figsize=(ncols * size, nrows * size), squeeze=False)
    for nth, ax in enumerate(axes.flat):
        if nth < nimg:  # leave trailing (unused) grid cells untouched, as before
            ax.imshow(img[nth])
            ax.set_axis_off()
    if path:
        plt.tight_layout()
        plt.savefig(path, dpi=300)
    plt.show()
def get_image_for_paper(original_image_object, prediction_map, IHC_map=None,
                        activation_threshold=0.3, overlay_alpha=0.6, sigma_filter=128,
                        mix=False, colormap_style="coolwarm"):
    """
    Build the figure panels used in the paper: raw image, prediction overlay,
    raw+overlay composite, and the corresponding IHC region.

    Args:
        - original_image_object: PIL image object (assumed RGBA when mix=True,
          since the panels are hstacked channel-for-channel -- TODO confirm)
        - prediction_map: 2-D array of per-pixel prediction scores
        - IHC_map: optional PIL object of the IHC stain image
        - activation_threshold: float; pixels scoring below this are made
          fully transparent in the overlay
        - overlay_alpha: float in [0, 1], scales overlay opacity
        - sigma_filter: Gaussian sigma used to smooth the prediction map
          (prevents grid-like looking output); <= 0 disables smoothing
        - mix: False returns the 4 panels as a tuple; True returns one
          combined 2x2 PIL image at quarter resolution
        - colormap_style: matplotlib colormap name for the overlay
    Returns:
        Tuple of PIL images (raw, overlay, raw+overlay, IHC) when mix=False,
        otherwise a single combined PIL image
    """
    # Prediction map filtering
    if sigma_filter > 0:
        pred_smooth = gaussian_filter(prediction_map, sigma=sigma_filter)
    else:
        pred_smooth = prediction_map
    # Create a overlap map: colormap the scores, then use the alpha channel
    # to hide sub-threshold pixels and scale opacity by score.
    cm = plt.get_cmap(colormap_style)
    overlay = cm(pred_smooth) * 255
    mr, mc = np.where(pred_smooth > activation_threshold)
    nr, nc = np.where(pred_smooth < activation_threshold)
    overlay[nr, nc, :] = 255
    overlay[nr, nc, 3] = 0
    overlay[mr, mc, 3] = pred_smooth[mr, mc] * 255 * overlay_alpha
    overlay = Image.fromarray(overlay.astype('uint8'))
    # Render overlay to original image (alpha-composited paste)
    render = original_image_object.copy()
    render.paste(im=overlay, box=(0, 0), mask=overlay)
    if not mix:
        return (original_image_object, overlay, render, IHC_map)
    else:
        """
        raw         | overlay
        ---------------------
        raw+overlay | IHC
        """
        sz = tuple([int(i / 4) for i in original_image_object.size])
        raw_arr = np.array(original_image_object.resize(sz)) # RGBA
        overlay = np.array(overlay.resize(sz)) # RGBA
        render = np.array(render.resize(sz)) # RGBA
        # Black placeholder panel when no IHC image was supplied.
        IHC_map = np.array(IHC_map.resize(sz)) if IHC_map is not None else np.zeros((sz + (4,)))
        r1 = np.hstack((raw_arr, overlay))
        r2 = np.hstack((render, IHC_map))
        mixed = np.vstack((r1, r2))
        return Image.fromarray(mixed.astype('uint8'))
def plot_mask_on_image(img, mask, color=[0, 255, 255], alpha=0.3):
    '''Plot colorful masks on the image
    img: image array of shape (H, W, 3)
    mask: boolean array or np.where
    color: BGR triplet [_, _, _]. Default: [0, 255, 255] is yellow
    alpha: transparency. float [0, 1]
    Ref: http://www.pyimagesearch.com/2016/03/07/transparent-overlays-with-opencv/

    Fix: the original called cv2.addWeighted, but this module never imports
    cv2, so the function always raised NameError. The same saturating blend
    (img_layer*alpha + img*(1-alpha), rounded and clipped to the input
    dtype) is now computed with numpy, which the module already imports.
    '''
    out = img.copy()
    img_layer = img.copy()
    img_layer[mask] = color
    blended = (img_layer.astype(np.float64) * alpha
               + out.astype(np.float64) * (1 - alpha))
    out = np.clip(np.rint(blended), 0, 255).astype(img.dtype)
    return out
| 34.12931
| 113
| 0.602172
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,170
| 0.295529
|
5499a0762a3bf6035430062da7d86593750133d8
| 2,037
|
py
|
Python
|
src/CIA_History.py
|
Larz60p/WorldFactBook
|
c2edb4c8b0b9edab4a41b7384aade6d1d8ce6128
|
[
"MIT"
] | 1
|
2019-03-29T03:33:43.000Z
|
2019-03-29T03:33:43.000Z
|
src/CIA_History.py
|
Larz60p/WorldFactBook
|
c2edb4c8b0b9edab4a41b7384aade6d1d8ce6128
|
[
"MIT"
] | null | null | null |
src/CIA_History.py
|
Larz60p/WorldFactBook
|
c2edb4c8b0b9edab4a41b7384aade6d1d8ce6128
|
[
"MIT"
] | null | null | null |
# copyright (c) 2018 Larz60+
from lxml import html
import ScraperPaths
import CIA_ScanTools
import GetPage
import os
import json
import sys
from bs4 import BeautifulSoup
class CIA_History:
    """Scrape the CIA World Factbook 'history' page into self.fact_links.

    Downloads (or re-reads the cached copy of) the history page, walks its
    tables positionally, stores the result under fact_links['History'], and
    persists it via save_fact_links().
    """
    def __init__(self):
        self.spath = ScraperPaths.ScraperPaths()
        self.gp = GetPage.GetPage()
        self.getpage = self.gp.get_page
        self.get_filename = self.gp.get_filename
        self.cst = CIA_ScanTools.CIA_Scan_Tools()
        self.fact_links = self.cst.fact_links
        url = 'https://www.cia.gov/library/publications/resources/the-world-factbook/docs/history.html'
        filename = self.get_filename(url)
        self.get_history(url, filename)
        self.cst.save_fact_links()
    def get_history(self, url, filename):
        """Parse the history page and populate fact_links['History'].

        The layout is addressed positionally (tables[1], tables[3], fixed
        row indices), so this breaks if the page structure changes.
        """
        page = self.getpage(url, filename)
        c1 = self.fact_links['History'] = {}
        soup = BeautifulSoup(page, 'lxml')
        tables = soup.findAll('table')
        # tables[1]: row 0 holds the page title, row 1 the description text.
        trs = tables[1].find_all('tr')
        for n, tr in enumerate(trs):
            if n == 0:
                item = tr.find('span', {'class': 'h1'})
                title = item.text
                c2 = c1[title] = {}
            elif n == 1:
                allps = tr.find_all('p')
                descr = []
                for p in allps:
                    descr.append(p.text)
                c2['Description'] = descr
        # tables[3]: row 0 section title, row 1 subtitle, row 2 a nested
        # table mapping year -> timeline text.
        trs = tables[3].find_all('tr')
        for n, tr in enumerate(trs):
            if n == 0:
                title1 = tr.find('span').text
                c3 = c2[title1] = {}
            elif n == 1:
                subtext = tr.find('p').text
                c3['subtitle'] = subtext
            elif n == 2:
                newtable = tr.find('table')
                newtrs = newtable.find_all('tr')
                for newtr in newtrs:
                    newtds = newtr.find_all('td')
                    year = newtds[0].text
                    year_text = newtds[1].text
                    c3[year] = year_text
if __name__ == '__main__':
    CIA_History()
| 31.828125
| 103
| 0.522337
| 1,817
| 0.891998
| 0
| 0
| 0
| 0
| 0
| 0
| 225
| 0.110457
|
549b59fe62af96d3a0abf31ed9194bf5c91e167c
| 301
|
py
|
Python
|
tests/thumbnail_tests/urls.py
|
roojoom/sorl-thumbnail
|
f10fd48f8b33efe4f468ece056fd545be796bf72
|
[
"BSD-3-Clause"
] | 2
|
2019-04-09T16:07:23.000Z
|
2019-04-09T16:07:26.000Z
|
tests/thumbnail_tests/urls.py
|
roojoom/sorl-thumbnail
|
f10fd48f8b33efe4f468ece056fd545be796bf72
|
[
"BSD-3-Clause"
] | null | null | null |
tests/thumbnail_tests/urls.py
|
roojoom/sorl-thumbnail
|
f10fd48f8b33efe4f468ece056fd545be796bf72
|
[
"BSD-3-Clause"
] | 1
|
2020-02-18T13:00:55.000Z
|
2020-02-18T13:00:55.000Z
|
from django.conf.urls import patterns
from django.conf import settings
# URLconf for the thumbnail test project.
# NOTE(review): uses the pre-1.8 string-view / patterns() style; patterns()
# was removed in Django 1.10, so this module targets older Django releases.
urlpatterns = patterns(
    '',
    # Serve uploaded media straight from MEDIA_ROOT during tests.
    (r'^media/(?P<path>.+)$', 'django.views.static.serve',
        {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
    # Render any *.html path directly through the test template view.
    (r'^(.*\.html)$', 'thumbnail_tests.views.direct_to_template'),
)
| 27.363636
| 67
| 0.671096
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 138
| 0.458472
|
549b88a77a4a74ecdad5b7ba7eb748aea0547a53
| 822
|
py
|
Python
|
data/mapper.py
|
GhostBadger/Kurien_G_DataViz_Fall2020
|
817f1a352027d4d81db0260393912e78a2a5e596
|
[
"MIT"
] | null | null | null |
data/mapper.py
|
GhostBadger/Kurien_G_DataViz_Fall2020
|
817f1a352027d4d81db0260393912e78a2a5e596
|
[
"MIT"
] | 1
|
2020-12-13T03:46:44.000Z
|
2020-12-13T03:46:44.000Z
|
data/mapper.py
|
GhostBadger/Kurien_G_DataViz_Fall2020
|
817f1a352027d4d81db0260393912e78a2a5e596
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
# Title font family, unpacked as **hfont in plt.title below.
hfont = {'fontname':'Lato'}
# Draw a simple line chart showing world population growth over the last 115 years.
years = [1900, 1950, 1955, 1960, 1965, 1970, 1975, 1980, 1985, 1990, 1995, 2000, 2005, 2010, 2015]
pops = [1.6, 2.5, 2.6, 3.0, 3.3, 3.6, 4.2, 4.4, 4.8, 5.3, 5.7, 6.1, 6.5, 6.9, 7.3]
# Plot the data above, setting the line color (teal, given as RGB/255) and width.
plt.plot(years, pops, color=(0/255, 100/255, 100/255), linewidth=3.0)
# Label on the left hand side.
plt.ylabel("World population by Billions")
# Label on the bottom of the chart.
plt.xlabel("Population growth by year")
# Add a title to the chart (pad offsets it from the axes).
plt.title("World Population Growth", pad="20", **hfont)
# Run the show method (this lives inside the pyplot package);
# this will open a window displaying the chart.
plt.show()
| 31.615385
| 98
| 0.69708
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 446
| 0.542579
|
549b92a869131a02e61a4b0496d5ecab3305509e
| 28,057
|
py
|
Python
|
classification/train_classifier_tf.py
|
dnarqq/WildHack
|
4fb9e4545cb47a4283ebc1dec955c0817b1664c0
|
[
"MIT"
] | 402
|
2019-05-08T17:28:25.000Z
|
2022-03-27T19:30:07.000Z
|
classification/train_classifier_tf.py
|
dnarqq/WildHack
|
4fb9e4545cb47a4283ebc1dec955c0817b1664c0
|
[
"MIT"
] | 72
|
2019-05-07T18:33:32.000Z
|
2022-03-10T07:48:39.000Z
|
classification/train_classifier_tf.py
|
dnarqq/WildHack
|
4fb9e4545cb47a4283ebc1dec955c0817b1664c0
|
[
"MIT"
] | 162
|
2019-05-18T15:45:27.000Z
|
2022-03-25T20:17:45.000Z
|
r"""Train an EfficientNet classifier.
Currently the implementation of multi-label multi-class classification is
non-functional.
During training, start tensorboard from within the classification/ directory:
tensorboard --logdir run --bind_all --samples_per_plugin scalars=0,images=0
Example usage:
python train_classifier_tf.py run_idfg /ssd/crops_sq \
-m "efficientnet-b0" --pretrained --finetune --label-weighted \
--epochs 50 --batch-size 512 --lr 1e-4 \
--seed 123 \
--logdir run_idfg
"""
from __future__ import annotations
import argparse
from collections import defaultdict
from collections.abc import Callable, Mapping, MutableMapping, Sequence
from datetime import datetime
import json
import os
from typing import Any, Optional
import uuid
import numpy as np
import sklearn.metrics
import tensorflow as tf
from tensorboard.plugins.hparams import api as hp
import tqdm
from classification.train_utils import (
HeapItem, recall_from_confusion_matrix, add_to_heap, fig_to_img,
imgs_with_confidences, load_dataset_csv, prefix_all_keys)
from visualization import plot_utils
AUTOTUNE = tf.data.experimental.AUTOTUNE
# match pytorch EfficientNet model names
EFFICIENTNET_MODELS: Mapping[str, Mapping[str, Any]] = {
'efficientnet-b0': dict(cls='EfficientNetB0', img_size=224, dropout=0.2),
'efficientnet-b1': dict(cls='EfficientNetB1', img_size=240, dropout=0.2),
'efficientnet-b2': dict(cls='EfficientNetB2', img_size=260, dropout=0.3),
'efficientnet-b3': dict(cls='EfficientNetB3', img_size=300, dropout=0.3),
'efficientnet-b4': dict(cls='EfficientNetB4', img_size=380, dropout=0.4),
'efficientnet-b5': dict(cls='EfficientNetB5', img_size=456, dropout=0.4),
'efficientnet-b6': dict(cls='EfficientNetB6', img_size=528, dropout=0.5),
'efficientnet-b7': dict(cls='EfficientNetB7', img_size=600, dropout=0.5)
}
def create_dataset(
        img_files: Sequence[str],
        labels: Sequence[Any],
        sample_weights: Optional[Sequence[float]] = None,
        img_base_dir: str = '',
        transform: Optional[Callable[[tf.Tensor], Any]] = None,
        target_transform: Optional[Callable[[Any], Any]] = None,
        cache: bool | str = False
        ) -> tf.data.Dataset:
    """Create a tf.data.Dataset.

    The dataset returns elements (img, label, img_file, sample_weight) if
    sample_weights is not None, or (img, label, img_file) if
    sample_weights=None.
        img: tf.Tensor, shape [H, W, 3], type uint8
        label: tf.Tensor
        img_file: tf.Tensor, scalar, type str
        sample_weight: tf.Tensor, scalar, type float32

    Possible TODO: oversample the imbalanced classes
    see tf.data.experimental.sample_from_datasets

    Args:
        img_files: list of str, relative paths from img_base_dir
        labels: list of int if multilabel=False
        sample_weights: optional list of float
        img_base_dir: str, base directory for images
        transform: optional transform to apply to a single uint8 JPEG image
        target_transform: optional transform to apply to a single label
        cache: bool or str, cache images in memory if True, cache images to
            a file on disk if a str

    Returns: tf.data.Dataset
    """
    # images dataset
    img_ds = tf.data.Dataset.from_tensor_slices(img_files)
    img_ds = img_ds.map(lambda p: tf.io.read_file(img_base_dir + os.sep + p),
                        num_parallel_calls=AUTOTUNE)
    # for smaller disk / memory usage, we cache the raw JPEG bytes instead
    # of the decoded Tensor -- this is why cache() must come before the
    # decode_jpeg map below
    if isinstance(cache, str):
        img_ds = img_ds.cache(cache)  # cache to a file on disk
    elif cache:
        img_ds = img_ds.cache()  # cache in memory
    # convert JPEG bytes to a 3D uint8 Tensor
    # keras EfficientNet already includes normalization from [0, 255] to [0, 1],
    # so we don't need to do that here
    img_ds = img_ds.map(lambda img: tf.io.decode_jpeg(img, channels=3))
    if transform:
        img_ds = img_ds.map(transform, num_parallel_calls=AUTOTUNE)
    # labels dataset
    labels_ds = tf.data.Dataset.from_tensor_slices(labels)
    if target_transform:
        labels_ds = labels_ds.map(target_transform, num_parallel_calls=AUTOTUNE)
    # img_files dataset
    img_files_ds = tf.data.Dataset.from_tensor_slices(img_files)
    if sample_weights is None:
        # the zip order fixes the (img, label, img_file) element structure
        return tf.data.Dataset.zip((img_ds, labels_ds, img_files_ds))
    # weights dataset
    weights_ds = tf.data.Dataset.from_tensor_slices(sample_weights)
    return tf.data.Dataset.zip((img_ds, labels_ds, img_files_ds, weights_ds))
def create_dataloaders(
        dataset_csv_path: str,
        label_index_json_path: str,
        splits_json_path: str,
        cropped_images_dir: str,
        img_size: int,
        multilabel: bool,
        label_weighted: bool,
        weight_by_detection_conf: bool | str,
        batch_size: int,
        augment_train: bool,
        cache_splits: Sequence[str]
        ) -> tuple[dict[str, tf.data.Dataset], list[str]]:
    """Build one batched tf.data.Dataset per split.

    Args:
        dataset_csv_path: str, path to CSV file with columns
            ['dataset', 'location', 'label'], where label is a comma-delimited
            list of labels
        label_index_json_path: str, path to label-index JSON file
            (consumed by load_dataset_csv)
        splits_json_path: str, path to JSON file
        cropped_images_dir: str, base directory of the cropped images
        img_size: int, side length (pixels) images are resize-padded to
        multilabel: bool, forwarded to load_dataset_csv
        label_weighted: bool, whether to attach per-sample weights
        weight_by_detection_conf: bool or str, forwarded to load_dataset_csv
        batch_size: int, batch size used for every split
        augment_train: bool, whether to shuffle/augment the training set
        cache_splits: list of str, splits to cache
            training set is cached at /mnt/tempds/random_file_name
            validation and test sets are cached in memory

    Returns:
        datasets: dict, maps split to DataLoader
        label_names: list of str, label names in order of label id
    """
    df, label_names, split_to_locs = load_dataset_csv(
        dataset_csv_path, label_index_json_path, splits_json_path,
        multilabel=multilabel, label_weighted=label_weighted,
        weight_by_detection_conf=weight_by_detection_conf)
    # define the transforms
    # efficientnet data preprocessing:
    # - train:
    #     1) random crop: aspect_ratio_range=(0.75, 1.33), area_range=(0.08, 1.0)
    #     2) bicubic resize to img_size
    #     3) random horizontal flip
    # - test:
    #     1) center crop
    #     2) bicubic resize to img_size
    @tf.function
    def train_transform(img: tf.Tensor) -> tf.Tensor:
        """Returns: tf.Tensor, shape [img_size, img_size, C], type float32"""
        img = tf.image.resize_with_pad(img, img_size, img_size,
                                       method=tf.image.ResizeMethod.BICUBIC)
        img = tf.image.random_flip_left_right(img)
        img = tf.image.random_brightness(img, max_delta=0.25)
        img = tf.image.random_contrast(img, lower=0.75, upper=1.25)
        img = tf.image.random_saturation(img, lower=0.75, upper=1.25)
        return img
    @tf.function
    def test_transform(img: tf.Tensor) -> tf.Tensor:
        """Returns: tf.Tensor, shape [img_size, img_size, C], type float32"""
        img = tf.image.resize_with_pad(img, img_size, img_size,
                                       method=tf.image.ResizeMethod.BICUBIC)
        return img
    dataloaders = {}
    for split, locs in split_to_locs.items():
        is_train = (split == 'train') and augment_train
        split_df = df[df['dataset_location'].isin(locs)]
        weights = None
        if label_weighted or weight_by_detection_conf:
            # weights sums to:
            # - if weight_by_detection_conf: (# images in split - conf delta)
            # - otherwise: (# images in split)
            weights = split_df['weights'].tolist()
            if not weight_by_detection_conf:
                assert np.isclose(sum(weights), len(split_df))
        cache: bool | str = (split in cache_splits)
        if split == 'train' and 'train' in cache_splits:
            # cache the training split to a uniquely-named on-disk file
            unique_filename = str(uuid.uuid4())
            os.makedirs('/mnt/tempds/', exist_ok=True)
            cache = f'/mnt/tempds/{unique_filename}'
        ds = create_dataset(
            img_files=split_df['path'].tolist(),
            labels=split_df['label_index'].tolist(),
            sample_weights=weights,
            img_base_dir=cropped_images_dir,
            transform=train_transform if is_train else test_transform,
            target_transform=None,
            cache=cache)
        if is_train:
            ds = ds.shuffle(1000, reshuffle_each_iteration=True)
        ds = ds.batch(batch_size).prefetch(buffer_size=AUTOTUNE)
        dataloaders[split] = ds
    return dataloaders, label_names
def build_model(model_name: str, num_classes: int, img_size: int,
                pretrained: bool, finetune: bool) -> tf.keras.Model:
    """Assemble a classifier from an EfficientNet backbone.

    Args:
        model_name: key into EFFICIENTNET_MODELS
        num_classes: number of output logits
        img_size: side length of the square input image
        pretrained: load ImageNet weights when True
        finetune: freeze the backbone so only the new head trains

    Returns: tf.keras.Model; its `base_model` attribute holds the backbone
        so finetuning can later be switched off.
    """
    config = EFFICIENTNET_MODELS[model_name]
    backbone_cls = tf.keras.applications.__dict__[config['cls']]

    inputs = tf.keras.layers.Input(shape=(img_size, img_size, 3))
    base_model = backbone_cls(
        input_tensor=inputs,
        weights='imagenet' if pretrained else None,
        include_top=False,
        pooling='avg')
    if finetune:
        # freeze the base model's weights, including BatchNorm statistics
        # https://www.tensorflow.org/guide/keras/transfer_learning#fine-tuning
        base_model.trainable = False

    # rebuild the output head: dropout + freshly initialized Dense logits
    x = tf.keras.layers.Dropout(config['dropout'], name='top_dropout')(base_model.output)
    outputs = tf.keras.layers.Dense(
        num_classes,
        kernel_initializer=tf.keras.initializers.VarianceScaling(
            scale=1. / 3., mode='fan_out', distribution='uniform'),
        name='logits')(x)
    model = tf.keras.Model(inputs, outputs, name='complete_model')
    model.base_model = base_model  # cache this so that we can turn off finetune
    return model
def main(dataset_dir: str,
         cropped_images_dir: str,
         multilabel: bool,
         model_name: str,
         pretrained: bool,
         finetune: int,
         label_weighted: bool,
         weight_by_detection_conf: bool | str,
         epochs: int,
         batch_size: int,
         lr: float,
         weight_decay: float,
         seed: Optional[int] = None,
         logdir: str = '',
         cache_splits: Sequence[str] = ()) -> None:
    """Main function: trains and evaluates the classifier.

    Builds dataloaders and model, then runs train/val every epoch (and test
    only when val top-1 accuracy improves), logging metrics, confusion
    matrices and extreme-example images to TensorBoard under a timestamped
    subdirectory of `logdir`. Stops early after 8 epochs without a new best
    val accuracy.

    Args:
        finetune: int, number of initial epochs during which only the final
            fully-connected layer trains
        weight_by_detection_conf: bool, or str path to a calibration .npz
        cache_splits: which dataset splits to cache
    """
    # input validation
    assert os.path.exists(dataset_dir)
    assert os.path.exists(cropped_images_dir)
    if isinstance(weight_by_detection_conf, str):
        assert os.path.exists(weight_by_detection_conf)
    # set seed
    seed = np.random.randint(10_000) if seed is None else seed
    np.random.seed(seed)
    tf.random.set_seed(seed)
    # create logdir and save params
    params = dict(locals())  # make a copy
    timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')  # '20200722_110816'
    logdir = os.path.join(logdir, timestamp)
    os.makedirs(logdir, exist_ok=True)
    print('Created logdir:', logdir)
    with open(os.path.join(logdir, 'params.json'), 'w') as f:
        json.dump(params, f, indent=1)
    gpus = tf.config.experimental.list_physical_devices('GPU')
    for gpu in gpus:
        tf.config.experimental.set_memory_growth(gpu, True)
    img_size = EFFICIENTNET_MODELS[model_name]['img_size']
    # create dataloaders and log the index_to_label mapping
    loaders, label_names = create_dataloaders(
        dataset_csv_path=os.path.join(dataset_dir, 'classification_ds.csv'),
        label_index_json_path=os.path.join(dataset_dir, 'label_index.json'),
        splits_json_path=os.path.join(dataset_dir, 'splits.json'),
        cropped_images_dir=cropped_images_dir,
        img_size=img_size,
        multilabel=multilabel,
        label_weighted=label_weighted,
        weight_by_detection_conf=weight_by_detection_conf,
        batch_size=batch_size,
        augment_train=True,
        cache_splits=cache_splits)
    writer = tf.summary.create_file_writer(logdir)
    writer.set_as_default()
    model = build_model(
        model_name, num_classes=len(label_names), img_size=img_size,
        pretrained=pretrained, finetune=finetune > 0)
    # define loss function and optimizer
    loss_fn: tf.keras.losses.Loss
    if multilabel:
        loss_fn = tf.keras.losses.BinaryCrossentropy(
            from_logits=True, reduction=tf.keras.losses.Reduction.NONE)
    else:
        loss_fn = tf.keras.losses.SparseCategoricalCrossentropy(
            from_logits=True, reduction=tf.keras.losses.Reduction.NONE)
    # using EfficientNet training defaults
    # - batch norm momentum: 0.99
    # - optimizer: RMSProp, decay 0.9 and momentum 0.9
    # - epochs: 350
    # - learning rate: 0.256, decays by 0.97 every 2.4 epochs
    # - weight decay: 1e-5
    # NOTE(review): decay_steps=1 decays the LR every epoch, not every 2.4
    # epochs as the note above suggests — confirm this is intended.
    lr_schedule = tf.keras.optimizers.schedules.ExponentialDecay(
        lr, decay_steps=1, decay_rate=0.97, staircase=True)
    optimizer = tf.keras.optimizers.RMSprop(
        learning_rate=lr, rho=0.9, momentum=0.9)
    best_epoch_metrics: dict[str, float] = {}
    for epoch in range(epochs):
        print(f'Epoch: {epoch}')
        optimizer.learning_rate = lr_schedule(epoch)
        tf.summary.scalar('lr', optimizer.learning_rate, epoch)
        if epoch > 0 and finetune == epoch:
            print('Turning off fine-tune!')
            model.base_model.trainable = True
        print('- train:')
        # TODO: change weighted to False if oversampling minority classes
        train_metrics, train_heaps, train_cm = run_epoch(
            model, loader=loaders['train'], weighted=label_weighted,
            loss_fn=loss_fn, weight_decay=weight_decay, optimizer=optimizer,
            finetune=finetune > epoch, return_extreme_images=True)
        train_metrics = prefix_all_keys(train_metrics, prefix='train/')
        log_run('train', epoch, writer, label_names,
                metrics=train_metrics, heaps=train_heaps, cm=train_cm)
        print('- val:')
        val_metrics, val_heaps, val_cm = run_epoch(
            model, loader=loaders['val'], weighted=label_weighted,
            loss_fn=loss_fn, return_extreme_images=True)
        val_metrics = prefix_all_keys(val_metrics, prefix='val/')
        log_run('val', epoch, writer, label_names,
                metrics=val_metrics, heaps=val_heaps, cm=val_cm)
        if val_metrics['val/acc_top1'] > best_epoch_metrics.get('val/acc_top1', 0):  # pylint: disable=line-too-long
            filename = os.path.join(logdir, f'ckpt_{epoch}.h5')
            # bug fix: the message previously printed a literal placeholder
            # instead of interpolating the checkpoint path
            print(f'New best model! Saving checkpoint to {filename}')
            model.save(filename)
            best_epoch_metrics.update(train_metrics)
            best_epoch_metrics.update(val_metrics)
            best_epoch_metrics['epoch'] = epoch
            print('- test:')
            test_metrics, test_heaps, test_cm = run_epoch(
                model, loader=loaders['test'], weighted=label_weighted,
                loss_fn=loss_fn, return_extreme_images=True)
            test_metrics = prefix_all_keys(test_metrics, prefix='test/')
            log_run('test', epoch, writer, label_names,
                    metrics=test_metrics, heaps=test_heaps, cm=test_cm)
        # stop training after 8 epochs without improvement; .get() guards the
        # case where no epoch has improved on val accuracy yet (KeyError)
        if epoch >= best_epoch_metrics.get('epoch', 0) + 8:
            break
    hparams_dict = {
        'model_name': model_name,
        'multilabel': multilabel,
        'finetune': finetune,
        'batch_size': batch_size,
        'epochs': epochs
    }
    hp.hparams(hparams_dict)
    writer.close()
def log_run(split: str, epoch: int, writer: tf.summary.SummaryWriter,
            label_names: Sequence[str], metrics: MutableMapping[str, float],
            heaps: Mapping[str, Mapping[int, list[HeapItem]]], cm: np.ndarray
            ) -> None:
    """Writes one epoch's outputs for a split to TensorBoard: scalar
    metrics, the confusion matrix, and the tp/fp/fn image grids.

    Args:
        metrics: dict, keys already prefixed with {split}/
    """
    # fold per-class recall into the metrics dict before writing scalars
    recalls = recall_from_confusion_matrix(cm, label_names)
    metrics.update(prefix_all_keys(recalls, f'{split}/label_recall/'))
    for name, value in metrics.items():
        tf.summary.scalar(name, value, epoch)
    # render the normalized confusion matrix as an image summary
    fig = plot_utils.plot_confusion_matrix(cm, classes=label_names,
                                           normalize=True)
    fig_tensor = tf.convert_to_tensor(fig_to_img(fig)[np.newaxis, ...])
    tf.summary.image(f'confusion_matrix/{split}', fig_tensor, step=epoch)
    # one image-grid summary per heap type (tp / fp / fn)
    for heap_name, heap in heaps.items():
        log_images_with_confidence(heap, label_names, epoch=epoch,
                                   tag=f'{split}/{heap_name}')
    writer.flush()
def log_images_with_confidence(
        heap_dict: Mapping[int, list[HeapItem]],
        label_names: Sequence[str],
        epoch: int,
        tag: str) -> None:
    """Logs, per label, a grid of extreme-example images plus their file
    names as TensorBoard summaries.

    Args:
        heap_dict: dict, maps label_id to list of HeapItem, where each HeapItem
            data is a list [img, target, top3_conf, top3_preds, img_file],
            and img is a tf.Tensor of shape [H, W, 3]
        label_names: list of str, label names in order of label id
        epoch: int
        tag: str
    """
    for label_id, heap in heap_dict.items():
        name = label_names[label_id]
        # highest-priority items first
        items = sorted(heap, reverse=True)
        fig, img_files = imgs_with_confidences(
            [item.data for item in items], label_names)
        # tf.summary.image wants a batch dimension: [N, H, W, C]
        batched = tf.convert_to_tensor(fig_to_img(fig)[np.newaxis, ...])
        tf.summary.image(f'{name}/{tag}', batched, step=epoch)
        tf.summary.text(f'{name}/{tag}_files', '\n\n'.join(img_files),
                        step=epoch)
def track_extreme_examples(tp_heaps: dict[int, list[HeapItem]],
                           fp_heaps: dict[int, list[HeapItem]],
                           fn_heaps: dict[int, list[HeapItem]],
                           inputs: tf.Tensor,
                           labels: tf.Tensor,
                           img_files: tf.Tensor,
                           logits: tf.Tensor) -> None:
    """Updates the 5 most extreme true-positive (tp), false-positive (fp), and
    false-negative (fn) examples with examples from this batch.

    Each HeapItem's data attribute is a tuple with:
    - img: np.ndarray, shape [H, W, 3], type uint8
    - label: int
    - top3_conf: list of float
    - top3_preds: list of float
    - img_file: str

    Args:
        *_heaps: dict, maps label_id (int) to heap of HeapItems
        inputs: tf.Tensor, shape [batch_size, H, W, 3], type float32
        labels: tf.Tensor, shape [batch_size]
        img_files: tf.Tensor, shape [batch_size], type tf.string
        logits: tf.Tensor, shape [batch_size, num_classes]
    """
    # pull the whole batch out of tensor form once, up front
    labels = labels.numpy().tolist()
    inputs = inputs.numpy().astype(np.uint8)
    img_files = img_files.numpy().astype(str).tolist()
    batch_probs = tf.nn.softmax(logits, axis=1)
    iterable = zip(labels, inputs, img_files, batch_probs)
    for label, img, img_file, confs in iterable:
        # confidence assigned to the true label
        label_conf = confs[label].numpy().item()
        top3_conf, top3_preds = tf.math.top_k(confs, k=3, sorted=True)
        top3_conf = top3_conf.numpy().tolist()
        top3_preds = top3_preds.numpy().tolist()
        data = (img, label, top3_conf, top3_preds, img_file)
        if top3_preds[0] == label:  # true positive
            # priority: margin between the correct class and the runner-up
            item = HeapItem(priority=label_conf - top3_conf[1], data=data)
            add_to_heap(tp_heaps[label], item, k=5)
        else:
            # false positive for top3_pred[0]
            # false negative for label
            # priority: how far the wrong top prediction beat the true label
            item = HeapItem(priority=top3_conf[0] - label_conf, data=data)
            add_to_heap(fp_heaps[top3_preds[0]], item, k=5)
            add_to_heap(fn_heaps[label], item, k=5)
def run_epoch(model: tf.keras.Model,
              loader: tf.data.Dataset,
              weighted: bool,
              top: Sequence[int] = (1, 3),
              loss_fn: Optional[tf.keras.losses.Loss] = None,
              weight_decay: float = 0,
              finetune: bool = False,
              optimizer: Optional[tf.keras.optimizers.Optimizer] = None,
              return_extreme_images: bool = False
              ) -> tuple[
                  dict[str, float],
                  dict[str, dict[int, list[HeapItem]]],
                  np.ndarray
              ]:
    """Runs for 1 epoch.

    Args:
        model: tf.keras.Model
        loader: tf.data.Dataset
        weighted: bool, whether to use sample weights in calculating loss and
            accuracy
        top: tuple of int, list of values of k for calculating top-K accuracy
        loss_fn: optional loss function, calculates the mean loss over a batch
        weight_decay: float, L2-regularization constant
        finetune: bool, if true sets model's dropout and BN layers to eval mode
        optimizer: optional optimizer

    Returns:
        metrics: dict, metrics from epoch, contains keys:
            'loss': float, mean per-example loss over entire epoch,
                only included if loss_fn is not None
            'acc_top{k}': float, accuracy@k over the entire epoch
        heaps: dict, keys are ['tp', 'fp', 'fn'], values are heap_dicts,
            each heap_dict maps label_id (int) to a heap of <= 5 HeapItems with
            data attribute (img, target, top3_conf, top3_preds, img_file)
            - 'tp': priority is the difference between target confidence and
                2nd highest confidence
            - 'fp': priority is the difference between highest confidence and
                target confidence
            - 'fn': same as 'fp'
        confusion_matrix: np.ndarray, shape [num_classes, num_classes],
            C[i, j] = # of samples with true label i, predicted as label j
    """
    # if evaluating or finetuning, set dropout & BN layers to eval mode
    is_train = False
    train_dropout_and_bn = False
    if optimizer is not None:
        assert loss_fn is not None
        is_train = True
        if not finetune:
            train_dropout_and_bn = True
            # only 'kernel' weights (not biases / BN params) get L2 decay
            reg_vars = [
                v for v in model.trainable_variables if 'kernel' in v.name]
    if loss_fn is not None:
        losses = tf.keras.metrics.Mean()
    accuracies_topk = {
        k: tf.keras.metrics.SparseTopKCategoricalAccuracy(k) for k in top
    }
    # for each label, track 5 most-confident and least-confident examples
    tp_heaps: dict[int, list[HeapItem]] = defaultdict(list)
    fp_heaps: dict[int, list[HeapItem]] = defaultdict(list)
    fn_heaps: dict[int, list[HeapItem]] = defaultdict(list)
    all_labels = []
    all_preds = []
    tqdm_loader = tqdm.tqdm(loader)
    for batch in tqdm_loader:
        if weighted:
            inputs, labels, img_files, weights = batch
        else:
            # even if batch contains sample weights, don't use them
            inputs, labels, img_files = batch[0:3]
            weights = None
        all_labels.append(labels.numpy())
        desc = []
        with tf.GradientTape(watch_accessed_variables=is_train) as tape:
            outputs = model(inputs, training=train_dropout_and_bn)
            if loss_fn is not None:
                loss = loss_fn(labels, outputs)
                if weights is not None:
                    loss *= weights
                # we do not track L2-regularization loss in the loss metric
                losses.update_state(loss, sample_weight=weights)
                desc.append(f'Loss {losses.result().numpy():.4f}')
            if optimizer is not None:
                loss = tf.math.reduce_mean(loss)
                if not finetune:  # only regularize layers before the final FC
                    # bug fix: tf.add_n() requires a list/tuple of tensors;
                    # passing a generator raises ValueError at runtime
                    loss += weight_decay * tf.add_n(
                        [tf.nn.l2_loss(v) for v in reg_vars])
        all_preds.append(tf.math.argmax(outputs, axis=1).numpy())
        if optimizer is not None:
            gradients = tape.gradient(loss, model.trainable_variables)
            optimizer.apply_gradients(zip(gradients, model.trainable_variables))
        for k, acc in accuracies_topk.items():
            acc.update_state(labels, outputs, sample_weight=weights)
            desc.append(f'Acc@{k} {acc.result().numpy() * 100:.3f}')
        tqdm_loader.set_description(' '.join(desc))
        if return_extreme_images:
            track_extreme_examples(tp_heaps, fp_heaps, fn_heaps, inputs,
                                   labels, img_files, outputs)
    confusion_matrix = sklearn.metrics.confusion_matrix(
        y_true=np.concatenate(all_labels), y_pred=np.concatenate(all_preds))
    metrics = {}
    if loss_fn is not None:
        metrics['loss'] = losses.result().numpy().item()
    for k, acc in accuracies_topk.items():
        metrics[f'acc_top{k}'] = acc.result().numpy().item() * 100
    heaps = {'tp': tp_heaps, 'fp': fp_heaps, 'fn': fn_heaps}
    return metrics, heaps, confusion_matrix
def _parse_args() -> argparse.Namespace:
    """Defines and parses the command-line arguments for this script."""
    p = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        description='Trains classifier.')
    p.add_argument(
        'dataset_dir',
        help='path to directory containing: 1) classification dataset CSV, '
             '2) label index JSON, 3) splits JSON')
    p.add_argument(
        'cropped_images_dir',
        help='path to local directory where image crops are saved')
    p.add_argument(
        '--multilabel', action='store_true',
        help='for multi-label, multi-class classification')
    p.add_argument(
        '-m', '--model-name', default='efficientnet-b0',
        choices=list(EFFICIENTNET_MODELS.keys()),
        help='which EfficientNet model')
    p.add_argument(
        '--pretrained', action='store_true',
        help='start with pretrained model')
    p.add_argument(
        '--finetune', type=int, default=0,
        help='only fine tune the final fully-connected layer for the first '
             '<finetune> epochs')
    p.add_argument(
        '--label-weighted', action='store_true',
        help='weight training samples to balance labels')
    p.add_argument(
        '--weight-by-detection-conf', nargs='?', const=True, default=False,
        help='weight training examples by detection confidence. '
             'Optionally takes a .npz file for isotonic calibration.')
    p.add_argument(
        '--epochs', type=int, default=0,
        help='number of epochs for training, 0 for eval-only')
    p.add_argument(
        '--batch-size', type=int, default=256,
        help='batch size for both training and eval')
    p.add_argument(
        '--lr', type=float, default=None,
        help='initial learning rate, defaults to (0.016 * batch_size / 256)')
    p.add_argument(
        '--weight-decay', type=float, default=1e-5,
        help='weight decay')
    p.add_argument(
        '--seed', type=int,
        help='random seed')
    p.add_argument(
        '--logdir', default='.',
        help='directory where TensorBoard logs and a params file are saved')
    p.add_argument(
        '--cache', nargs='*', choices=['train', 'val', 'test'], default=(),
        help='which splits of the dataset to cache')
    return p.parse_args()
if __name__ == '__main__':
    args = _parse_args()
    # default learning rate scales linearly with batch size
    if args.lr is None:
        args.lr = 0.016 * args.batch_size / 256  # based on TF models repo
    main(dataset_dir=args.dataset_dir,
         cropped_images_dir=args.cropped_images_dir,
         multilabel=args.multilabel,
         model_name=args.model_name,
         pretrained=args.pretrained,
         finetune=args.finetune,
         label_weighted=args.label_weighted,
         weight_by_detection_conf=args.weight_by_detection_conf,
         epochs=args.epochs,
         batch_size=args.batch_size,
         lr=args.lr,
         weight_decay=args.weight_decay,
         seed=args.seed,
         logdir=args.logdir,
         cache_splits=args.cache)
| 40.13877
| 116
| 0.641729
| 0
| 0
| 0
| 0
| 858
| 0.030581
| 0
| 0
| 9,187
| 0.327441
|
549bb5431eeb75a8dbdf100c69a7b7af3cb1061c
| 4,704
|
py
|
Python
|
pyreach/impl/constraints_impl_test.py
|
google-research/pyreach
|
f91753ce7a26e77e122eb02a9fdd5a1ce3ce0159
|
[
"Apache-2.0"
] | 13
|
2021-09-01T01:10:22.000Z
|
2022-03-05T10:01:52.000Z
|
pyreach/impl/constraints_impl_test.py
|
google-research/pyreach
|
f91753ce7a26e77e122eb02a9fdd5a1ce3ce0159
|
[
"Apache-2.0"
] | null | null | null |
pyreach/impl/constraints_impl_test.py
|
google-research/pyreach
|
f91753ce7a26e77e122eb02a9fdd5a1ce3ce0159
|
[
"Apache-2.0"
] | 6
|
2021-09-20T21:17:53.000Z
|
2022-03-14T18:42:48.000Z
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for utils.py."""
from typing import Optional
import unittest
from pyreach import constraints
from pyreach.common.python import types_gen
from pyreach.impl import constraints_impl as impl
from pyreach.impl import test_data
class TestConstraintsImpl(unittest.TestCase):
  """Tests for ConstraintsDevice / ConstraintsImpl parsing."""

  def test_constraints_impl(self) -> None:
    """Workcell constraints alone yield interactables but no joint limits."""
    constraints_device = impl.ConstraintsDevice()
    try:
      constraints_device.start()
      # no constraints available before any device data arrives
      self.assertIsNone(constraints_device.get())
      constraints_device.enqueue_device_data(
          types_gen.DeviceData(
              device_type="settings-engine",
              data_type="key-value",
              key="workcell_constraints.json",
              value=test_data.get_workcell_constraints_json()))
      constraints_device.wait(1)
      cs: Optional[impl.ConstraintsImpl] = constraints_device.get()
      self.assertIsNotNone(cs)
      assert cs
      # joint limits come from robot constraints, which were not loaded here
      self.assertIsNone(cs.get_joint_limits(""))
      interactables = cs.get_interactables()
      self.assertEqual(len(interactables), 2)
      self.assertEqual(interactables[0].name, "LeftBox")
      left_geometry = interactables[0].geometry
      self.assertIsInstance(left_geometry, constraints.Box)
      assert isinstance(left_geometry, constraints.Box)
      # expected poses/scales mirror the values in the test JSON fixture
      self.assertEqual(left_geometry.pose.as_tuple(),
                       (-0.246944084763527, -0.705296516418457,
                        -0.168291628360748, 0.0, 0.0, 0.0))
      self.assertEqual(
          left_geometry.scale.as_tuple(),
          (0.379999995231628, 0.259999990463257, 0.200000002980232))
      self.assertEqual(interactables[1].name, "RightBox")
      right_geometry = interactables[1].geometry
      self.assertIsInstance(right_geometry, constraints.Box)
      assert isinstance(right_geometry, constraints.Box)
      self.assertEqual(right_geometry.pose.as_tuple(),
                       (0.254177570343018, -0.711709439754486,
                        -0.174813330173492, -6.585575275907331e-05,
                        -0.006104793682704136, -0.021574200980967757))
      self.assertEqual(
          right_geometry.scale.as_tuple(),
          (0.370000004768372, 0.300000011920929, 0.200000002980232))
    finally:
      constraints_device.close()

  def test_robot_constraints_impl(self) -> None:
    """With a named robot, constraints appear only after robot data too."""
    constraints_device = impl.ConstraintsDevice("")
    try:
      constraints_device.start()
      self.assertIsNone(constraints_device.get())
      constraints_device.enqueue_device_data(
          types_gen.DeviceData(
              device_type="settings-engine",
              data_type="key-value",
              key="workcell_constraints.json",
              value=test_data.get_workcell_constraints_json()))
      # workcell data alone is not enough when a robot name was given
      self.assertIsNone(constraints_device.get())
      constraints_device.enqueue_device_data(
          types_gen.DeviceData(
              device_type="robot",
              data_type="key-value",
              key="robot_constraints.json",
              value=test_data.get_robot_constraints_json()))
      constraints_device.wait(1)
      cs: Optional[impl.ConstraintsImpl] = constraints_device.get()
      self.assertIsNotNone(cs)
      assert cs
      joints = cs.get_joint_limits("")
      self.assertIsNotNone(joints)
      assert joints is not None
      # all six joints share the same symmetric limits in the fixture
      self.assertEqual(len(joints), 6)
      self.assertEqual(joints[0].min, -6.335545214359173)
      self.assertEqual(joints[0].max, 6.335545187179586)
      self.assertEqual(joints[1].min, -6.335545214359173)
      self.assertEqual(joints[1].max, 6.335545187179586)
      self.assertEqual(joints[2].min, -6.335545214359173)
      self.assertEqual(joints[2].max, 6.335545187179586)
      self.assertEqual(joints[3].min, -6.335545214359173)
      self.assertEqual(joints[3].max, 6.335545187179586)
      self.assertEqual(joints[4].min, -6.335545214359173)
      self.assertEqual(joints[4].max, 6.335545187179586)
      self.assertEqual(joints[5].min, -6.335545214359173)
      self.assertEqual(joints[5].max, 6.335545187179586)
      self.assertEqual(len(cs.get_interactables()), 2)
    finally:
      constraints_device.close()
if __name__ == "__main__":
  # allow running this test module directly
  unittest.main()
| 39.864407
| 74
| 0.688776
| 3,846
| 0.817602
| 0
| 0
| 0
| 0
| 0
| 0
| 774
| 0.164541
|
549d785cbbd7f0e2ec80896ebc16b20cd8e0ba82
| 3,400
|
py
|
Python
|
qplan/parse.py
|
mackstann/qplaniso
|
97c4fbeeb529dfef0778cedc3e79087f6a87f5c4
|
[
"CC0-1.0"
] | null | null | null |
qplan/parse.py
|
mackstann/qplaniso
|
97c4fbeeb529dfef0778cedc3e79087f6a87f5c4
|
[
"CC0-1.0"
] | null | null | null |
qplan/parse.py
|
mackstann/qplaniso
|
97c4fbeeb529dfef0778cedc3e79087f6a87f5c4
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python3
import itertools
class Node:
    """One node of a parsed query-plan tree."""

    def __init__(self, node_type, width, rows, times):
        self.node_type = node_type
        self.width = width
        self.rows = rows
        self.times = times
        # children are appended by the parser; parent is set by the parser too
        self.inputs = []
        self.parent = None

    def as_dict(self):
        """Recursively converts this subtree into plain dicts and lists."""
        children = [child.as_dict() for child in self.inputs]
        return {
            'type': self.node_type,
            'width': self.width,
            'rows': self.rows,
            'times': self.times,
            'inputs': children,
        }
def indent_of_line(line):
    """Number of leading whitespace characters in `line`."""
    return len(line) - len(line.lstrip())
def line_is_node(line):
    """True if the line introduces a plan node (either root or child)."""
    if line_is_root(line):
        return True
    return line_is_child(line)
def line_is_child(line):
    """True if the line is a child-node marker ('-> ' after the indent)."""
    stripped = line.lstrip()
    return stripped.startswith('-> ')
def line_is_root(line):
    """True for the top-level plan node: exactly one leading space."""
    # inlined indent computation (equivalent to indent_of_line(line) == 1)
    return len(line) - len(line.lstrip()) == 1
def node_type(line):
    """Extracts the node's type name from a plan line.

    Drops the '->' marker, everything from '(' on (cost/timing details),
    and any ' on <relation>' suffix.
    """
    after_arrow = line.split('->', 1)[-1]
    before_paren = after_arrow.split('(', 1)[0]
    name = before_paren.split(' on ', 1)[0]
    return name.strip()
def node_width(line):
    """Parses the 'width=' value (estimated bytes per row) from a plan line."""
    after = line.split(' width=', 1)[1]
    return int(after.split(')', 1)[0])
def node_rows(line):
    """Parses the *actual* row count — the value after the second 'rows='
    occurrence (the first one is the planner's estimate)."""
    _, _, after_second = line.split(' rows=', 2)
    return int(after_second.split(' ', 1)[0])
def node_times(line):
    """Parses 'actual time=START..END' into [start, end] in microseconds."""
    span = line.split('actual time=', 1)[1].split(' ', 1)[0]
    times = []
    for part in span.split('..'):
        # plan times are fractional milliseconds; convert to whole µs
        times.append(int(1000 * float(part)))
    return times
def parse(text):
    """Parses the text of an EXPLAIN ANALYZE query plan into a Node tree.

    Returns the root Node, or None if no node lines were found.
    """
    last_indent = 0
    indent = 0
    root = None
    node = None
    for line in text.splitlines():
        # skip the header, the '-----' separator line, and blank lines
        if line.strip() == 'QUERY PLAN':
            continue
        if line.strip() == '-'*len(line.strip()):
            continue
        if not line.strip():
            continue
        # analyze indent and traverse the graph as needed
        if line_is_root(line):
            last_indent = indent
            indent = indent_of_line(line)
            assert indent == 1
            assert node is None
            node = Node(node_type(line), node_width(line), node_rows(line), node_times(line))
            root = node
        elif line_is_child(line):
            last_indent = indent
            indent = indent_of_line(line)
            assert indent > 1
            assert indent % 2 == 1
            assert node is not None
            if indent == last_indent:
                # sibling of the current node: attach to the same parent
                child = Node(node_type(line), node_width(line), node_rows(line), node_times(line))
                child.parent = node.parent
                node.parent.inputs.append(child)
                node = child
            elif indent > last_indent:
                # deeper indent: first input of the current node
                child = Node(node_type(line), node_width(line), node_rows(line), node_times(line))
                child.parent = node
                node.inputs.append(child)
                node = child
            elif indent < last_indent:
                # shallower indent: walk back up one level per 6 columns
                # (assumes the planner indents children by 6 columns per
                # level — TODO confirm against real EXPLAIN output)
                diff = last_indent - indent
                while diff:
                    node = node.parent
                    diff -= 6
                child = Node(node_type(line), node_width(line), node_rows(line), node_times(line))
                child.parent = node.parent
                node.parent.inputs.append(child)
                node = child
        else:  # it's details of the current node
            pass
    return root
if __name__ == '__main__':
    import pprint
    # smoke test: parse the bundled example plan and dump the tree
    with open('example-plan.txt') as f:
        pprint.pprint(parse(f.read()).as_dict())
| 26.5625
| 98
| 0.523529
| 476
| 0.14
| 0
| 0
| 0
| 0
| 0
| 0
| 258
| 0.075882
|
549e3c5ec51f517db74f9b45d00df6b1a26198eb
| 2,397
|
py
|
Python
|
10054 - The Necklace/main.py
|
Shree-Gillorkar/uva-onlinejudge-solutions
|
df64f5c3a136827b5ca7871df1cf8aafadcf5c9b
|
[
"MIT"
] | 24
|
2017-10-15T04:04:55.000Z
|
2022-01-31T17:14:29.000Z
|
10054 - The Necklace/main.py
|
ashishrana080699/uva-onlinejudge-solutions
|
d2d0a58e53e3d9acf6d20e56a40900423ae705c4
|
[
"MIT"
] | 1
|
2019-07-11T04:22:55.000Z
|
2019-07-14T19:34:41.000Z
|
10054 - The Necklace/main.py
|
ashishrana080699/uva-onlinejudge-solutions
|
d2d0a58e53e3d9acf6d20e56a40900423ae705c4
|
[
"MIT"
] | 27
|
2017-01-06T17:33:57.000Z
|
2021-11-25T00:07:54.000Z
|
from sys import stdin
from collections import defaultdict, deque
MAX_COLORS = 51
def load_num():
    """Reads a single integer from the next line of stdin."""
    line = stdin.readline()
    return int(line)
def load_pair():
    """Reads one 'a b' line from stdin as a tuple of two ints."""
    fields = stdin.readline().split()
    return tuple(int(f) for f in fields)
def load_case():
    """Reads one test case: a bead count, then that many color pairs."""
    nbeads = load_num()
    return [load_pair() for _ in range(nbeads)]
def build_necklace(beads):
    """Constructs an Euler circuit in the multigraph defined by the beads.

    Each bead (a, b) is an undirected edge between colors a and b.

    Args:
        beads: list of (int, int) color pairs.

    Returns:
        A deque of directed beads forming the necklace, or None if no Euler
        circuit exists (some vertex has odd degree, or the beads' graph is
        disconnected).
    """
    if not beads:
        return deque()
    # Edge-multiplicity adjacency map plus vertex degrees.  A defaultdict
    # replaces the old fixed-size MAX_COLORS list, so any color value works.
    # An Euler circuit requires every vertex to have even degree.
    amatrix = defaultdict(lambda: defaultdict(int))
    degree = defaultdict(int)
    for b in beads:
        amatrix[b[0]][b[1]] += 1
        amatrix[b[1]][b[0]] += 1
        degree[b[0]] += 1
        degree[b[1]] += 1
    for v in degree.values():
        if v % 2 != 0:
            return None
    # Create necklace using Fleury's algorithm
    def get_next_bead(color):
        """Consumes and returns an unused edge out of `color` (greedily
        picking the highest remaining multiplicity), or None if none left."""
        s_color, s_degree = 0, 0
        for col, deg in amatrix[color].items():
            if deg > s_degree:
                s_color, s_degree = col, deg
        if s_degree > 0:
            amatrix[color][s_color] -= 1
            amatrix[s_color][color] -= 1
            return (color, s_color)
        return None
    # Start construction
    nxt = get_next_bead(beads[0][1])
    necklace = deque([nxt])
    # When a closed cycle forms early, rotate it so a vertex with unused
    # edges ends up at the tail.  `stalls` counts consecutive rotations with
    # no progress: after a full turn, no vertex on the cycle has spare edges,
    # so the graph is disconnected and no Euler circuit exists.
    stalls = 0
    while True:
        nxt = get_next_bead(necklace[-1][1])
        if nxt:
            necklace.append(nxt)
            stalls = 0
        elif len(beads) != len(necklace):
            if stalls >= len(necklace):
                # bug fix: disconnected graph — the old code rotated forever
                return None
            # Created a closed cycle. Move last segment to the start
            necklace.appendleft(necklace.pop())
            stalls += 1
        else:
            break
    return necklace
if __name__ == '__main__':
    ncases = load_num()
    for c in range(ncases):
        beads = load_case()
        necklace = build_necklace(beads)
        print("Case #{}".format(c+1))
        if necklace:
            # Batch all bead lines into one string for faster IO
            # (roughly a third of the runtime is IO on this problem).
            necklace_str = "".join(
                "{} {}\n".format(b[0], b[1]) for b in necklace)
        else:
            necklace_str = "some beads may be lost\n"
        # The final case must not be followed by an extra blank line.
        if c+1 == ncases:
            print(necklace_str[:-1])
        else:
            print(necklace_str)
| 27.238636
| 79
| 0.553191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 496
| 0.206925
|
549ee02e71d944702ec6c3b3ab3e03cf388c6552
| 458
|
py
|
Python
|
tests/test_eeg.py
|
y1ngyang/NeuroKit.py
|
867655f84bf210626649bca72258af6a2b5a2791
|
[
"MIT"
] | null | null | null |
tests/test_eeg.py
|
y1ngyang/NeuroKit.py
|
867655f84bf210626649bca72258af6a2b5a2791
|
[
"MIT"
] | null | null | null |
tests/test_eeg.py
|
y1ngyang/NeuroKit.py
|
867655f84bf210626649bca72258af6a2b5a2791
|
[
"MIT"
] | null | null | null |
import pytest
import doctest
import os
import numpy as np
import pandas as pd
import neurokit as nk
run_tests_in_local = False
#==============================================================================
# data
#==============================================================================
#def test_read_acqknowledge():
#
# assert 3 == 3
if __name__ == '__main__':
    # nose.run(defaultTest=__name__)
    # run embedded doctests first, then the pytest suite for this module
    doctest.testmod()
    pytest.main()
| 16.962963
| 79
| 0.458515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 258
| 0.563319
|
549fb62cea23b9b1c82de165b05b9e48e6855b9f
| 231,371
|
py
|
Python
|
tests/semantics/models.py
|
dnikolay-ebc/FiLiP
|
9a84979da8dff4523cb91e40869070bd02aa91fe
|
[
"BSD-3-Clause"
] | 6
|
2021-11-21T21:57:38.000Z
|
2022-02-22T08:20:30.000Z
|
tests/semantics/models.py
|
RWTH-EBC/FiLiP
|
e294c5ef94b2b6ad9611316e50b5c550bcd77c1b
|
[
"BSD-3-Clause"
] | 83
|
2021-04-08T18:34:20.000Z
|
2022-03-30T12:18:32.000Z
|
tests/semantics/models.py
|
dnikolay-ebc/FiLiP
|
9a84979da8dff4523cb91e40869070bd02aa91fe
|
[
"BSD-3-Clause"
] | 5
|
2021-10-04T08:39:21.000Z
|
2022-03-30T07:30:57.000Z
|
"""
Autogenerated Models for the vocabulary described by the ontologies:
http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
https://w3id.org/saref (saref.ttl)
"""
from enum import Enum
from typing import Dict, Union, List
from filip.semantics.semantics_models import\
SemanticClass,\
SemanticIndividual,\
RelationField,\
DataField,\
SemanticDeviceClass,\
DeviceAttributeField,\
CommandField
from filip.semantics.semantics_manager import\
SemanticsManager,\
InstanceRegistry
# Shared manager through which all generated classes register and resolve
# their instances.
semantic_manager: SemanticsManager = SemanticsManager(
    instance_registry=InstanceRegistry(),
)
# ---------CLASSES--------- #
class Currency(SemanticClass):
    """
    The Unit Of Measure For Price
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): is_initialised is unused here; kept to match the
        # code generator's template for classes with fields.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
class Energy_Unit(SemanticClass):
    """
    The Unit Of Measure For Energy
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): is_initialised is unused here; generator template.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
class Illuminance_Unit(SemanticClass):
    """
    The Unit Of Measure For Light
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): is_initialised is unused here; generator template.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
class Power_Unit(SemanticClass):
    """
    The Unit Of Measure For Power
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): is_initialised is unused here; generator template.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
class Pressure_Unit(SemanticClass):
    """
    The Unit Of Measure For Pressure
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): is_initialised is unused here; generator template.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
class Temperature_Unit(SemanticClass):
    """
    The Unit Of Measure For Temperature
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): is_initialised is unused here; generator template.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
class Thing(SemanticClass):
    """
    Predefined root_class
    Source:
    None (Predefined)
    """
    def __new__(cls, *args, **kwargs):
        # inject the module-level semantic_manager so every instance is
        # created through (and registered with) the shared registry
        kwargs['semantic_manager'] = semantic_manager
        return super().__new__(cls, *args, **kwargs)
    def __init__(self, *args, **kwargs):
        kwargs['semantic_manager'] = semantic_manager
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
class Class1(Thing):
    """
    Comment On Class 1
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # first initialisation only: attach the ontology's rules to each
            # field and bind every field to this instance's identifier
            self.dataProp2._rules = [('value', [[]])]
            self.oProp1._rules = [('some', [[Class2], [Class4]])]
            self.objProp2._rules = [('some', [[Class1, Class2]])]
            self.objProp3._rules = [('some', [[Class3]])]
            self.objProp4._rules = [('some', [[Class1, Class2, Class3]])]
            self.objProp5._rules = [('some', [[Class1, Class2], [Class1, Class3]]), ('value', [[Individual1]])]
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.objProp3._instance_identifier = self.get_identifier()
            self.objProp4._instance_identifier = self.get_identifier()
            self.objProp5._instance_identifier = self.get_identifier()
            self.dataProp2._instance_identifier = self.get_identifier()
            # values mandated by the ontology's 'value' restrictions above
            self.dataProp2.add(2)
            self.objProp5.add(Individual1())
    # Data fields
    dataProp2: DataField = DataField(
        name='dataProp2',
        rule='value 2',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='some (Class2 or Class4)',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some (Class1 and Class2)',
        semantic_manager=semantic_manager)
    objProp3: RelationField = RelationField(
        name='objProp3',
        rule='some Class3',
        inverse_of=['oProp1'],
        semantic_manager=semantic_manager)
    objProp4: RelationField = RelationField(
        name='objProp4',
        rule='some (Class1 and Class2) and Class3)',
        semantic_manager=semantic_manager)
    objProp5: RelationField = RelationField(
        name='objProp5',
        rule='some (Class1 and (Class2 or Class3)), value Individual1',
        semantic_manager=semantic_manager)
class Class1a(Class1):
    """
    Generated SemanticClass without description
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # same rule set as Class1, re-attached because the fields are
            # redeclared on this subclass by the generator
            self.dataProp2._rules = [('value', [[]])]
            self.oProp1._rules = [('some', [[Class2], [Class4]])]
            self.objProp2._rules = [('some', [[Class1, Class2]])]
            self.objProp3._rules = [('some', [[Class3]])]
            self.objProp4._rules = [('some', [[Class1, Class2, Class3]])]
            self.objProp5._rules = [('some', [[Class1, Class2], [Class1, Class3]]), ('value', [[Individual1]])]
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.objProp3._instance_identifier = self.get_identifier()
            self.objProp4._instance_identifier = self.get_identifier()
            self.objProp5._instance_identifier = self.get_identifier()
            self.dataProp2._instance_identifier = self.get_identifier()
    # Data fields
    dataProp2: DataField = DataField(
        name='dataProp2',
        rule='value 2',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='some (Class2 or Class4)',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some (Class1 and Class2)',
        semantic_manager=semantic_manager)
    objProp3: RelationField = RelationField(
        name='objProp3',
        rule='some Class3',
        inverse_of=['oProp1'],
        semantic_manager=semantic_manager)
    objProp4: RelationField = RelationField(
        name='objProp4',
        rule='some (Class1 and Class2) and Class3)',
        semantic_manager=semantic_manager)
    objProp5: RelationField = RelationField(
        name='objProp5',
        rule='some (Class1 and (Class2 or Class3)), value Individual1',
        semantic_manager=semantic_manager)
class Class1aa(Class1a):
    """
    Auto-generated semantic class; the ontology provides no description.
    Identical rule set to its parent Class1a.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated rules and owner identifiers on first construction."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Restriction rules generated from the source ontology.
            self.dataProp2._rules = [('value', [[]])]
            self.oProp1._rules = [('some', [[Class2], [Class4]])]
            self.objProp2._rules = [('some', [[Class1, Class2]])]
            self.objProp3._rules = [('some', [[Class3]])]
            self.objProp4._rules = [('some', [[Class1, Class2, Class3]])]
            self.objProp5._rules = [('some', [[Class1, Class2], [Class1, Class3]]), ('value', [[Individual1]])]
            # Bind each field to this instance's identifier.
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.objProp3._instance_identifier = self.get_identifier()
            self.objProp4._instance_identifier = self.get_identifier()
            self.objProp5._instance_identifier = self.get_identifier()
            self.dataProp2._instance_identifier = self.get_identifier()
    # Data fields
    dataProp2: DataField = DataField(
        name='dataProp2',
        rule='value 2',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='some (Class2 or Class4)',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some (Class1 and Class2)',
        semantic_manager=semantic_manager)
    objProp3: RelationField = RelationField(
        name='objProp3',
        rule='some Class3',
        inverse_of=['oProp1'],
        semantic_manager=semantic_manager)
    # NOTE(review): rule string has one unmatched ')' — generator glitch;
    # the _rules set in __init__ read it as (Class1 and Class2 and Class3).
    objProp4: RelationField = RelationField(
        name='objProp4',
        rule='some (Class1 and Class2) and Class3)',
        semantic_manager=semantic_manager)
    objProp5: RelationField = RelationField(
        name='objProp5',
        rule='some (Class1 and (Class2 or Class3)), value Individual1',
        semantic_manager=semantic_manager)
class Class1b(Class1):
    """
    Auto-generated semantic class; the ontology provides no description.
    Like Class1 but adds a stricter 'some Class2' restriction on oProp1.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated rules and owner identifiers on first construction."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Restriction rules generated from the source ontology; oProp1
            # carries both the own and the inherited restriction.
            self.dataProp2._rules = [('value', [[]])]
            self.oProp1._rules = [('some', [[Class2]]), ('some', [[Class2], [Class4]])]
            self.objProp2._rules = [('some', [[Class1, Class2]])]
            self.objProp3._rules = [('some', [[Class3]])]
            self.objProp4._rules = [('some', [[Class1, Class2, Class3]])]
            self.objProp5._rules = [('some', [[Class1, Class2], [Class1, Class3]]), ('value', [[Individual1]])]
            # Bind each field to this instance's identifier.
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.objProp3._instance_identifier = self.get_identifier()
            self.objProp4._instance_identifier = self.get_identifier()
            self.objProp5._instance_identifier = self.get_identifier()
            self.dataProp2._instance_identifier = self.get_identifier()
    # Data fields
    dataProp2: DataField = DataField(
        name='dataProp2',
        rule='value 2',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='some Class2, some (Class2 or Class4)',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some (Class1 and Class2)',
        semantic_manager=semantic_manager)
    objProp3: RelationField = RelationField(
        name='objProp3',
        rule='some Class3',
        inverse_of=['oProp1'],
        semantic_manager=semantic_manager)
    # NOTE(review): rule string has one unmatched ')' — generator glitch;
    # the _rules set in __init__ read it as (Class1 and Class2 and Class3).
    objProp4: RelationField = RelationField(
        name='objProp4',
        rule='some (Class1 and Class2) and Class3)',
        semantic_manager=semantic_manager)
    objProp5: RelationField = RelationField(
        name='objProp5',
        rule='some (Class1 and (Class2 or Class3)), value Individual1',
        semantic_manager=semantic_manager)
class Class2(Thing):
    """
    Auto-generated semantic class; the source ontology supplies no
    description for this concept.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Install the generated field rules on first construction only."""
        previously_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if previously_initialised:
            # Instance already carries an id: rules were installed before.
            return
        # Restriction rules generated from the source ontology.
        self.attributeProp._rules = [('some', [['customDataType1']])]
        self.oProp1._rules = [('min|1', [[Class1]])]
        self.objProp2._rules = [('only', [[Thing]])]
        # Bind each field to this instance's identifier.
        self.oProp1._instance_identifier = self.get_identifier()
        self.objProp2._instance_identifier = self.get_identifier()
        self.attributeProp._instance_identifier = self.get_identifier()
    # Data fields
    attributeProp: DataField = DataField(
        name='attributeProp',
        rule='some customDataType1',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='min 1 Class1',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='only Thing',
        semantic_manager=semantic_manager)
class Class3(Thing):
    """
    Auto-generated semantic class; the ontology provides no description.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated rules, identifiers and default individuals once."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Restriction rules generated from the source ontology.
            self.attributeProp._rules = [('some', [['string']])]
            self.commandProp._rules = [('some', [['string']])]
            self.dataProp1._rules = [('only', [['customDataType4']])]
            self.oProp1._rules = [('value', [[Individual1]])]
            self.objProp2._rules = [('some', [[Class1]]), ('value', [[Individual1]])]
            # Bind each field to this instance's identifier.
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.attributeProp._instance_identifier = self.get_identifier()
            self.commandProp._instance_identifier = self.get_identifier()
            self.dataProp1._instance_identifier = self.get_identifier()
            # Defaults demanded by the 'value Individual1' restrictions.
            self.oProp1.add(Individual1())
            self.objProp2.add(Individual1())
    # Data fields
    attributeProp: DataField = DataField(
        name='attributeProp',
        rule='some string',
        semantic_manager=semantic_manager)
    commandProp: DataField = DataField(
        name='commandProp',
        rule='some string',
        semantic_manager=semantic_manager)
    dataProp1: DataField = DataField(
        name='dataProp1',
        rule='only customDataType4',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='value Individual1',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some Class1, value Individual1',
        semantic_manager=semantic_manager)
class Class123(Class1, Class2, Class3):
    """
    Auto-generated semantic class combining Class1, Class2 and Class3;
    each field carries the merged restrictions of all three parents.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Attach the merged generated rules and owner identifiers once."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Merged restriction rules from all parent classes.
            self.attributeProp._rules = [('some', [['string']]), ('some', [['customDataType1']])]
            self.commandProp._rules = [('some', [['string']])]
            self.dataProp1._rules = [('only', [['customDataType4']])]
            self.dataProp2._rules = [('value', [[]])]
            self.oProp1._rules = [('value', [[Individual1]]), ('min|1', [[Class1]]), ('some', [[Class2], [Class4]])]
            self.objProp2._rules = [('some', [[Class1]]), ('value', [[Individual1]]), ('only', [[Thing]]), ('some', [[Class1, Class2]])]
            self.objProp3._rules = [('some', [[Class3]])]
            self.objProp4._rules = [('some', [[Class1, Class2, Class3]])]
            self.objProp5._rules = [('some', [[Class1, Class2], [Class1, Class3]]), ('value', [[Individual1]])]
            # Bind each field to this instance's identifier.
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.objProp3._instance_identifier = self.get_identifier()
            self.objProp4._instance_identifier = self.get_identifier()
            self.objProp5._instance_identifier = self.get_identifier()
            self.attributeProp._instance_identifier = self.get_identifier()
            self.commandProp._instance_identifier = self.get_identifier()
            self.dataProp1._instance_identifier = self.get_identifier()
            self.dataProp2._instance_identifier = self.get_identifier()
    # Data fields
    attributeProp: DataField = DataField(
        name='attributeProp',
        rule='some string, some customDataType1',
        semantic_manager=semantic_manager)
    commandProp: DataField = DataField(
        name='commandProp',
        rule='some string',
        semantic_manager=semantic_manager)
    dataProp1: DataField = DataField(
        name='dataProp1',
        rule='only customDataType4',
        semantic_manager=semantic_manager)
    dataProp2: DataField = DataField(
        name='dataProp2',
        rule='value 2',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='value Individual1, min 1 Class1, some (Class2 or Class4)',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some Class1, value Individual1, only Thing, some (Class1 and Class2)',
        semantic_manager=semantic_manager)
    objProp3: RelationField = RelationField(
        name='objProp3',
        rule='some Class3',
        inverse_of=['oProp1'],
        semantic_manager=semantic_manager)
    # NOTE(review): rule string has one unmatched ')' — generator glitch;
    # the _rules set in __init__ read it as (Class1 and Class2 and Class3).
    objProp4: RelationField = RelationField(
        name='objProp4',
        rule='some (Class1 and Class2) and Class3)',
        semantic_manager=semantic_manager)
    objProp5: RelationField = RelationField(
        name='objProp5',
        rule='some (Class1 and (Class2 or Class3)), value Individual1',
        semantic_manager=semantic_manager)
class Class13(Class1, Class3):
    """
    Auto-generated semantic class combining Class1 and Class3; each field
    carries the merged restrictions of both parents plus its own.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Attach the merged generated rules and owner identifiers once."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Merged restriction rules from both parent classes.
            self.attributeProp._rules = [('some', [['string']])]
            self.commandProp._rules = [('some', [['string']])]
            self.dataProp1._rules = [('min|1', [['int']]), ('only', [['customDataType4']])]
            self.dataProp2._rules = [('exactly|1', [['boolean']]), ('value', [[]])]
            self.oProp1._rules = [('value', [[Individual1]]), ('some', [[Class2], [Class4]])]
            self.objProp2._rules = [('some', [[Class1]]), ('value', [[Individual1]]), ('some', [[Class1, Class2]])]
            self.objProp3._rules = [('some', [[Class3]])]
            self.objProp4._rules = [('some', [[Class1, Class2, Class3]])]
            self.objProp5._rules = [('some', [[Class1, Class2], [Class1, Class3]]), ('value', [[Individual1]])]
            # Bind each field to this instance's identifier.
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.objProp3._instance_identifier = self.get_identifier()
            self.objProp4._instance_identifier = self.get_identifier()
            self.objProp5._instance_identifier = self.get_identifier()
            self.attributeProp._instance_identifier = self.get_identifier()
            self.commandProp._instance_identifier = self.get_identifier()
            self.dataProp1._instance_identifier = self.get_identifier()
            self.dataProp2._instance_identifier = self.get_identifier()
    # Data fields
    attributeProp: DataField = DataField(
        name='attributeProp',
        rule='some string',
        semantic_manager=semantic_manager)
    commandProp: DataField = DataField(
        name='commandProp',
        rule='some string',
        semantic_manager=semantic_manager)
    dataProp1: DataField = DataField(
        name='dataProp1',
        rule='min 1 int, only customDataType4',
        semantic_manager=semantic_manager)
    dataProp2: DataField = DataField(
        name='dataProp2',
        rule='exactly 1 boolean, value 2',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='value Individual1, some (Class2 or Class4)',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some Class1, value Individual1, some (Class1 and Class2)',
        semantic_manager=semantic_manager)
    objProp3: RelationField = RelationField(
        name='objProp3',
        rule='some Class3',
        inverse_of=['oProp1'],
        semantic_manager=semantic_manager)
    # NOTE(review): rule string has one unmatched ')' — generator glitch;
    # the _rules set in __init__ read it as (Class1 and Class2 and Class3).
    objProp4: RelationField = RelationField(
        name='objProp4',
        rule='some (Class1 and Class2) and Class3)',
        semantic_manager=semantic_manager)
    objProp5: RelationField = RelationField(
        name='objProp5',
        rule='some (Class1 and (Class2 or Class3)), value Individual1',
        semantic_manager=semantic_manager)
class Class3a(Class3):
    """
    Auto-generated semantic class; the source ontology supplies no
    description for this concept. Re-states the Class3 field rules.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Install the generated field rules on first construction only."""
        previously_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if previously_initialised:
            # Instance already carries an id: rules were installed before.
            return
        # Restriction rules generated from the source ontology.
        self.attributeProp._rules = [('some', [['string']])]
        self.commandProp._rules = [('some', [['string']])]
        self.dataProp1._rules = [('only', [['customDataType4']])]
        self.oProp1._rules = [('value', [[Individual1]])]
        self.objProp2._rules = [('some', [[Class1]]), ('value', [[Individual1]])]
        # Bind each field to this instance's identifier.
        self.oProp1._instance_identifier = self.get_identifier()
        self.objProp2._instance_identifier = self.get_identifier()
        self.attributeProp._instance_identifier = self.get_identifier()
        self.commandProp._instance_identifier = self.get_identifier()
        self.dataProp1._instance_identifier = self.get_identifier()
    # Data fields
    attributeProp: DataField = DataField(
        name='attributeProp',
        rule='some string',
        semantic_manager=semantic_manager)
    commandProp: DataField = DataField(
        name='commandProp',
        rule='some string',
        semantic_manager=semantic_manager)
    dataProp1: DataField = DataField(
        name='dataProp1',
        rule='only customDataType4',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='value Individual1',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some Class1, value Individual1',
        semantic_manager=semantic_manager)
class Class3aa(Class3a):
    """
    Auto-generated semantic class; the ontology provides no description.
    Identical rule set to its parent Class3a.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated rules and owner identifiers on first construction."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Restriction rules generated from the source ontology.
            self.attributeProp._rules = [('some', [['string']])]
            self.commandProp._rules = [('some', [['string']])]
            self.dataProp1._rules = [('only', [['customDataType4']])]
            self.oProp1._rules = [('value', [[Individual1]])]
            self.objProp2._rules = [('some', [[Class1]]), ('value', [[Individual1]])]
            # Bind each field to this instance's identifier.
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.attributeProp._instance_identifier = self.get_identifier()
            self.commandProp._instance_identifier = self.get_identifier()
            self.dataProp1._instance_identifier = self.get_identifier()
    # Data fields
    attributeProp: DataField = DataField(
        name='attributeProp',
        rule='some string',
        semantic_manager=semantic_manager)
    commandProp: DataField = DataField(
        name='commandProp',
        rule='some string',
        semantic_manager=semantic_manager)
    dataProp1: DataField = DataField(
        name='dataProp1',
        rule='only customDataType4',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='value Individual1',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='some Class1, value Individual1',
        semantic_manager=semantic_manager)
class Class4(Thing):
    """
    Auto-generated semantic class; the source ontology supplies no
    description for this concept.
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        """Install the generated objProp4 rule on first construction only."""
        previously_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if previously_initialised:
            # Instance already carries an id: rule was installed before.
            return
        self.objProp4._rules = [('min|1', [[Class1]])]
        self.objProp4._instance_identifier = self.get_identifier()
    # Relation fields
    objProp4: RelationField = RelationField(
        name='objProp4',
        rule='min 1 Class1',
        semantic_manager=semantic_manager)
class Command(Thing):
    """
    A Directive That A Device Must Support To Perform A Certain Function. A
    Command May Act Upon A State, But Does Not Necessarily Act Upon A State. For
    Example, The On Command Acts Upon The On/Off State, But The Get Command Does
    Not Act Upon Any State, It Simply Gives A Directive To Retrieve A Certain
    Value. We Propose Here A List Of Commands That Are Relevant For The Purpose
    Of Saref, But This List Can Be Extended.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated SAREF rules and owner identifiers once."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Restriction rules generated from the SAREF ontology.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Bind each field to this instance's identifier.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Close_Command(Command):
    """
    A Type Of Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated SAREF rules and owner identifiers once."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Acts_Upon keeps both the specialised Open_Close_State
            # restriction and the inherited 'only State' restriction.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Open_Close_State]]), ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Bind each field to this instance's identifier.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Open_Close_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Commodity(Thing):
    """
    A Marketable Item For Which There Is Demand, But Which Is Supplied Without
    Qualitative Differentiation Across A Market. Saref Refers To Energy
    Commodities Such As Electricity, Gas, Coal And Oil.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # This class declares no fields of its own, so there is no
        # first-construction rule setup to perform; the generator's
        # unused `is_initialised` probe has been removed.
        super().__init__(*args, **kwargs)
class Coal(Commodity):
    """
    A Type Of Commodity
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # No own fields: nothing to initialise beyond the parent class;
        # the generator's unused `is_initialised` probe has been removed.
        super().__init__(*args, **kwargs)
class Device(Thing):
    """
    A Tangible Object Designed To Accomplish A Particular Task In Households,
    Common Public Buildings Or Offices. In Order To Accomplish This Task, The
    Device Performs One Or More Functions. For Example, A Washing Machine Is
    Designed To Wash (Task) And To Accomplish This Task It Performs A Start And
    Stop Function. Devices Can Be Structured In Categories (Subclasses) That
    Reflect The Different Domain In Which A Device Is Used, E.G., Smart
    Appliances Domain (Subclass Functionrelated) Vs. Building Domain (Subclass
    Buildingrelated) Vs. Smart Grid Domain (Subclass Energyrelated). New
    Categories Can Be Defined,If Needed, To Reflect Other Differences, For
    Example Different Points Of View, Such As The Point Of View Of The Device'S
    User Vs. The Point Of View Of The Device'S Manufacturer. We Propose A List
    Of Devices That Are Relevant For The Purpose Of Saref, But This List Can Be
    Extended.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated SAREF restriction rules and owner identifiers once."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Restriction rules generated from the SAREF ontology.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Bind each field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Building_Related(Device):
    """
    A Category That Includes Devices As Described By Building Related Data
    Models, Such As Ifc And Fiemser
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        """Attach generated SAREF restriction rules and owner identifiers once."""
        # Skip generated setup for already-initialised instances.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Restriction rules generated from the SAREF ontology
            # (same set as the parent Device class).
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Bind each field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Electricity(Commodity):
    """
    A Type Of Commodity
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # No own fields: nothing to initialise beyond the parent class;
        # the generator's unused `is_initialised` probe has been removed.
        super().__init__(*args, **kwargs)
class Energy_Related(Device):
    """
    A Category That Considers Devices Based On Energy Consumption Information
    And Profiles To Optimize Energy Efficiency.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # 'id' is present in __dict__ only once the instance has already been
        # initialised, so the binding below runs exactly once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the OWL restrictions that the
            # field `rule` strings below describe in human-readable form.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Tie every field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Function(Thing):
    """
    The Functionality Necessary To Accomplish The Task For Which A Device Is
    Designed. A Device Can Be Designed To Perform More Than One Function.
    Functions Can Be Structured In Categories (Subclasses) That Reflect
    Different Points Of View, For Example, Considering The Specific Application
    Area For Which A Function Can Be Used (E.G., Light, Temperature, Motion,
    Heat, Power, Etc.), Or The Capability That A Function Can Support (E.G.,
    Receive, Reply, Notify, Etc.), And So Forth.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind rules and identifier only on the very first initialisation.
        first_init = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if not first_init:
            return
        self.Has_Command._rules = [('min|1', [[Command]])]
        self.Has_Command._instance_identifier = self.get_identifier()
    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        rule='min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
class Actuating_Function(Function):
    """
    A Function That Allows To Transmit Data To Actuators, Such As Level Settings
    (E.G., Temperature) Or Binary Switching (E.G., Open/Close, On/Off)
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Rules/identifier are set up exactly once per instance.
        first_init = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if not first_init:
            return
        self.Has_Command._rules = [('min|1', [[Command]])]
        self.Has_Command._instance_identifier = self.get_identifier()
    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        rule='min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
class Event_Function(Function):
    """
    A Function That Allows To Notify Another Device That A Certain Threshold
    Value Has Been Exceeded.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Rules/identifiers are bound only on first initialisation.
        first_init = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if not first_init:
            return
        self.Has_Command._rules = [
            ('only', [[Notify_Command]]),
            ('min|1', [[Command]]),
        ]
        self.Has_Threshold_Measurement._rules = [('min|1', [[Measurement]])]
        for field in (self.Has_Command, self.Has_Threshold_Measurement):
            field._instance_identifier = self.get_identifier()
    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        rule='only Notify_Command, min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
    Has_Threshold_Measurement: RelationField = RelationField(
        name='Has_Threshold_Measurement',
        rule='min 1 Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Associated With An Event Function To Notify That A Certain
    Threshold Measurement Has Been Exceeded
    """
class Function_Related(Device):
    """
    A Category That Considers Devices, Sensors And Their Specification In Terms
    Of Functions, States And Services
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # 'id' is present in __dict__ only once the instance has already been
        # initialised, so the binding below runs exactly once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the OWL restrictions that the
            # field `rule` strings below describe in human-readable form.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Tie every field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Actuator(Function_Related):
    """
    A Device Responsible For Moving Or Controlling A Mechanism Or System By
    Performing An Actuating Function
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # 'id' is present in __dict__ only once the instance has already been
        # initialised, so the binding below runs exactly once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            # Actuator narrows Has_Function: at least one Actuating_Function
            # in addition to the inherited 'min 1 Function' restriction.
            self.Has_Function._rules = [('some', [[Actuating_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Tie every field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Actuating_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Appliance(Function_Related):
    """
    An Electrical/Mechanical Machine That Accomplish Some Household Functions,
    Such As Cleaning Or Cooking
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # 'id' is present in __dict__ only once the instance has already been
        # initialised, so the binding below runs exactly once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the OWL restrictions that the
            # field `rule` strings below describe in human-readable form.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Tie every field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Gas(Commodity):
    """
    A Type Of Commodity
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # This class declares no fields of its own, so there is nothing to
        # (re)bind after initialisation; the unused ``is_initialised`` guard
        # emitted by the generator has been removed as dead code.
        super().__init__(*args, **kwargs)
class Generator(Energy_Related):
    """
    A Type Of Energy-Related Device That Generates Energy
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # 'id' is present in __dict__ only once the instance has already been
        # initialised, so the binding below runs exactly once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the OWL restrictions that the
            # field `rule` strings below describe in human-readable form.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Tie every field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Gertrude(Class1, Class2):
    """
    Generated SemanticClass without description
    Source:
    http://www.semanticweb.org/redin/ontologies/2020/11/untitled-ontology-25 (ParsingTesterOntology)
    """
    def __init__(self, *args, **kwargs):
        # First-initialisation guard: rules are bound once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            self.attributeProp._rules = [('some', [['customDataType1']])]
            # NOTE(review): the inner list is empty although the field's rule
            # string is 'value 2' -- the literal appears to have been dropped
            # by the generator; confirm against the source ontology.
            self.dataProp2._rules = [('value', [[]])]
            self.oProp1._rules = [('min|1', [[Class1]]), ('some', [[Class2], [Class4]])]
            self.objProp2._rules = [('only', [[Thing]]), ('some', [[Class1, Class2]])]
            self.objProp3._rules = [('some', [[Class3]])]
            self.objProp4._rules = [('some', [[Class1, Class2, Class3]])]
            self.objProp5._rules = [('some', [[Class1, Class2], [Class1, Class3]]), ('value', [[Individual1]])]
            # Tie every field to this instance's identifier.
            self.oProp1._instance_identifier = self.get_identifier()
            self.objProp2._instance_identifier = self.get_identifier()
            self.objProp3._instance_identifier = self.get_identifier()
            self.objProp4._instance_identifier = self.get_identifier()
            self.objProp5._instance_identifier = self.get_identifier()
            self.attributeProp._instance_identifier = self.get_identifier()
            self.dataProp2._instance_identifier = self.get_identifier()
    # Data fields
    attributeProp: DataField = DataField(
        name='attributeProp',
        rule='some customDataType1',
        semantic_manager=semantic_manager)
    dataProp2: DataField = DataField(
        name='dataProp2',
        rule='value 2',
        semantic_manager=semantic_manager)
    # Relation fields
    oProp1: RelationField = RelationField(
        name='oProp1',
        rule='min 1 Class1, some (Class2 or Class4)',
        inverse_of=['objProp3'],
        semantic_manager=semantic_manager)
    objProp2: RelationField = RelationField(
        name='objProp2',
        rule='only Thing, some (Class1 and Class2)',
        semantic_manager=semantic_manager)
    objProp3: RelationField = RelationField(
        name='objProp3',
        rule='some Class3',
        inverse_of=['oProp1'],
        semantic_manager=semantic_manager)
    objProp4: RelationField = RelationField(
        name='objProp4',
        # NOTE(review): rule string has unbalanced parentheses (extra ')') --
        # likely a generator artefact; the parsed _rules in __init__ treat it
        # as a single conjunction of Class1, Class2 and Class3.
        rule='some (Class1 and Class2) and Class3)',
        semantic_manager=semantic_manager)
    objProp5: RelationField = RelationField(
        name='objProp5',
        rule='some (Class1 and (Class2 or Class3)), value Individual1',
        semantic_manager=semantic_manager)
class Get_Command(Command):
    """
    A Type Of Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Rules and per-field identifiers are bound only the first time this
        # instance is initialised.
        first_init = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if not first_init:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Acts_Upon._rules = [('only', [[State]])]
        self.Is_Command_Of._rules = [('min|1', [[Function]])]
        for field in (self.Acts_Upon, self.Is_Command_Of, self.Has_Description):
            field._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Get_Current_Meter_Value_Command(Get_Command):
    """
    A Type Of Get Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Rules mirror the 'rule' strings on the class-level fields below
            # (re-declared identically to the parent by the generator).
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Point each field at this instance via its identifier.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Get_Meter_Data_Command(Get_Command):
    """
    A Type Of Get Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Rules mirror the 'rule' strings on the class-level fields below
            # (re-declared identically to the parent by the generator).
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Point each field at this instance via its identifier.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Get_Meter_History_Command(Get_Command):
    """
    A Type Of Get Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Rules mirror the 'rule' strings on the class-level fields below
            # (re-declared identically to the parent by the generator).
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Point each field at this instance via its identifier.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Get_Sensing_Data_Command(Get_Command):
    """
    A Type Of Get Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Rules mirror the 'rule' strings on the class-level fields below
            # (re-declared identically to the parent by the generator).
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Point each field at this instance via its identifier.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Hvac(Function_Related):
    """
    Heating, Ventilation And Air Conditioning (Hvac) Device That Provides Indoor
    Environmental Comfort
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/type rules mirroring the 'rule' strings declared on
            # the class-level fields below.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('value', [[Comfort]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Point each field at this instance via its identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # Pre-populate the 'value Comfort' part of the Accomplishes rule:
            # every Hvac accomplishes Comfort by definition.
            self.Accomplishes.add(Comfort())
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Comfort, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Level_Control_Function(Actuating_Function):
    """
    An Actuating Function That Allows To Do Level Adjustments Of An Actuator In
    A Certain Range (E.G., 0%-100%), Such As Dimming A Light Or Set The Speed Of
    An Electric Motor.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/type rules mirroring the 'rule' string declared on
            # the class-level field below.
            self.Has_Command._rules = [('only', [[Set_Absolute_Level_Command], [Set_Relative_Level_Command], [Step_Down_Command], [Step_Up_Command]]), ('min|1', [[Command]])]
            # Point the field at this instance via its identifier.
            self.Has_Command._instance_identifier = self.get_identifier()
    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        # FIX: the generated rule string had unbalanced parentheses
        # ("only (A or B) or C) or D)"); rewritten as a single balanced
        # disjunction, matching the parsed _rules assigned in __init__.
        rule='only (Set_Absolute_Level_Command or Set_Relative_Level_Command or Step_Down_Command or Step_Up_Command), min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
class Lighting_Device(Function_Related):
    """
    A Device Used For Illumination, Irradiation, Signaling, Or Projection
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/type rules mirroring the 'rule' strings declared on
            # the class-level fields below.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('value', [[Comfort]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Point each field at this instance via its identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # Pre-populate the 'value Comfort' part of the Accomplishes rule:
            # every Lighting_Device accomplishes Comfort by definition.
            self.Accomplishes.add(Comfort())
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Comfort, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Load(Energy_Related):
    """
    A Type Of Energy-Related Device That Consumes Energy
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/type rules mirroring the 'rule' strings declared on
            # the class-level fields below.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Point each field at this instance via its identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Measurement(Thing):
    """
    Represents The Measured Value Made Over A Property. It Is Also Linked To The
    Unit Of Measure In Which The Value Is Expressed And The Timestamp Of The
    Measurement.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/type rules mirroring the 'rule' strings declared on
            # the class-level fields below.
            self.Has_Timestamp._rules = [('only', [['dateTime']])]
            self.Has_Value._rules = [('exactly|1', [['float']])]
            self.Relates_To_Property._rules = [('only', [[Property]]), ('exactly|1', [[Property]])]
            # Point each field at this instance via its identifier.
            self.Relates_To_Property._instance_identifier = self.get_identifier()
            self.Has_Timestamp._instance_identifier = self.get_identifier()
            self.Has_Value._instance_identifier = self.get_identifier()
    # Data fields
    Has_Timestamp: DataField = DataField(
        name='Has_Timestamp',
        rule='only dateTime',
        semantic_manager=semantic_manager)
    """
    A Relationship Stating The Timestamp Of An Entity (E.G. A Measurement).
    """
    Has_Value: DataField = DataField(
        name='Has_Value',
        rule='exactly 1 float',
        semantic_manager=semantic_manager)
    """
    A Relationship Defining The Value Of A Certain Property, E.G., Energy Or
    Power
    """
    # Relation fields
    Relates_To_Property: RelationField = RelationField(
        name='Relates_To_Property',
        rule='only Property, exactly 1 Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Measurement And The Property It Relates To
    """
class Meter(Function_Related):
    """
    A Device Built To Accurately Detect And Display A Quantity In A Form
    Readable By A Human Being. Further, A Device Of Category Saref:Meter That
    Performs A Saref:Meteringfunction.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/type rules mirroring the 'rule' strings declared on
            # the class-level fields below.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('some', [[Metering_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Point each field at this instance via its identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Metering_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Energy_Meter(Meter):
    """
    An Energy Meter Is A Device Of Category Saref:Meter That Consists Of A
    Meter, Accomplishes The Tasks Saref:Meterreading And Saref:Energyefficiency,
    Performs The Saref:Meteringfunction And Is Used For The Purpose Of Measuring
    The Saref:Energy Property
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Bind field rules/identifiers only on first initialisation; an
        # instance already carrying an 'id' keeps its existing bindings.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/type rules mirroring the 'rule' strings declared on
            # the class-level fields below.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('value', [[Energyefficiency]]), ('value', [[Meter_Reading]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('some', [[Meter]]), ('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('some', [[Metering_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('some', [[Energy]]), ('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Point each field at this instance via its identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # Pre-populate the 'value ...' parts of the Accomplishes rule:
            # every Energy_Meter accomplishes these two tasks by definition.
            self.Accomplishes.add(Energyefficiency())
            self.Accomplishes.add(Meter_Reading())
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Energyefficiency, value Meter_Reading, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='some Meter, only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Metering_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='some Energy, only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Metering_Function(Function):
    """
    A Function That Allows To Get Data From A Meter, Such As Current Meter
    Reading Or Instantaneous Demand
    Source:
        https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Wire up field rules only on first initialisation; a reloaded
        # instance already carries an 'id' in its __dict__.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            self.Has_Command._rules = [('only', [[Get_Current_Meter_Value_Command], [Get_Meter_Data_Command], [Get_Meter_History_Command]]), ('min|1', [[Command]])]
            self.Has_Meter_Reading_Type._rules = [('only', [[Commodity], [Property]])]
            self.Has_Meter_Reading._rules = [('only', [[Measurement]])]
            self.Has_Command._instance_identifier = self.get_identifier()
            self.Has_Meter_Reading_Type._instance_identifier = self.get_identifier()
            self.Has_Meter_Reading._instance_identifier = self.get_identifier()
    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        # Fixed: the generated rule string had unbalanced parentheses
        # ("only (A or B) or C), ..."); it now matches the _rules above.
        rule='only ((Get_Current_Meter_Value_Command or Get_Meter_Data_Command) or Get_Meter_History_Command), min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
    Has_Meter_Reading_Type: RelationField = RelationField(
        name='Has_Meter_Reading_Type',
        rule='only (Commodity or Property)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Reading Type Of A Measurement (E.G., Water,
    Gas, Pressure , Energy , Power, Etc.)
    """
    Has_Meter_Reading: RelationField = RelationField(
        name='Has_Meter_Reading',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Metering Function And The Measurement Of The
    Reading
    """
class Micro_Renewable(Function_Related):
    """
    A Device That Generates Renewable Energy From Natural Resources Such As The
    Sun, Wind And Water
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules are wired only once per instance; a reloaded instance
        # already has an 'id' in its __dict__ before __init__ runs.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Has_Manufacturer._rules = [('max|1', [['string']])]
        self.Has_Model._rules = [('max|1', [['string']])]
        self.Accomplishes._rules = [
            ('value', [[Energyefficiency]]), ('min|1', [[Task]])]
        self.Consists_Of._rules = [('only', [[Device]])]
        self.Controls_Property._rules = [('only', [[Property]])]
        self.Has_Function._rules = [('min|1', [[Function]])]
        self.Has_Profile._rules = [('only', [[Profile]])]
        self.Has_State._rules = [('only', [[State]])]
        self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
        self.Is_Used_For._rules = [('only', [[Commodity]])]
        self.Makes_Measurement._rules = [('only', [[Measurement]])]
        self.Measures_Property._rules = [('only', [[Property]])]
        self.Offers._rules = [('only', [[Service]])]
        # Bind every field to this instance's identifier.
        for field in (self.Accomplishes, self.Consists_Of,
                      self.Controls_Property, self.Has_Function,
                      self.Has_Profile, self.Has_State,
                      self.Has_Typical_Consumption, self.Is_Used_For,
                      self.Makes_Measurement, self.Measures_Property,
                      self.Offers, self.Has_Description,
                      self.Has_Manufacturer, self.Has_Model):
            field._instance_identifier = self.get_identifier()
        # A micro renewable always accomplishes the Energyefficiency task.
        self.Accomplishes.add(Energyefficiency())

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """

    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes', rule='value Energyefficiency, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property', rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function', rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile', rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State', rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption', rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For', rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement', rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property', rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers', rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Multimedia(Function_Related):
    """
    A Device Designed To Display, Store, Record Or Play Multimedia Content Such
    As Audio, Images, Animation, Video
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules are wired only once per instance; a reloaded instance
        # already has an 'id' in its __dict__ before __init__ runs.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Has_Manufacturer._rules = [('max|1', [['string']])]
        self.Has_Model._rules = [('max|1', [['string']])]
        self.Accomplishes._rules = [
            ('value', [[Entertainment]]), ('min|1', [[Task]])]
        self.Consists_Of._rules = [('only', [[Device]])]
        self.Controls_Property._rules = [('only', [[Property]])]
        self.Has_Function._rules = [('min|1', [[Function]])]
        self.Has_Profile._rules = [('only', [[Profile]])]
        self.Has_State._rules = [('only', [[State]])]
        self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
        self.Is_Used_For._rules = [('only', [[Commodity]])]
        self.Makes_Measurement._rules = [('only', [[Measurement]])]
        self.Measures_Property._rules = [('only', [[Property]])]
        self.Offers._rules = [('only', [[Service]])]
        # Bind every field to this instance's identifier.
        for field in (self.Accomplishes, self.Consists_Of,
                      self.Controls_Property, self.Has_Function,
                      self.Has_Profile, self.Has_State,
                      self.Has_Typical_Consumption, self.Is_Used_For,
                      self.Makes_Measurement, self.Measures_Property,
                      self.Offers, self.Has_Description,
                      self.Has_Manufacturer, self.Has_Model):
            field._instance_identifier = self.get_identifier()
        # A multimedia device always accomplishes the Entertainment task.
        self.Accomplishes.add(Entertainment())

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """

    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes', rule='value Entertainment, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property', rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function', rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile', rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State', rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption', rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For', rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement', rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property', rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers', rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Network(Function_Related):
    """
    A Device Used To Connect Other Devices In A Network, Such As Hub, Switch Or
    Router In A Local Area Network (Lan).
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules are wired only once per instance; a reloaded instance
        # already has an 'id' in its __dict__ before __init__ runs.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Has_Manufacturer._rules = [('max|1', [['string']])]
        self.Has_Model._rules = [('max|1', [['string']])]
        self.Accomplishes._rules = [('min|1', [[Task]])]
        self.Consists_Of._rules = [('only', [[Device]])]
        self.Controls_Property._rules = [('only', [[Property]])]
        self.Has_Function._rules = [('min|1', [[Function]])]
        self.Has_Profile._rules = [('only', [[Profile]])]
        self.Has_State._rules = [('only', [[State]])]
        self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
        self.Is_Used_For._rules = [('only', [[Commodity]])]
        self.Makes_Measurement._rules = [('only', [[Measurement]])]
        self.Measures_Property._rules = [('only', [[Property]])]
        self.Offers._rules = [('only', [[Service]])]
        # Bind every field to this instance's identifier.
        for field in (self.Accomplishes, self.Consists_Of,
                      self.Controls_Property, self.Has_Function,
                      self.Has_Profile, self.Has_State,
                      self.Has_Typical_Consumption, self.Is_Used_For,
                      self.Makes_Measurement, self.Measures_Property,
                      self.Offers, self.Has_Description,
                      self.Has_Manufacturer, self.Has_Model):
            field._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """

    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes', rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property', rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function', rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile', rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State', rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption', rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For', rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement', rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property', rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers', rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Notify_Command(Command):
    """
    A Type Of Command
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Acts_Upon._rules = [('only', [[State]])]
        self.Is_Command_Of._rules = [('min|1', [[Function]])]
        for field in (self.Acts_Upon, self.Is_Command_Of,
                      self.Has_Description):
            field._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon', rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of', rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Off_Command(Command):
    """
    A Type Of Command
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Acts_Upon._rules = [
            ('only', [[On_Off_State]]), ('only', [[State]])]
        self.Is_Command_Of._rules = [('min|1', [[Function]])]
        for field in (self.Acts_Upon, self.Is_Command_Of,
                      self.Has_Description):
            field._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon', rule='only On_Off_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of', rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class On_Command(Command):
    """
    A Type Of Command
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Acts_Upon._rules = [
            ('only', [[On_Off_State]]), ('only', [[State]])]
        self.Is_Command_Of._rules = [('min|1', [[Function]])]
        for field in (self.Acts_Upon, self.Is_Command_Of,
                      self.Has_Description):
            field._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon', rule='only On_Off_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of', rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class On_Off_Function(Actuating_Function):
    """
    An Actuating Function That Allows To Switch On And Off An Actuator
    Source:
        https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Wire up the command rule only on first initialisation; a reloaded
        # instance already carries an 'id' in its __dict__.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            self.Has_Command._rules = [('only', [[Off_Command], [On_Command], [Toggle_Command]]), ('min|1', [[Command]])]
            self.Has_Command._instance_identifier = self.get_identifier()
    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        # Fixed: the generated rule string had unbalanced parentheses
        # ("only (A or B) or C), ..."); it now matches the _rules above.
        rule='only ((Off_Command or On_Command) or Toggle_Command), min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
class Open_Close_Function(Actuating_Function):
    """
    An Actuating Function That Allows To Open And Close A Device
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Only a brand-new instance (no 'id' yet) needs rule wiring.
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Command._rules = [
                ('only', [[Close_Command], [Open_Command]]),
                ('min|1', [[Command]])]
            self.Has_Command._instance_identifier = self.get_identifier()

    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        rule='only (Close_Command or Open_Command), min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
class Open_Command(Command):
    """
    A Type Of Command
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Acts_Upon._rules = [
            ('only', [[Open_Close_State]]), ('only', [[State]])]
        self.Is_Command_Of._rules = [('min|1', [[Function]])]
        for field in (self.Acts_Upon, self.Is_Command_Of,
                      self.Has_Description):
            field._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon', rule='only Open_Close_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of', rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Pause_Command(Command):
    """
    A Type Of Command
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Has_Description._rules = [('max|1', [['string']])]
        self.Acts_Upon._rules = [('only', [[State]])]
        self.Is_Command_Of._rules = [('min|1', [[Function]])]
        for field in (self.Acts_Upon, self.Is_Command_Of,
                      self.Has_Description):
            field._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description', rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon', rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of', rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Profile(Thing):
    """
    A Specification Associated To A Device To Collect Information About A
    Certain Property (E.G., Energy) Or Commodity (E.G.Water) For Optimizing Its
    Usage In The Home, Office Or Building In Which The Device Is Located. This
    Specification Is About A Certain Property Or Commodity (Saref:Isabout), Can
    Be Calculated Over A Time Span (Saref:Hastime ) And Can Be Associated To
    Some Costs (Saref:Hasprice). An Example Is The Power Profile Defined In The
    Saref4Ener Extension That Can Be Associated To A Device For Optimizing The
    Energy Efficiency In The Home, Office Or Building In Which The Device Is
    Located.
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Consists_Of._rules = [('only', [[Profile]])]
        self.Has_Price._rules = [('only', [[Price]])]
        self.Has_Time._rules = [('only', [[Time]])]
        self.Isabout._rules = [('only', [[Commodity], [Property]])]
        for field in (self.Consists_Of, self.Has_Price,
                      self.Has_Time, self.Isabout):
            field._instance_identifier = self.get_identifier()

    # Relation fields
    Consists_Of: RelationField = RelationField(
        name='Consists_Of', rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Has_Price: RelationField = RelationField(
        name='Has_Price', rule='only Price',
        semantic_manager=semantic_manager)
    """
    A Relationships Indentifying The Price Associated To An Entity
    """
    Has_Time: RelationField = RelationField(
        name='Has_Time', rule='only Time',
        semantic_manager=semantic_manager)
    """
    A Relationship To Associate Time Information To An Entity
    """
    Isabout: RelationField = RelationField(
        name='Isabout', rule='only (Commodity or Property)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying What An Entity, Such As A Profile, Is About
    """
class Property(Thing):
    """
    Anything That Can Be Sensed, Measured Or Controlled In Households, Common
    Public Buildings Or Offices. We Propose Here A List Of Properties That Are
    Relevant For The Purpose Of Saref, But This List Can Be Extended.
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
        self.Is_Measured_By_Device._rules = [('only', [[Device]])]
        self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
        for field in (self.Is_Controlled_By_Device,
                      self.Is_Measured_By_Device,
                      self.Relates_To_Measurement):
            field._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """
    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """
    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement', rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Energy(Property):
    """
    A Saref:Property Related To Some Measurements That Are Characterized By A
    Certain Value Measured In An Energy Unit (Such As Kilowatt_Hour Or
    Watt_Hour). Further Specializations Of The Saref:Energy Class Can Be Found
    In The Saref4Ener Extension, Where Classes Such As Energymax, Energymin And
    Energyexpected Are Defined.
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
        self.Is_Measured_By_Device._rules = [('only', [[Device]])]
        self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
        for field in (self.Is_Controlled_By_Device,
                      self.Is_Measured_By_Device,
                      self.Relates_To_Measurement):
            field._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """
    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """
    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement', rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Humidity(Property):
    """
    A Saref:Property Related To Some Measurements That Are Characterized By A
    Certain Value That Is Measured In A Humidity Unit
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
        self.Is_Measured_By_Device._rules = [('only', [[Device]])]
        self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
        for field in (self.Is_Controlled_By_Device,
                      self.Is_Measured_By_Device,
                      self.Relates_To_Measurement):
            field._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """
    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """
    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement', rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Light(Property):
    """
    A Saref:Property Related To Some Measurements That Are Characterized By A
    Certain Value That Is Measured In A Illuminance Unit (Lux)
    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Field rules are wired only on first initialisation.
        already_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if already_initialised:
            return
        self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
        self.Is_Measured_By_Device._rules = [('only', [[Device]])]
        self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
        for field in (self.Is_Controlled_By_Device,
                      self.Is_Measured_By_Device,
                      self.Relates_To_Measurement):
            field._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """
    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device', rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """
    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement', rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Motion(Property):
    """
    A saref:Property related to measurements that are characterized by a
    value expressed in a unit of measure for motion.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            for rel in (self.Is_Controlled_By_Device,
                        self.Is_Measured_By_Device,
                        self.Relates_To_Measurement):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """

    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """

    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Occupancy(Property):
    """
    A saref:Property related to measurements that are characterized by a
    value (saref:hasValue property) expressed in a unit of measure for
    occupancy.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            for rel in (self.Is_Controlled_By_Device,
                        self.Is_Measured_By_Device,
                        self.Relates_To_Measurement):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """

    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """

    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Power(Property):
    """
    A saref:Property related to measurements that are characterized by a
    value expressed in a power unit (such as watt or kilowatt). Further
    specializations (PowerMax, PowerMin, PowerExpected) are defined in the
    SAREF4ENER extension.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            for rel in (self.Is_Controlled_By_Device,
                        self.Is_Measured_By_Device,
                        self.Relates_To_Measurement):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """

    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """

    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Pressure(Property):
    """
    A saref:Property related to measurements that are characterized by a
    value expressed in a pressure unit (bar or pascal).

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            for rel in (self.Is_Controlled_By_Device,
                        self.Is_Measured_By_Device,
                        self.Relates_To_Measurement):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """

    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """

    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Price(Property):
    """
    A saref:Property related to measurements that are characterized by a
    value measured using saref:Currency.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            for rel in (self.Is_Controlled_By_Device,
                        self.Is_Measured_By_Device,
                        self.Relates_To_Measurement):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """

    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """

    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Sensing_Function(Function):
    """
    A function that allows transmitting data from sensors, such as
    measurement values (e.g. temperature) or sensing data (e.g. occupancy).

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Command._rules = [
                ('only', [[Get_Sensing_Data_Command]]),
                ('min|1', [[Command]]),
            ]
            self.Has_Sensing_Range_._rules = [('some', [[Measurement]])]
            self.Has_Sensor_Type._rules = [('only', [[Property]])]
            for rel in (self.Has_Command, self.Has_Sensing_Range_,
                        self.Has_Sensor_Type):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        rule='only Get_Sensing_Data_Command, min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """

    Has_Sensing_Range_: RelationField = RelationField(
        name='Has_Sensing_Range_',
        rule='some Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Sensing Function And A Measurement Identifying
    The Range Of A Sensor Detection
    """

    Has_Sensor_Type: RelationField = RelationField(
        name='Has_Sensor_Type',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Sensing Type Of A Sensor Detection (I.E.,
    Temperature, Occupancy, Humidity, Motion , Smoke, Pressure, Etc.)
    """
class Sensor(Function_Related):
    """
    A device that detects and responds to events or changes in the physical
    environment such as light, motion, or temperature changes; a device of
    category saref:Sensor that performs a saref:SensingFunction.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Ontology restrictions are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            restrictions = {
                'Has_Description': [('max|1', [['string']])],
                'Has_Manufacturer': [('max|1', [['string']])],
                'Has_Model': [('max|1', [['string']])],
                'Accomplishes': [('min|1', [[Task]])],
                'Consists_Of': [('only', [[Device]])],
                'Controls_Property': [('only', [[Property]])],
                'Has_Function': [('some', [[Sensing_Function]]),
                                 ('min|1', [[Function]])],
                'Has_Profile': [('only', [[Profile]])],
                'Has_State': [('only', [[State]])],
                'Has_Typical_Consumption': [('only', [[Energy], [Power]])],
                'Is_Used_For': [('only', [[Commodity]])],
                'Makes_Measurement': [('only', [[Measurement]])],
                'Measures_Property': [('only', [[Property]])],
                'Offers': [('only', [[Service]])],
            }
            # Attach each field's rules and bind it to this instance.
            for field_name, rules in restrictions.items():
                field = getattr(self, field_name)
                field._rules = rules
                field._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """

    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """

    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """

    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other
    Entities (E.G., A Temperature/Humidity Sensor That Consists Of A
    Temperature Sensor And A Humidity Sensor)
    """

    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A
    Certain Device
    """

    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Sensing_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """

    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """

    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """

    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """

    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For
    (E.G., Controlling A Commodity)
    """

    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such
    Measurement Will Link Together The Value Of The Measurement, Its Unit Of
    Measure And The Property To Which It Relates.
    """

    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """

    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Service(Thing):
    """
    A service is a representation of a function to a network that makes the
    function discoverable, registerable and remotely controllable by other
    devices in the network. A service can represent one or more functions
    and is offered by a device that wants (a certain set of) its function(s)
    to be exposed. A service must specify the device offering it and the
    function(s) it represents.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Is_Offered_By._rules = [('min|1', [[Device]])]
            self.Represents._rules = [('min|1', [[Function]])]
            for rel in (self.Is_Offered_By, self.Represents):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Offered_By: RelationField = RelationField(
        name='Is_Offered_By',
        rule='min 1 Device',
        inverse_of=['Offers'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Service And A Device That Offers The Service
    """

    Represents: RelationField = RelationField(
        name='Represents',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Service And A Function.
    """
class Set_Level_Command(Command):
    """
    A type of command.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Multi_Level_State]]),
                                     ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            for fld in (self.Acts_Upon, self.Is_Command_Of,
                        self.Has_Description):
                fld._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Multi_Level_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """

    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Set_Absolute_Level_Command(Set_Level_Command):
    """
    A type of set level command.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Multi_Level_State]]),
                                     ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            for fld in (self.Acts_Upon, self.Is_Command_Of,
                        self.Has_Description):
                fld._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Multi_Level_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """

    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Set_Relative_Level_Command(Set_Level_Command):
    """
    A type of set level command.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Multi_Level_State]]),
                                     ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            for fld in (self.Acts_Upon, self.Is_Command_Of,
                        self.Has_Description):
                fld._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Multi_Level_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """

    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Smoke(Property):
    """
    A saref:Property related to measurements that are characterized by a
    value expressed in a unit of measure for smoke.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            for rel in (self.Is_Controlled_By_Device,
                        self.Is_Measured_By_Device,
                        self.Relates_To_Measurement):
                rel._instance_identifier = self.get_identifier()

    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """

    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """

    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Smoke_Sensor(Sensor):
    """
    A device that consists of a sensor, has category saref:Sensor, performs
    the saref:SensingFunction and saref:EventFunction (which notifies that a
    certain threshold has been exceeded), and is used for the purpose of
    sensing a property of type saref:Smoke.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached on first
        # construction (i.e. before the instance has received an 'id').
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('value', [[Safety]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('some', [[Sensor]]), ('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            # BUGFIX: the generator emitted the ('some', [[Sensing_Function]])
            # restriction twice; the redundant duplicate has been removed
            # (it added no constraint beyond the first occurrence).
            self.Has_Function._rules = [('some', [[Event_Function]]), ('some', [[Sensing_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('some', [[Smoke]]), ('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Bind every field to this instance.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # A smoke sensor always accomplishes the Safety task
            # (the 'value Safety' restriction above).
            self.Accomplishes.add(Safety())

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """

    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """

    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Safety, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """

    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='some Sensor, only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other
    Entities (E.G., A Temperature/Humidity Sensor That Consists Of A
    Temperature Sensor And A Humidity Sensor)
    """

    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A
    Certain Device
    """

    # NOTE: the duplicated "some Sensing_Function" in the generated rule
    # string has been removed to match the deduplicated _rules above.
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Event_Function, some Sensing_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """

    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """

    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """

    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """

    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For
    (E.G., Controlling A Commodity)
    """

    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such
    Measurement Will Link Together The Value Of The Measurement, Its Unit Of
    Measure And The Property To Which It Relates.
    """

    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='some Smoke, only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """

    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Start_Command(Command):
    """
    A type of command.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Start_Stop_State]]),
                                     ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            for fld in (self.Acts_Upon, self.Is_Command_Of,
                        self.Has_Description):
                fld._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Start_Stop_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """

    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Start_Stop_Function(Actuating_Function):
    """
    An actuating function that allows starting and stopping a device.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Command._rules = [
                ('only', [[Start_Command], [Stop_Command]]),
                ('min|1', [[Command]]),
            ]
            self.Has_Command._instance_identifier = self.get_identifier()

    # Relation fields
    Has_Command: RelationField = RelationField(
        name='Has_Command',
        rule='only (Start_Command or Stop_Command), min 1 Command',
        inverse_of=['Is_Command_Of'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between An Entity (Such As A Function) And A Command
    """
class State(Thing):
    """
    The state in which a device can be found, e.g. on/off/standby, or
    online/offline. SAREF proposes a list of states relevant for its
    purpose, but the list can be extended.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # This class declares no fields, so there is no first-construction
        # setup; the generated (and unused) `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Multi_Level_State(State):
    """
    A type of state.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class On_Off_State(State):
    """
    A type of state.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Off_State(On_Off_State):
    """
    The state of a device that is off.

    (The generated docstring incorrectly said "On"; corrected here.)

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class On_State(On_Off_State):
    """
    The state of a device that is on.

    (The generated docstring incorrectly said "Off"; corrected here.)

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Open_Close_State(State):
    """
    A type of state.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Close_State(Open_Close_State):
    """
    The state of a device that is closed.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Open_State(Open_Close_State):
    """
    The state of a device that is open.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Start_Stop_State(State):
    """
    A type of state.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Start_State(Start_Stop_State):
    """
    The state of a device that is started.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No fields to configure; the unused `is_initialised` flag was removed.
        super().__init__(*args, **kwargs)
class Step_Down_Command(Command):
    """
    A type of command.

    Source:
        https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # Rules and owner identifiers are only attached the first time the
        # instance is constructed (i.e. before it has received an 'id').
        needs_setup = 'id' not in self.__dict__
        super().__init__(*args, **kwargs)
        if needs_setup:
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Multi_Level_State]]),
                                     ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            for fld in (self.Acts_Upon, self.Is_Command_Of,
                        self.Has_Description):
                fld._instance_identifier = self.get_identifier()

    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """

    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Multi_Level_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """

    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Step_Up_Command(Command):
    """
    A Type Of Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Multi_Level_State]]), ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Link each field back to this individual for validation/lookup.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Multi_Level_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Stop_Command(Command):
    """
    A Type Of Command
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[Start_Stop_State]]), ('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Link each field back to this individual for validation/lookup.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only Start_Stop_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Stop_State(Start_Stop_State):
    """
    The State Of A Device That Is Stopped
    Source:
    https://w3id.org/saref (saref.ttl)
    """

    def __init__(self, *args, **kwargs):
        # No class-specific rules; plain delegation to Start_Stop_State.
        # (Removed an unused `is_initialised` local.)
        super().__init__(*args, **kwargs)
class Storage(Energy_Related):
    """
    A Type Of Energy-Related Device That Stores Energy
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Link each field back to this individual for validation/lookup.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Switch(Actuator):
    """
    A Device Of Category Saref:Actuator That Performs An Actuating Function Of
    Type Saref:Onofffunction Or Saref:Openclosefunction
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('some', [[Actuating_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Link each field back to this individual for validation/lookup.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Actuating_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Door_Switch(Switch):
    """
    A Device Of Category Saref:Actuator That Consists Of A Switch, Accomplishes
    The Task Saref:Safety, Performs The Saref:Openclosefunction, Is Used For
    Controlling A Door, And Can Be Found In The State Saref:Openclosestate.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('value', [[Safety]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('some', [[Switch]]), ('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('some', [[Open_Close_Function]]), ('some', [[Actuating_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('some', [[Open_Close_State]]), ('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Link each field back to this individual for validation/lookup.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # The ontology's 'value Safety' restriction: every Door_Switch
            # individual accomplishes the Safety task from the start.
            self.Accomplishes.add(Safety())
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Safety, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='some Switch, only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Open_Close_Function, some Actuating_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='some Open_Close_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Light_Switch(Switch):
    """
    A Device Of Category Saref:Actuator That Consists Of A Switch, Accomplishes
    The Task Saref:Lighting, Performs The Saref:Onofffunction, Measures The
    Property Saref:Light, And Can Be Found In The State Saref:Onoffstate. It Can
    Offer A Switch On Service.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            self.Accomplishes._rules = [('value', [[Lighting]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('some', [[Switch]]), ('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('some', [[On_Off_Function]]), ('some', [[Actuating_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('some', [[On_Off_State]]), ('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('some', [[Light]]), ('only', [[Property]])]
            self.Offers._rules = [('some', [[Switch_On_Service]]), ('only', [[Service]])]
            # Link each field back to this individual for validation/lookup.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # The ontology's 'value Lighting' restriction: every Light_Switch
            # individual accomplishes the Lighting task from the start.
            self.Accomplishes.add(Lighting())
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Lighting, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='some Switch, only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some On_Off_Function, some Actuating_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='some On_Off_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='some Light, only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='some Switch_On_Service, only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Switch_On_Service(Service):
    """
    A Type Of Service That Represents An On/Off Function To The Network
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Is_Offered_By._rules = [('some', [[Light_Switch]]), ('min|1', [[Device]])]
            self.Represents._rules = [('some', [[On_Off_Function]]), ('min|1', [[Function]])]
            # Link each field back to this individual for validation/lookup.
            self.Is_Offered_By._instance_identifier = self.get_identifier()
            self.Represents._instance_identifier = self.get_identifier()
    # Relation fields
    Is_Offered_By: RelationField = RelationField(
        name='Is_Offered_By',
        rule='some Light_Switch, min 1 Device',
        inverse_of=['Offers'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Service And A Device That Offers The Service
    """
    Represents: RelationField = RelationField(
        name='Represents',
        rule='some On_Off_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Service And A Function.
    """
class Task(Thing):
    """
    The Goal For Which A Device Is Designed (From A User Perspective). For
    Example, A Washing Machine Is Designed For The Task Of Washing. We Propose
    Here A List Of Tasks That Are Relevant For The Purpose Of Saref, But This
    List Can Be Extended.
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rule mirroring the saref.ttl restriction,
            # then link the field back to this individual.
            self.Is_Accomplished_By._rules = [('min|1', [[Device]])]
            self.Is_Accomplished_By._instance_identifier = self.get_identifier()
    # Relation fields
    Is_Accomplished_By: RelationField = RelationField(
        name='Is_Accomplished_By',
        rule='min 1 Device',
        inverse_of=['Accomplishes'],
        semantic_manager=semantic_manager)
    """
    A Relationship Indentifying The Task Accomplished By A Certain Entity (E.G.,
    A Device)
    """
class Temperature(Property):
    """
    A Saref:Property Related To Some Measurements That Are Characterized By A
    Certain Value That Is Measured In A Temperature Unit (Degree_Celsius,
    Degree_Fahrenheit, Or Degree_Kelvin)
    Source:
    https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Re-initialisation guard: presumably 'id' is placed in __dict__ by a
        # prior __init__ run (TODO confirm against the framework base class),
        # so the rule bindings below happen only once per instance.
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Cardinality/range rules mirroring the saref.ttl restrictions
            # for this class; bound onto the shared class-level fields.
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            # Link each field back to this individual for validation/lookup.
            self.Is_Controlled_By_Device._instance_identifier = self.get_identifier()
            self.Is_Measured_By_Device._instance_identifier = self.get_identifier()
            self.Relates_To_Measurement._instance_identifier = self.get_identifier()
    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """
    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """
    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Temperature_Sensor(Sensor):
    """
    A Device That Consists Of A Sensor, Has Category Saref:Sensor, Performs The
    Saref:Sensingfunction And Is Used For The Purpose Of Sensing A Property Of
    Type Saref:Temperature
    Source:
        https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Rules/identifiers are only wired up on first construction; 'id'
        # marks an already-initialised instance (set by the base class).
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Ontology cardinality rules for data fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            # Ontology cardinality rules for relation fields.
            self.Accomplishes._rules = [('value', [[Comfort]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('some', [[Sensor]]), ('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            # NOTE(review): ('some', [[Sensing_Function]]) appears twice —
            # duplicated in the generated ontology rules; looks redundant but
            # is preserved as generated.
            self.Has_Function._rules = [('some', [[Sensing_Function]]), ('some', [[Sensing_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('only', [[Profile]])]
            self.Has_State._rules = [('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('some', [[Temperature]]), ('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Bind each field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # Satisfy the 'value Comfort' rule by pre-populating the relation.
            self.Accomplishes.add(Comfort())
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Comfort, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='some Sensor, only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Sensing_Function, some Sensing_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='some Temperature, only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Time(Property):
    """
    A Saref:Property That Allows To Specify The Time Concept In Terms Of
    Instants Or Intervals According To The Imported W3C Time Ontology.
    Source:
        https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Rules/identifiers are only wired up on first construction; 'id'
        # marks an already-initialised instance (set by the base class).
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Ontology cardinality rules for relation fields.
            self.Is_Controlled_By_Device._rules = [('only', [[Device]])]
            self.Is_Measured_By_Device._rules = [('only', [[Device]])]
            self.Relates_To_Measurement._rules = [('only', [[Measurement]])]
            # Bind each field to this instance's identifier.
            self.Is_Controlled_By_Device._instance_identifier = self.get_identifier()
            self.Is_Measured_By_Device._instance_identifier = self.get_identifier()
            self.Relates_To_Measurement._instance_identifier = self.get_identifier()
    # Relation fields
    Is_Controlled_By_Device: RelationField = RelationField(
        name='Is_Controlled_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Control A Certain Property
    """
    Is_Measured_By_Device: RelationField = RelationField(
        name='Is_Measured_By_Device',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Devices That Can Measure A Certain Property
    """
    Relates_To_Measurement: RelationField = RelationField(
        name='Relates_To_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Property And The Measurements It Relates To
    """
class Toggle_Command(Command):
    """
    A Type Of Command
    Source:
        https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Rules/identifiers are only wired up on first construction; 'id'
        # marks an already-initialised instance (set by the base class).
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Ontology cardinality rules for this command's fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Acts_Upon._rules = [('only', [[State]])]
            self.Is_Command_Of._rules = [('min|1', [[Function]])]
            # Bind each field to this instance's identifier.
            self.Acts_Upon._instance_identifier = self.get_identifier()
            self.Is_Command_Of._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    # Relation fields
    Acts_Upon: RelationField = RelationField(
        name='Acts_Upon',
        rule='only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A State
    """
    Is_Command_Of: RelationField = RelationField(
        name='Is_Command_Of',
        rule='min 1 Function',
        inverse_of=['Has_Command'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Command And A Function.
    """
class Washing_Machine(Appliance, Load):
    """
    A Device Of Category Saref:Appliance And Saref:Load That Accomplishes The
    Task Saref:Washing, Performs An Actuating Function Of Type
    Saref:Startstopfunction, Can Be Found In The State Saref:Startstopstate, And
    Can Have A Saref:Profile That Characterizes Its Energy Consumption.
    Source:
        https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Rules/identifiers are only wired up on first construction; 'id'
        # marks an already-initialised instance (set by the base class).
        is_initialised = 'id' in self.__dict__
        super().__init__(*args, **kwargs)
        if not is_initialised:
            # Ontology cardinality rules for data fields.
            self.Has_Description._rules = [('max|1', [['string']])]
            self.Has_Manufacturer._rules = [('max|1', [['string']])]
            self.Has_Model._rules = [('max|1', [['string']])]
            # Ontology cardinality rules for relation fields.
            self.Accomplishes._rules = [('value', [[Washing]]), ('min|1', [[Task]])]
            self.Consists_Of._rules = [('only', [[Device]])]
            self.Controls_Property._rules = [('only', [[Property]])]
            self.Has_Function._rules = [('some', [[Start_Stop_Function]]), ('min|1', [[Function]])]
            self.Has_Profile._rules = [('some', [[Profile]]), ('only', [[Profile]])]
            self.Has_State._rules = [('some', [[Start_Stop_State]]), ('only', [[State]])]
            self.Has_Typical_Consumption._rules = [('only', [[Energy], [Power]])]
            self.Is_Used_For._rules = [('only', [[Commodity]])]
            self.Makes_Measurement._rules = [('only', [[Measurement]])]
            self.Measures_Property._rules = [('only', [[Property]])]
            self.Offers._rules = [('only', [[Service]])]
            # Bind each field to this instance's identifier.
            self.Accomplishes._instance_identifier = self.get_identifier()
            self.Consists_Of._instance_identifier = self.get_identifier()
            self.Controls_Property._instance_identifier = self.get_identifier()
            self.Has_Function._instance_identifier = self.get_identifier()
            self.Has_Profile._instance_identifier = self.get_identifier()
            self.Has_State._instance_identifier = self.get_identifier()
            self.Has_Typical_Consumption._instance_identifier = self.get_identifier()
            self.Is_Used_For._instance_identifier = self.get_identifier()
            self.Makes_Measurement._instance_identifier = self.get_identifier()
            self.Measures_Property._instance_identifier = self.get_identifier()
            self.Offers._instance_identifier = self.get_identifier()
            self.Has_Description._instance_identifier = self.get_identifier()
            self.Has_Manufacturer._instance_identifier = self.get_identifier()
            self.Has_Model._instance_identifier = self.get_identifier()
            # Satisfy the 'value Washing' rule by pre-populating the relation.
            self.Accomplishes.add(Washing())
    # Data fields
    Has_Description: DataField = DataField(
        name='Has_Description',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Providing A Description Of An Entity (E.G., Device)
    """
    Has_Manufacturer: DataField = DataField(
        name='Has_Manufacturer',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Manufacturer Of An Entity (E.G., Device)
    """
    Has_Model: DataField = DataField(
        name='Has_Model',
        rule='max 1 string',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Model Of An Entity (E.G., Device)
    """
    # Relation fields
    Accomplishes: RelationField = RelationField(
        name='Accomplishes',
        rule='value Washing, min 1 Task',
        inverse_of=['Is_Accomplished_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Certain Entity (E.G., A Device) And The Task It
    Accomplishes
    """
    Consists_Of: RelationField = RelationField(
        name='Consists_Of',
        rule='only Device',
        semantic_manager=semantic_manager)
    """
    A Relationship Indicating A Composite Entity That Consists Of Other Entities
    (E.G., A Temperature/Humidity Sensor That Consists Of A Temperature Sensor
    And A Humidity Sensor)
    """
    Controls_Property: RelationField = RelationField(
        name='Controls_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Controlled By A Certain
    Device
    """
    Has_Function: RelationField = RelationField(
        name='Has_Function',
        rule='some Start_Stop_Function, min 1 Function',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of Function Of A Device
    """
    Has_Profile: RelationField = RelationField(
        name='Has_Profile',
        rule='some Profile, only Profile',
        semantic_manager=semantic_manager)
    """
    A Relationship Associating A Profile To A Certain Entity (E.G., A Device)
    """
    Has_State: RelationField = RelationField(
        name='Has_State',
        rule='some Start_Stop_State, only State',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Type Of State Of A Device
    """
    Has_Typical_Consumption: RelationField = RelationField(
        name='Has_Typical_Consumption',
        rule='only (Energy or Power)',
        semantic_manager=semantic_manager)
    """
    A Relationship Identifying The Typical (Energy Or Power) Consumption Of A
    Device
    """
    Is_Used_For: RelationField = RelationField(
        name='Is_Used_For',
        rule='only Commodity',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Purpose For Which A Device Is Used For (E.G.,
    Controlling A Commodity)
    """
    Makes_Measurement: RelationField = RelationField(
        name='Makes_Measurement',
        rule='only Measurement',
        semantic_manager=semantic_manager)
    """
    A Relation Between A Device And The Measurements It Makes. Such Measurement
    Will Link Together The Value Of The Measurement, Its Unit Of Measure And The
    Property To Which It Relates.
    """
    Measures_Property: RelationField = RelationField(
        name='Measures_Property',
        rule='only Property',
        semantic_manager=semantic_manager)
    """
    A Relationship Specifying The Property That Can Be Measured By A Certain
    Device
    """
    Offers: RelationField = RelationField(
        name='Offers',
        rule='only Service',
        inverse_of=['Is_Offered_By'],
        semantic_manager=semantic_manager)
    """
    A Relationship Between A Device And A Service
    """
class Water(Commodity):
    """
    A Type Of Commodity
    Source:
        https://w3id.org/saref (saref.ttl)
    """
    def __init__(self, *args, **kwargs):
        # Water declares no fields of its own, so unlike its siblings no
        # rule/identifier wiring is needed; the unused 'is_initialised'
        # probe of the generator was removed.
        super().__init__(*args, **kwargs)
# ---------Individuals--------- #
# Named ontology individuals. Each class represents exactly one individual;
# _parent_classes lists the semantic classes the individual instantiates.
class Individual1(SemanticIndividual):
    _parent_classes: List[type] = [Class2, Class1]
class Individual2(SemanticIndividual):
    _parent_classes: List[type] = [Class1]
class Individual3(SemanticIndividual):
    _parent_classes: List[type] = [Class2, Class1, Class3]
class Individual4(SemanticIndividual):
    _parent_classes: List[type] = [Class1, Class2]
# Currencies and units of measure.
class United_States_Dollar(SemanticIndividual):
    _parent_classes: List[type] = [Currency]
class Bar(SemanticIndividual):
    _parent_classes: List[type] = [Pressure_Unit]
class Degree_Celsius(SemanticIndividual):
    _parent_classes: List[type] = [Temperature_Unit]
class Degree_Fahrenheit(SemanticIndividual):
    _parent_classes: List[type] = [Temperature_Unit]
class Euro(SemanticIndividual):
    _parent_classes: List[type] = [Currency]
class Kelvin(SemanticIndividual):
    _parent_classes: List[type] = [Temperature_Unit]
class Kilowatt(SemanticIndividual):
    _parent_classes: List[type] = [Power_Unit]
class Kilowatt_Hour(SemanticIndividual):
    _parent_classes: List[type] = [Energy_Unit]
class Lux(SemanticIndividual):
    _parent_classes: List[type] = [Illuminance_Unit]
class Pascal(SemanticIndividual):
    _parent_classes: List[type] = [Pressure_Unit]
class Great_Britain_Pound_Sterling(SemanticIndividual):
    _parent_classes: List[type] = [Currency]
class Watt(SemanticIndividual):
    _parent_classes: List[type] = [Power_Unit]
# SAREF tasks, commands and states.
class Cleaning(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Close(SemanticIndividual):
    _parent_classes: List[type] = [Close_Command, Close_State]
class Comfort(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Drying(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Energyefficiency(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Entertainment(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Get_Current_Meter_Value(SemanticIndividual):
    _parent_classes: List[type] = [Get_Current_Meter_Value_Command]
class Get_Meter_Data(SemanticIndividual):
    _parent_classes: List[type] = [Get_Meter_Data_Command]
class Get_Meter_History(SemanticIndividual):
    _parent_classes: List[type] = [Get_Meter_History_Command]
class Get_Sensing_Data(SemanticIndividual):
    _parent_classes: List[type] = [Get_Sensing_Data_Command]
class Lighting(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Meter_Reading(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Notify(SemanticIndividual):
    _parent_classes: List[type] = [Notify_Command]
# Trailing underscore avoids clashing with the Off_Command/Off_State names.
class Off_(SemanticIndividual):
    _parent_classes: List[type] = [Off_Command, Off_State]
class On(SemanticIndividual):
    _parent_classes: List[type] = [On_Command, On_State]
class Open(SemanticIndividual):
    _parent_classes: List[type] = [Open_Command, Open_State]
class Pause(SemanticIndividual):
    _parent_classes: List[type] = [Pause_Command]
class Safety(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Set_Absolute_Level(SemanticIndividual):
    _parent_classes: List[type] = [Set_Absolute_Level_Command]
class Set_Relative_Level(SemanticIndividual):
    _parent_classes: List[type] = [Set_Relative_Level_Command]
class Start(SemanticIndividual):
    _parent_classes: List[type] = [Start_Command, Start_State]
class Step_Down(SemanticIndividual):
    _parent_classes: List[type] = [Step_Down_Command]
class Step_Up(SemanticIndividual):
    _parent_classes: List[type] = [Step_Up_Command]
class Stop(SemanticIndividual):
    _parent_classes: List[type] = [Stop_Command, Stop_State]
class Toggle(SemanticIndividual):
    _parent_classes: List[type] = [Toggle_Command]
class Washing(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Wellbeing(SemanticIndividual):
    _parent_classes: List[type] = [Task]
class Watt_Hour(SemanticIndividual):
    _parent_classes: List[type] = [Energy_Unit]
# ---------Datatypes--------- #
# Mapping from ontology datatype name to its validation descriptor.
# Descriptor keys: 'type' ('enum'|'string'|'number'|'date'), optional
# 'enum_values', 'allowed_chars'/'forbidden_chars' (strings), and
# 'number_range_min'/'number_range_max'/'number_has_range'/
# 'number_decimal_allowed' (numbers).
semantic_manager.datatype_catalogue = {
    'customDataType1': {
        'type': 'enum',
        'enum_values': ['0', '15', '30'],
    },
    'customDataType2': {
        'type': 'string',
    },
    'customDataType3': {
        'type': 'string',
    },
    'customDataType4': {
        'type': 'enum',
        'enum_values': ['1', '2', '3', '4'],
    },
    'rational': {
        'type': 'number',
        'number_decimal_allowed': True,
    },
    'real': {
        'type': 'number',
    },
    'PlainLiteral': {
        'type': 'string',
    },
    'XMLLiteral': {
        'type': 'string',
    },
    'Literal': {
        'type': 'string',
    },
    'anyURI': {
        'type': 'string',
    },
    'base64Binary': {
        'type': 'string',
    },
    'boolean': {
        'type': 'enum',
        'enum_values': ['True', 'False'],
    },
    'byte': {
        'type': 'number',
        'number_range_min': -128,
        'number_range_max': 127,
        'number_has_range': True,
    },
    'dateTime': {
        'type': 'date',
    },
    'dateTimeStamp': {
        'type': 'date',
    },
    'decimal': {
        'type': 'number',
        'number_decimal_allowed': True,
    },
    'double': {
        'type': 'number',
        'number_decimal_allowed': True,
    },
    'float': {
        'type': 'number',
        'number_decimal_allowed': True,
    },
    'hexBinary': {
        'allowed_chars': ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'],
        'type': 'string',
    },
    'int': {
        'type': 'number',
        'number_range_min': -2147483648,
        'number_range_max': 2147483647,
        'number_has_range': True,
    },
    'integer': {
        'type': 'number',
    },
    'language': {
        'type': 'string',
    },
    'long': {
        'type': 'number',
        'number_range_min': -9223372036854775808,
        'number_range_max': 9223372036854775807,
        'number_has_range': True,
    },
    'Name': {
        'type': 'string',
    },
    'NCName': {
        'forbidden_chars': [':'],
        'type': 'string',
    },
    'negativeInteger': {
        'type': 'number',
        'number_range_max': -1,
        'number_has_range': True,
    },
    'NMTOKEN': {
        'type': 'string',
    },
    'nonNegativeInteger': {
        'type': 'number',
        'number_range_min': 0,
        'number_has_range': True,
    },
    # NOTE(review): XSD nonPositiveInteger includes 0; the generated max of
    # -1 excludes it — confirm against the generator/spec before relying on
    # this bound.
    'nonPositiveInteger': {
        'type': 'number',
        'number_range_max': -1,
        'number_has_range': True,
    },
    'normalizedString': {
        'type': 'string',
    },
    # NOTE(review): XSD positiveInteger starts at 1; the generated min of 0
    # admits 0 — confirm against the generator/spec.
    'positiveInteger': {
        'type': 'number',
        'number_range_min': 0,
        'number_has_range': True,
    },
    'short': {
        'type': 'number',
        'number_range_min': -32768,
        'number_range_max': 32767,
        'number_has_range': True,
    },
    'string': {
        'type': 'string',
    },
    'token': {
        'type': 'string',
    },
    'unsignedByte': {
        'type': 'number',
        'number_range_min': 0,
        'number_range_max': 255,
        'number_has_range': True,
    },
    'unsignedInt': {
        'type': 'number',
        'number_range_min': 0,
        'number_range_max': 4294967295,
        'number_has_range': True,
    },
    'unsignedLong': {
        'type': 'number',
        'number_range_min': 0,
        'number_range_max': 18446744073709551615,
        'number_has_range': True,
    },
    'unsignedShort': {
        'type': 'number',
        'number_range_min': 0,
        'number_range_max': 65535,
        'number_has_range': True,
    },
}
# String-valued enum for the ontology datatype 'customDataType1'
# (allowed values: '0', '15', '30'), built via the functional Enum API.
customDataType1 = Enum(
    'customDataType1',
    [('value_0', '0'), ('value_15', '15'), ('value_30', '30')],
    type=str,
)
# String-valued enum for the ontology datatype 'customDataType4'
# (allowed values: '1'..'4'), built via the functional Enum API.
customDataType4 = Enum(
    'customDataType4',
    [('value_1', '1'), ('value_2', '2'), ('value_3', '3'), ('value_4', '4')],
    type=str,
)
# ---------Class Dict--------- #
# Registry mapping each semantic class name to its Python class, used by the
# semantic manager to resolve classes by name at runtime.
semantic_manager.class_catalogue = {
    'Actuating_Function': Actuating_Function,
    'Actuator': Actuator,
    'Appliance': Appliance,
    'Building_Related': Building_Related,
    'Class1': Class1,
    'Class123': Class123,
    'Class13': Class13,
    'Class1a': Class1a,
    'Class1aa': Class1aa,
    'Class1b': Class1b,
    'Class2': Class2,
    'Class3': Class3,
    'Class3a': Class3a,
    'Class3aa': Class3aa,
    'Class4': Class4,
    'Close_Command': Close_Command,
    'Close_State': Close_State,
    'Coal': Coal,
    'Command': Command,
    'Commodity': Commodity,
    'Currency': Currency,
    'Device': Device,
    'Door_Switch': Door_Switch,
    'Electricity': Electricity,
    'Energy': Energy,
    'Energy_Meter': Energy_Meter,
    'Energy_Related': Energy_Related,
    'Energy_Unit': Energy_Unit,
    'Event_Function': Event_Function,
    'Function': Function,
    'Function_Related': Function_Related,
    'Gas': Gas,
    'Generator': Generator,
    'Gertrude': Gertrude,
    'Get_Command': Get_Command,
    'Get_Current_Meter_Value_Command': Get_Current_Meter_Value_Command,
    'Get_Meter_Data_Command': Get_Meter_Data_Command,
    'Get_Meter_History_Command': Get_Meter_History_Command,
    'Get_Sensing_Data_Command': Get_Sensing_Data_Command,
    'Humidity': Humidity,
    'Hvac': Hvac,
    'Illuminance_Unit': Illuminance_Unit,
    'Level_Control_Function': Level_Control_Function,
    'Light': Light,
    'Light_Switch': Light_Switch,
    'Lighting_Device': Lighting_Device,
    'Load': Load,
    'Measurement': Measurement,
    'Meter': Meter,
    'Metering_Function': Metering_Function,
    'Micro_Renewable': Micro_Renewable,
    'Motion': Motion,
    'Multi_Level_State': Multi_Level_State,
    'Multimedia': Multimedia,
    'Network': Network,
    'Notify_Command': Notify_Command,
    'Occupancy': Occupancy,
    'Off_Command': Off_Command,
    'Off_State': Off_State,
    'On_Command': On_Command,
    'On_Off_Function': On_Off_Function,
    'On_Off_State': On_Off_State,
    'On_State': On_State,
    'Open_Close_Function': Open_Close_Function,
    'Open_Close_State': Open_Close_State,
    'Open_Command': Open_Command,
    'Open_State': Open_State,
    'Pause_Command': Pause_Command,
    'Power': Power,
    'Power_Unit': Power_Unit,
    'Pressure': Pressure,
    'Pressure_Unit': Pressure_Unit,
    'Price': Price,
    'Profile': Profile,
    'Property': Property,
    'Sensing_Function': Sensing_Function,
    'Sensor': Sensor,
    'Service': Service,
    'Set_Absolute_Level_Command': Set_Absolute_Level_Command,
    'Set_Level_Command': Set_Level_Command,
    'Set_Relative_Level_Command': Set_Relative_Level_Command,
    'Smoke': Smoke,
    'Smoke_Sensor': Smoke_Sensor,
    'Start_Command': Start_Command,
    'Start_State': Start_State,
    'Start_Stop_Function': Start_Stop_Function,
    'Start_Stop_State': Start_Stop_State,
    'State': State,
    'Step_Down_Command': Step_Down_Command,
    'Step_Up_Command': Step_Up_Command,
    'Stop_Command': Stop_Command,
    'Stop_State': Stop_State,
    'Storage': Storage,
    'Switch': Switch,
    'Switch_On_Service': Switch_On_Service,
    'Task': Task,
    'Temperature': Temperature,
    'Temperature_Sensor': Temperature_Sensor,
    'Temperature_Unit': Temperature_Unit,
    'Thing': Thing,
    'Time': Time,
    'Toggle_Command': Toggle_Command,
    'Washing_Machine': Washing_Machine,
    'Water': Water,
}
# Registry mapping each individual's name to its SemanticIndividual class,
# used by the semantic manager to resolve individuals by name at runtime.
semantic_manager.individual_catalogue = {
    'Individual1': Individual1,
    'Individual2': Individual2,
    'Individual3': Individual3,
    'Individual4': Individual4,
    'United_States_Dollar': United_States_Dollar,
    'Bar': Bar,
    'Degree_Celsius': Degree_Celsius,
    'Degree_Fahrenheit': Degree_Fahrenheit,
    'Euro': Euro,
    'Kelvin': Kelvin,
    'Kilowatt': Kilowatt,
    'Kilowatt_Hour': Kilowatt_Hour,
    'Lux': Lux,
    'Pascal': Pascal,
    'Great_Britain_Pound_Sterling': Great_Britain_Pound_Sterling,
    'Watt': Watt,
    'Cleaning': Cleaning,
    'Close': Close,
    'Comfort': Comfort,
    'Drying': Drying,
    'Energyefficiency': Energyefficiency,
    'Entertainment': Entertainment,
    'Get_Current_Meter_Value': Get_Current_Meter_Value,
    'Get_Meter_Data': Get_Meter_Data,
    'Get_Meter_History': Get_Meter_History,
    'Get_Sensing_Data': Get_Sensing_Data,
    'Lighting': Lighting,
    'Meter_Reading': Meter_Reading,
    'Notify': Notify,
    'Off_': Off_,
    'On': On,
    'Open': Open,
    'Pause': Pause,
    'Safety': Safety,
    'Set_Absolute_Level': Set_Absolute_Level,
    'Set_Relative_Level': Set_Relative_Level,
    'Start': Start,
    'Step_Down': Step_Down,
    'Step_Up': Step_Up,
    'Stop': Stop,
    'Toggle': Toggle,
    'Washing': Washing,
    'Wellbeing': Wellbeing,
    'Watt_Hour': Watt_Hour,
}
| 29.788979
| 165
| 0.73243
| 222,942
| 0.963569
| 0
| 0
| 0
| 0
| 0
| 0
| 88,913
| 0.384288
|
549fd848dd75d3c337cc6b1655249d58340ef912
| 2,744
|
py
|
Python
|
plotting/trackTurnOn.py
|
will-fawcett/trackerSW
|
fc097b97539d0b40a15e1d6e112f4048cb4122b4
|
[
"MIT"
] | null | null | null |
plotting/trackTurnOn.py
|
will-fawcett/trackerSW
|
fc097b97539d0b40a15e1d6e112f4048cb4122b4
|
[
"MIT"
] | null | null | null |
plotting/trackTurnOn.py
|
will-fawcett/trackerSW
|
fc097b97539d0b40a15e1d6e112f4048cb4122b4
|
[
"MIT"
] | null | null | null |
from utils import prepareLegend
from colours import colours
from ROOT import *
# Global ROOT configuration: batch mode (no GUI windows) and shared canvas
# styling for all plots produced by this script.
gROOT.SetBatch(1)
gStyle.SetPadLeftMargin(0.15) # increase space for left margin
gStyle.SetPadBottomMargin(0.15) # increase space for bottom margin
gStyle.SetGridStyle(3)
gStyle.SetGridColor(kGray)
gStyle.SetPadTickX(1) # add tics on top x
gStyle.SetPadTickY(1) # add tics on right y
# Output directory for all saved PDFs and common histogram rebin factor.
OUTPUT_DIR = 'plots/'
REBIN = 2
def main():
    """Produce per-pT-cut track turn-on (efficiency) curves.

    Reads the denominator histogram (all truth tracks) and, for each pT
    threshold, a numerator histogram from the Delphes output file; saves a
    log-scale overlay and a turn-on curve per threshold, plus one combined
    multi-threshold turn-on figure, all under OUTPUT_DIR.
    """
    ifile = TFile.Open('/Users/Will/Documents/fcc/trackerSW/delphes/output_ttbar_mu1000.root')
    # NOTE(review): only 'colours' is imported from the colours module above;
    # 'Colours' is undefined here and will raise NameError at runtime unless
    # provided elsewhere — confirm the intended symbol in the colours module.
    colourDef = Colours()

    # Denominator: pT spectrum of all truth tracks.
    truthTrackPt = ifile.Get('truthTrack100')
    truthTrackPt.Rebin(REBIN)

    can = TCanvas('can', 'can', 500, 500)
    line = TF1('line', '1', 0, 100)  # efficiency = 1 guide line
    line.SetLineColor(kGray)

    tGraphs = {}
    leg = prepareLegend('bottomRight', [0.7, 0.15, 0.9, 0.35])
    for i in range(0, 6):
        ptCut = (i + 1) * 5
        hName = 'truthTrackPt{0}'.format(ptCut)
        print(hName)  # fixed: was a Python 2 print statement
        ptAfterCut = ifile.Get(hName)
        ptAfterCut.SetLineColor(kRed)
        ptAfterCut.Rebin(REBIN)

        # Overlay of the numerator on the denominator (log scale).
        can.SetLogy()
        truthTrackPt.Draw()
        ptAfterCut.Draw('same')
        can.SaveAs(OUTPUT_DIR + 'tracksPt{0}.pdf'.format(ptCut))

        # Turn-on curve: TGraphAsymmErrors(numerator, denominator).
        ratio = TGraphAsymmErrors(ptAfterCut, truthTrackPt)
        can.SetLogy(0)
        ratio.Draw('AP')
        line.Draw('same')
        xaxis = ratio.GetXaxis()
        xaxis.SetRangeUser(0, ptCut * 3)
        xaxis.SetTitle('Truth track p_{T} [GeV]')
        yaxis = ratio.GetYaxis()
        yaxis.SetTitle('Efficiency')
        can.SaveAs(OUTPUT_DIR + 'turnOnPt{0}.pdf'.format(ptCut))
        tGraphs[ptCut] = ratio

    # Combined turn-on curves for a subset of thresholds.
    ptCuts = [5, 10, 15, 20]
    colours = [colourDef.blue, colourDef.red, colourDef.orange, colourDef.purple]
    for i, cut in enumerate(ptCuts):
        gr = tGraphs[cut]
        gr.SetLineColor(colours[i])
        gr.SetMarkerColor(colours[i])
        leg.AddEntry(gr, 'p_{T} > ' + str(cut) + ' GeV')
        if i == 0:
            # First graph defines the frame (axes, ranges); the rest overlay.
            gr.Draw('APl')
            gr.SetMinimum(0)
            gr.GetXaxis().SetRangeUser(0, 45)
            line.Draw('same')
            gr.Draw('Psame')
        else:
            gr.Draw('Plsame')
    leg.Draw()
    can.SaveAs(OUTPUT_DIR + 'trackTurnOn.pdf')
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
| 27.717172
| 94
| 0.623178
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 889
| 0.32398
|
54a054f1ed42ee815b1ac8ae21d88b15ea91f8bb
| 154
|
py
|
Python
|
pybo/inits/__init__.py
|
hfukada/pybo
|
3be57adad901fcd8d45b8ee2af7c6032ab47611d
|
[
"BSD-2-Clause"
] | 115
|
2015-01-21T21:31:22.000Z
|
2021-08-08T17:10:16.000Z
|
pybo/inits/__init__.py
|
hfukada/pybo
|
3be57adad901fcd8d45b8ee2af7c6032ab47611d
|
[
"BSD-2-Clause"
] | 5
|
2016-02-24T16:00:01.000Z
|
2020-12-21T00:28:30.000Z
|
pybo/inits/__init__.py
|
hfukada/pybo
|
3be57adad901fcd8d45b8ee2af7c6032ab47611d
|
[
"BSD-2-Clause"
] | 35
|
2015-02-27T15:27:36.000Z
|
2020-08-19T07:43:53.000Z
|
"""
Initialization methods.
"""
# pylint: disable=wildcard-import
from .methods import *
from . import methods
# Re-export exactly the public names declared by the methods submodule.
__all__ = list(methods.__all__)
| 12.833333
| 33
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 64
| 0.415584
|
54a07034e31ea393994499d210b41085f8ae28cb
| 2,362
|
py
|
Python
|
src/Process/Process.py
|
mauriciocarvalho01/pln_api
|
06743f1ae9e084ad15f1c91b32eb3719344f4a4b
|
[
"MIT"
] | 1
|
2021-12-14T19:10:44.000Z
|
2021-12-14T19:10:44.000Z
|
src/Process/Process.py
|
mauriciocarvalho01/pln_api
|
06743f1ae9e084ad15f1c91b32eb3719344f4a4b
|
[
"MIT"
] | null | null | null |
src/Process/Process.py
|
mauriciocarvalho01/pln_api
|
06743f1ae9e084ad15f1c91b32eb3719344f4a4b
|
[
"MIT"
] | null | null | null |
import spacy
from nltk.tokenize import word_tokenize
from nltk.tokenize import sent_tokenize
from nltk.corpus import stopwords
from nltk.probability import FreqDist
from string import punctuation
from tqdm import tqdm
from rank_bm25 import BM25Okapi
import time
from collections import defaultdict
from heapq import nlargest
import nltk
nltk.download('punkt')
nltk.download('stopwords')
from operator import itemgetter
from .ProcessFiles import ProcessFiles
from src.Entity.ChatResponse import ChatResponse
from src.Entity.Files import Files
from .Thread import Thread
from .Resume import Resume
from .Tools import Tools
class Process:
    """Dispatches an incoming NLP request ('query' or 'resume') for a user file."""

    # @staticmethod added: the original signature took no 'self' and was only
    # callable as Process.initProcess(database, process); this preserves that
    # call form and additionally makes instance calls safe.
    @staticmethod
    def initProcess(database, process):
        """Handle one request described by ``process``.

        Args:
            database: DB handle used to look up files and cached responses.
            process: dict with keys 'action', 'request_query', 'file' and
                'user_id'; enriched in place with 'type' and 'hash'.

        Returns:
            A dict (cached answer, error, learning notice or resume payload),
            or a plain string for an unknown action.
        """
        action = process['action']
        print(action)
        text = process['request_query']
        file = process['file']
        user_id = process['user_id']
        print(user_id)
        # Hash of the query text, used as the cache key for chat responses
        # (renamed from 'hash' to avoid shadowing the builtin).
        text_hash = Tools.encodeBase64(text)
        file = Files.getFiles(database, file, user_id)
        if not file:
            return {"status": "erro", "message": "Não achei nenhum arquivo cadastrado"}
        process['type'] = file[0]['type']
        process['hash'] = text_hash
        # Cached answers only exist for 'query' actions.
        chat_response = []
        if action == 'query':
            chat_response = ChatResponse.updateChatResponse(database, process)
        if chat_response:
            return chat_response[0]
        if action == "query":
            # No cached answer yet: learn in the background and tell the
            # caller to retry shortly.
            Thread(database, process).start()
            return {"status": "learning", "message": "Ainda não sei a resposta, estou aprendendo...Pergunte - me novamente em instantes"}
        elif action == "resume":
            return Resume.resumeFile(process)
        else:
            return "Não reconheço essa ação"
| 34.735294
| 148
| 0.600762
| 1,735
| 0.732686
| 0
| 0
| 0
| 0
| 0
| 0
| 622
| 0.262669
|
54a10b062decccd624d8a14f46543d84c61a99d9
| 176
|
py
|
Python
|
project_e/jobs/apps.py
|
ElectricFleming/project-e
|
cf05d2a835a09555e3dba5813d635d329684a71c
|
[
"bzip2-1.0.6"
] | null | null | null |
project_e/jobs/apps.py
|
ElectricFleming/project-e
|
cf05d2a835a09555e3dba5813d635d329684a71c
|
[
"bzip2-1.0.6"
] | 3
|
2020-01-30T03:47:26.000Z
|
2021-05-11T00:58:08.000Z
|
project_e/jobs/apps.py
|
effortless-electric/project-e
|
ae4e8415204319999ee2ecac248e2504ec1fff63
|
[
"bzip2-1.0.6"
] | 1
|
2019-12-27T22:45:45.000Z
|
2019-12-27T22:45:45.000Z
|
from django.apps import AppConfig
from django.utils.translation import gettext_lazy as _
class JobsConfig(AppConfig):
    """Django app configuration for the jobs application."""
    # Dotted path Django uses to locate the app package.
    name = 'project_e.jobs'
    # Human-readable, translatable label (e.g. shown in the admin).
    verbose_name = _("Jobs")
| 25.142857
| 54
| 0.761364
| 85
| 0.482955
| 0
| 0
| 0
| 0
| 0
| 0
| 22
| 0.125
|
54a29568d20a9d3cd8819302aa5a4f6675a50ec6
| 3,080
|
py
|
Python
|
Final_plot/request_type(pie).py
|
ashutoshbhadke/weblog-visualizer
|
7fd10535fe0909291da194776b053eca1640b1e9
|
[
"MIT"
] | null | null | null |
Final_plot/request_type(pie).py
|
ashutoshbhadke/weblog-visualizer
|
7fd10535fe0909291da194776b053eca1640b1e9
|
[
"MIT"
] | null | null | null |
Final_plot/request_type(pie).py
|
ashutoshbhadke/weblog-visualizer
|
7fd10535fe0909291da194776b053eca1640b1e9
|
[
"MIT"
] | null | null | null |
import csv
from pylab import *
import matplotlib.pyplot as plt
count1=[]
req_data=[]
def get_request (str):
    """Parse weblog.txt and tally the request types hitting the host *str*.

    Appends each distinct request string to the module-level ``req_data``
    list and the matching hit counts to ``count1`` (shared with main()),
    then returns ``count1``.

    NOTE(review): the parameter name shadows the builtin ``str``, and the
    module-level lists mean repeated calls accumulate results -- behaviour
    intentionally left as-is here.
    """
    # NOTE(review): field layout assumes a space-delimited Apache-style log;
    # the column meanings below are inferred from the indexing -- confirm.
    f=open('weblog.txt','r')
    pdata=[]
    req_data1=[]
    data=csv.reader(f,delimiter=' ')
    for row in data:
        # Strip the leading '[' of the timestamp field and re-join the
        # hh:mm:ss part into a single element.
        row[3]=row[3][1:]
        row[3]=row[3].split(':')
        row[3][1:4]=[':'.join(row[3][1:4])]
        # Split the request field; row[5][0][1] ends up as the host name.
        row[5]=row[5].split('/')
        row[5][0]=row[5][0].split(' ')
        #print(row[5][0][1])
        row[4]=row[4][:5]
        row[9]=row[9].split(' ')
        row[9][1:15]=[':'.join(row[9][1:15])]
        # Normalise the host by dropping a leading 'www.' prefix.
        if row[5][0][1][:4].lower() == 'www.':
            row[5][0][1]=row[5][0][1][4:]
        pdata.append(row)
    #for term in pdata:
    #    print(term)
    for row in pdata:
        #print(row[6])
        item=row[6]
        # Collect every request made to the target host ...
        if row[5][0][1]==str:
            req_data1.append(item)
        # ... and remember each distinct request type only once.
        if item in req_data:
            continue
        else:
            if (row[5][0][1]==str):
                req_data.append(row[6])
    #print(ipdata1)
    # One count per distinct request type, aligned with req_data's order.
    for row in req_data:
        count1.append(req_data1.count(row))
    print(count1)
    f.close()
    return count1;
def main():
    """Draw a pie chart of the request types for www.kinneryandrajan.com.

    The two triple-quoted blocks below are disabled line-plot / bar-chart
    variants that the original author kept as dead string literals; they
    are expression statements with no effect and are left untouched.
    """
    count=[]
    count=get_request('www.kinneryandrajan.com')
    '''#this is for non bar plot
    plt.ylabel('WWW.TWIBUZZ.COM')
    #plt.xlabel("No of Hits by Different IP's")
    #plt.xticks(count,ipdata)
    plt.plot(count,'g*-',label='Hit Count', linewidth=2)''
    #this is bar graph
    #plt.xticks(count,ipdata,rotation='vertical')'''
    '''import pylab as p
    fig = p.figure()
    ax = fig.add_subplot(1,1,1)
    N=len(count)
    ind=range(len(count))
    ax.bar(ind, count, facecolor='blue', ecolor='black')
    ax.set_ylabel('No of Hits')
    ax.set_title("Hit count of Different IP's on www.twibuzz.com",fontstyle='italic')
    from matplotlib.ticker import MultipleLocator, FormatStrFormatter
    majorLocator = MultipleLocator(1)
    ax.xaxis.set_major_locator(majorLocator)
    ax.set_xticklabels(req_data,rotation='vertical')
    #ax.xaxis.set_linespacing(4)
    #fig.autofmt_xdate()
    p.show()
    plt.bar(range(len(count)),count,align="center",width=0.5,alpha=0.5)
    plt.ylabel('WWW.TWIBUZZ.COM')
    plt.xlabel('No of Hits')
    plt.set_xticklabels(count)
    def autolabel(rects):
        for rect in rects:
            height = rect
            plt.text(1.05*height, '%d'%int(height),
                ha='center', va='bottom')
    plt.show()
    '''
    # figure/axes/pie/title/show come from 'from pylab import *' at the top.
    figure(1, figsize=(6,6))
    ax = axes([0.1, 0.1, 0.8, 0.8])
    #explode=(1, 0.05, 1)
    pie(count, labels=req_data,autopct='%1.1f%%', shadow=True, startangle=90)
    title('Type of Request to www.kinneryandrajan.com', bbox={'facecolor':'0.8', 'pad':5})
    show()
    pass
# Script entry point.
if __name__ == '__main__':
    main()
| 26.101695
| 91
| 0.500974
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,516
| 0.492208
|
54a40265eb0edbb4261d2c562a057abf3c76c839
| 5,979
|
py
|
Python
|
pandas/lib/excelRW.py
|
philip-shen/note_python
|
db0ad84af25464a22ac52e348960107c81e74a56
|
[
"MIT"
] | null | null | null |
pandas/lib/excelRW.py
|
philip-shen/note_python
|
db0ad84af25464a22ac52e348960107c81e74a56
|
[
"MIT"
] | 11
|
2021-02-08T20:45:23.000Z
|
2022-03-12T01:00:11.000Z
|
pandas/lib/excelRW.py
|
philip-shen/note_python
|
db0ad84af25464a22ac52e348960107c81e74a56
|
[
"MIT"
] | null | null | null |
## 2018/08/17 Initial
## 2018/08/18 Add CSV format
## 2018/08/23 Add def get_stockidxname_SeymourExcel(),def get_stockidx_SeymourExcel()
## def get_all_stockidx_SeymourExcel() from test_crawl.py
## 2018/09/06 Add value of column 'PBR' in def readExcel()
## 2018/10/27 Add exception handling in def readExcel(self,dir_execlfile)
## 2019/07/20 Add get_all_stockidxname_SeymourExcel, get_stockname_SeymourExcel and get_all_stockname_SeymourExcel
#################################################################
import xlrd
import xlwt
import xlutils.copy
import csv
import os
from logger import logger
class ExcelRW:
    """Helpers for reading Seymour stock-valuation Excel sheets (via xlrd)
    and for writing CSV files with the extracted data."""

    def readExcel(self, dir_execlfile):
        """Read the first sheet of *dir_execlfile* and return its data rows.

        Each returned row is a list:
        [stock code, company name, value ratio ("價值比"), PBR, cash dividend].
        The header row (whose column 10 holds the literal label "價值比") is
        skipped.  Raises FileNotFoundError if the file does not exist.
        """
        try:
            data = xlrd.open_workbook(dir_execlfile)  # open the workbook
            table = data.sheets()[0]                  # first sheet only
            nrows = table.nrows                       # row count of the sheet
        except FileNotFoundError as fnf_error:
            # Bug fix: the original only printed the error and then fell
            # through, crashing with a NameError on 'nrows'. Keep the print
            # (existing best-effort diagnostics) but re-raise so callers see
            # the real cause.
            print(fnf_error)
            raise
        list_rtu_row_values = []
        for row in range(nrows):
            # Skip the header row; data rows never carry the label text.
            if table.row_values(row)[10] != "價值比":
                # 2019/07/20: keep the full code string because some lists
                # ("低波固收追蹤股") use codes longer than 4 digits.
                list_row_values = [str(table.row_values(row)[1]),
                                   table.row_values(row)[2],
                                   table.row_values(row)[10],  # column "價值比" (value ratio)
                                   table.row_values(row)[4],   # column 'PBR'
                                   table.row_values(row)[7]]   # column '現金股利' (cash dividend)
                list_rtu_row_values.append(list_row_values)
        return list_rtu_row_values

    def writeCSVbyTable(self, dir_csvfile, list_table):
        """Write *list_table* (a 2-D list) to *dir_csvfile* as comma-separated CSV."""
        with open(dir_csvfile, 'w', newline='') as csvfile:
            writer = csv.writer(csvfile, delimiter=',')
            writer.writerows(list_table)

    def writeCSVbyRow(self, dir_csvfile, list_row):
        """Write the single row *list_row* to *dir_csvfile*, space-delimited."""
        # Bug fix: the original passed newline=',' which is not a legal value
        # for open() (must be None, '', '\n', '\r' or '\r\n') and raised
        # ValueError on every call; '' is the documented value for csv output.
        with open(dir_csvfile, 'w', newline='') as csvfile:
            writer = csv.writer(csvfile, delimiter=' ')
            writer.writerow(list_row)

    def get_stockidxname_SeymourExcel(self, dirnamelog, excelfname):
        """Return [code, name] pairs read from one Excel file under *dirnamelog*."""
        logger.info('Read Excel file::{0}'.format(excelfname))
        dirlog_ExcelFile = os.path.join(dirnamelog, excelfname)
        list_row_value_price = self.readExcel(dirlog_ExcelFile)
        # Keep only stock code and company name from each parsed row.
        return [[row_value[0], row_value[1]] for row_value in list_row_value_price]

    def get_all_stockidxname_SeymourExcel(self, dir_log, list_excel_files):
        """Return [code, name] pairs aggregated over all *list_excel_files*."""
        list_rtu_all_stockidx_stockidxname = []
        for excel_file in list_excel_files:
            list_rtu_all_stockidx_stockidxname.extend(
                self.get_stockidxname_SeymourExcel(dir_log, excel_file))
        return list_rtu_all_stockidx_stockidxname

    def get_stockidx_SeymourExcel(self, dirnamelog, excelfname):
        """Return [code] singletons read from one Excel file under *dirnamelog*."""
        print('將讀取Excel file:', excelfname, '的資料')
        dirlog_ExcelFile = os.path.join(dirnamelog, excelfname)
        list_row_value_price = self.readExcel(dirlog_ExcelFile)
        return [[row_value[0]] for row_value in list_row_value_price]

    def get_all_stockidx_SeymourExcel(self, dir_log, list_excel_files):
        """Return [code] singletons aggregated over all *list_excel_files*."""
        list_rtu_all_stockidx = []
        for excel_file in list_excel_files:
            list_rtu_all_stockidx.extend(
                self.get_stockidx_SeymourExcel(dir_log, excel_file))
        return list_rtu_all_stockidx

    def get_stockname_SeymourExcel(self, dirnamelog, excelfname):
        """Return [name] singletons read from one Excel file under *dirnamelog*."""
        print('將讀取Excel file:', excelfname, '的資料')
        dirlog_ExcelFile = os.path.join(dirnamelog, excelfname)
        list_row_value_price = self.readExcel(dirlog_ExcelFile)
        return [[row_value[1]] for row_value in list_row_value_price]

    def get_all_stockname_SeymourExcel(self, dir_log, list_excel_files):
        """Return [name] singletons aggregated over all *list_excel_files*."""
        list_rtu_all_stockname = []
        for excel_file in list_excel_files:
            list_rtu_all_stockname.extend(
                self.get_stockname_SeymourExcel(dir_log, excel_file))
        return list_rtu_all_stockname
| 40.398649
| 118
| 0.630373
| 5,714
| 0.901688
| 0
| 0
| 0
| 0
| 0
| 0
| 2,215
| 0.349534
|
54a4ba9c11d3248dceffbbc60702b2f7f2e73b4a
| 3,950
|
py
|
Python
|
launchpad2github.py
|
mleinart/launchpad2github
|
faade979a1f209dc1d25aa82a32f6342dbfe35b3
|
[
"MIT"
] | 2
|
2016-10-07T08:55:40.000Z
|
2017-08-30T16:43:57.000Z
|
launchpad2github.py
|
mleinart/launchpad2github
|
faade979a1f209dc1d25aa82a32f6342dbfe35b3
|
[
"MIT"
] | null | null | null |
launchpad2github.py
|
mleinart/launchpad2github
|
faade979a1f209dc1d25aa82a32f6342dbfe35b3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
import time
from getpass import getpass
from optparse import OptionParser
from termcolor import colored
from launchpadlib.launchpad import Launchpad
from github3 import login as github_login
from github3 import GitHubError
# Launchpad bug-task statuses considered "open" for migration purposes.
ACTIVE_STATUSES = [
    "New",
    "Confirmed",
    "Triaged",
    "In Progress"
]

# Bug attributes printed for review before each import.
IMPORTED_FIELDS = [
    "owner",
    "web_link",
    "date_created",
    "date_last_updated",
    "tags",
]
def main(args):
usage = """%s: <lp project> <gh project>\n""" % (sys.argv[0],)
parser = OptionParser(usage=usage)
options, args = parser.parse_args(args=args)
if len(args) != 2:
parser.print_usage()
return 1
lp_project_name = args[0]
gh_project_name = args[1]
try:
gh_owner, gh_repo = gh_project_name.split('/')
except ValueError:
print "Unable to parse target Github repo: '%s'" % gh_project_name
print "Repo should be specified as <owner>/<repo>"
print "Authenticating with Launchpad"
launchpad = Launchpad.login_with(os.path.basename(sys.argv[0]), 'production')
print "Authenticating with Github"
github_user = raw_input("Github username: ")
github_pass = getpass("Github password: ")
try:
github = github_login(github_user, github_pass)
github.user()
except GitHubError:
raise SystemExit("Invalid Github login or problem contacting server")
# Validate launchpad project
try:
lp_project = launchpad.projects[lp_project_name]
except KeyError:
raise SystemExit("Unable to find project named '%s' on Launchpad" % lp_project_name)
# Validate github project
if github.repository(gh_owner, gh_repo) is None:
raise SystemExit("Unable to find Github project %s/%s" % (gh_owner, gh_repo))
# Begin migration
open_tasks = lp_project.searchTasks(status=ACTIVE_STATUSES)
for bug_task in open_tasks:
for field in IMPORTED_FIELDS:
print colored(field + ':', 'cyan') + colored(bug_task.bug.__getattr__(field), 'yellow')
print colored(bug_task.bug.description, 'yellow')
print
if confirm_or_exit(colored("Import?", 'cyan')):
title = bug_task.bug.title
description = format_description(bug_task.bug)
issue = github.create_issue(owner=gh_owner, repository=gh_repo, title=title, body=description)
for i, message in enumerate(bug_task.bug.messages):
if i == 0: continue # repeat of description
time.sleep(0.5)
comment = format_comment(message)
issue.create_comment(body=comment)
issue.add_labels('launchpad_import')
print colored("Created issue %d: %s" % (issue.number, issue.html_url), 'yellow')
if confirm_or_exit(colored("Close and update original?", 'cyan')):
bug_task.bug.newMessage(content="Migrated to Github: %s" % issue.html_url)
bug_task.status = "Won't Fix"
bug_task.bug.lp_save()
bug_task.lp_save()
def format_description(bug):
    """Render a Launchpad bug as the Markdown body of a new Github issue."""
    fields = {
        'web_link': bug.web_link,
        'owner': format_user(bug.owner),
        'date_created': bug.date_created.strftime("%b %d, %Y"),
    }
    parts = ["""#### Imported from %(web_link)s
|||
|----|----|
|Reported by|%(owner)s|
|Date Created|%(date_created)s|
""" % fields]
    if bug.tags:
        parts.append("|Tags|%s|" % bug.tags)
    parts.append(bug.description.replace("Original description:\n", ""))
    return "".join(parts)
def format_user(user):
    """Return a Markdown link to *user*'s Launchpad profile page."""
    return "[{0}]({1})".format(user.name, user.web_link)
def format_comment(message):
    """Render a Launchpad bug message as a Markdown Github comment."""
    stamp = message.date_created.strftime("%b %d, %Y")
    header = "#### Comment by %s on %s:\n" % (format_user(message.owner), stamp)
    return header + message.content
def confirm_or_exit(prompt):
    """Ask *prompt* until the user answers y/n/q.

    Returns True for 'y', False for 'n'; 'q' exits the process with status 0.
    """
    valid = ['y', 'n', 'q']
    answer = None
    while answer not in valid:
        answer = raw_input("%s (%s): " % (prompt, '/'.join(valid))).lower()
    if answer == 'q':
        raise SystemExit(0)
    return answer == 'y'
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| 27.816901
| 100
| 0.679241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1,018
| 0.257722
|
54a4f81f72eecfec1f015beea32efd5b9edfa7de
| 168
|
py
|
Python
|
Curso-em-video-Python3-mundo1/ex024.py
|
bernardombraga/Solucoes-exercicios-cursos-gratuitos
|
0347a8325443fce84e0a753c96f523a22858537b
|
[
"MIT"
] | null | null | null |
Curso-em-video-Python3-mundo1/ex024.py
|
bernardombraga/Solucoes-exercicios-cursos-gratuitos
|
0347a8325443fce84e0a753c96f523a22858537b
|
[
"MIT"
] | null | null | null |
Curso-em-video-Python3-mundo1/ex024.py
|
bernardombraga/Solucoes-exercicios-cursos-gratuitos
|
0347a8325443fce84e0a753c96f523a22858537b
|
[
"MIT"
] | null | null | null |
# Read the user's birth city, normalise it, and report (True/False) whether
# the first word of the city name is 'santo'.
resposta = str(input('Em que cidade você nasceu? '))
normalizada = resposta.strip().lower()
palavras = normalizada.split()
primeira_palavra = palavras[0]
print(primeira_palavra == 'santo')
| 28
| 51
| 0.684524
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 37
| 0.218935
|
54a68c80a2f5f81aaa165bc135be5a9f31aa99a1
| 8,754
|
py
|
Python
|
tests/unit/test_parameters/test_lead_acid_parameters.py
|
jatin837/PyBaMM
|
837421bd5b251647a257c23540ceb2908a225bdb
|
[
"BSD-3-Clause"
] | 1
|
2021-04-25T09:53:40.000Z
|
2021-04-25T09:53:40.000Z
|
tests/unit/test_parameters/test_lead_acid_parameters.py
|
jatin837/PyBaMM
|
837421bd5b251647a257c23540ceb2908a225bdb
|
[
"BSD-3-Clause"
] | null | null | null |
tests/unit/test_parameters/test_lead_acid_parameters.py
|
jatin837/PyBaMM
|
837421bd5b251647a257c23540ceb2908a225bdb
|
[
"BSD-3-Clause"
] | null | null | null |
#
# Test for the standard lead acid parameters
#
import pybamm
from tests import get_discretisation_for_testing
import unittest
class TestStandardParametersLeadAcid(unittest.TestCase):
    """Unit tests for pybamm.LeadAcidParameters (Sulzer2019 parameter set)."""

    def test_scipy_constants(self):
        """Gas constant R and Faraday constant F carry their physical values."""
        param = pybamm.LeadAcidParameters()
        self.assertAlmostEqual(param.R.evaluate(), 8.314, places=3)
        self.assertAlmostEqual(param.F.evaluate(), 96485, places=0)

    def test_print_parameters(self):
        """print_parameters accepts a parameter class, a file target, and a dict."""
        parameters = pybamm.LeadAcidParameters()
        parameter_values = pybamm.lead_acid.BaseModel().default_parameter_values
        output_file = "lead_acid_parameters.txt"
        parameter_values.print_parameters(parameters, output_file)
        # test print_parameters with dict and without C-rate
        del parameter_values["Nominal cell capacity [A.h]"]
        parameters = {"C_e": parameters.C_e, "sigma_n": parameters.sigma_n}
        parameter_values.print_parameters(parameters)

    def test_parameters_defaults_lead_acid(self):
        """Default dimensionless parameters satisfy expected magnitude bounds."""
        # Load parameters to be tested
        parameters = pybamm.LeadAcidParameters()
        parameter_values = pybamm.lead_acid.BaseModel().default_parameter_values
        param_eval = parameter_values.print_parameters(parameters)
        param_eval = {k: v[0] for k, v in param_eval.items()}
        # Diffusional C-rate should be smaller than C-rate
        self.assertLess(param_eval["C_e"], param_eval["C_rate"])
        # Dimensionless electrode conductivities should be large
        self.assertGreater(
            parameter_values.evaluate(parameters.sigma_n(parameters.T_ref)), 10
        )
        self.assertGreater(
            parameter_values.evaluate(parameters.sigma_p(parameters.T_ref)), 10
        )
        # Rescaled dimensionless electrode conductivities should still be large
        self.assertGreater(
            parameter_values.evaluate(parameters.sigma_n_prime(parameters.T_ref)), 10
        )
        self.assertGreater(
            parameter_values.evaluate(parameters.sigma_p_prime(parameters.T_ref)), 10
        )
        # Dimensionless double-layer capacity should be small
        self.assertLess(param_eval["C_dl_n"], 1e-3)
        self.assertLess(param_eval["C_dl_p"], 1e-3)
        # Volume change positive in negative electrode and negative in positive
        # electrode
        self.assertLess(param_eval["DeltaVsurf_n"], 0)
        self.assertGreater(param_eval["DeltaVsurf_p"], 0)

    def test_concatenated_parameters(self):
        """s_plus_S spans all three domains and discretises to the mesh size."""
        # create
        param = pybamm.LeadAcidParameters()
        s_param = param.s_plus_S
        self.assertIsInstance(s_param, pybamm.Concatenation)
        self.assertEqual(
            s_param.domain, ["negative electrode", "separator", "positive electrode"]
        )
        # process parameters and discretise
        parameter_values = pybamm.ParameterValues(
            chemistry=pybamm.parameter_sets.Sulzer2019
        )
        disc = get_discretisation_for_testing()
        processed_s = disc.process_symbol(parameter_values.process_symbol(s_param))
        # test output
        combined_submeshes = disc.mesh.combine_submeshes(
            "negative electrode", "separator", "positive electrode"
        )
        self.assertEqual(processed_s.shape, (combined_submeshes.npts, 1))

    def test_current_functions(self):
        """Dimensional and dimensionless current densities evaluate consistently."""
        # create current functions
        param = pybamm.LeadAcidParameters()
        dimensional_current_density = param.dimensional_current_density_with_time
        dimensionless_current_density = param.current_with_time
        # process
        parameter_values = pybamm.ParameterValues(
            {
                "Electrode height [m]": 0.1,
                "Electrode width [m]": 0.1,
                "Negative electrode thickness [m]": 1,
                "Separator thickness [m]": 1,
                "Positive electrode thickness [m]": 1,
                "Typical electrolyte concentration [mol.m-3]": 1,
                "Number of electrodes connected in parallel to make a cell": 8,
                "Typical current [A]": 2,
                "Current function [A]": 2,
            }
        )
        dimensional_current_density_eval = parameter_values.process_symbol(
            dimensional_current_density
        )
        dimensionless_current_density_eval = parameter_values.process_symbol(
            dimensionless_current_density
        )
        # current / (number of electrodes * height * width)
        self.assertAlmostEqual(
            dimensional_current_density_eval.evaluate(t=3), 2 / (8 * 0.1 * 0.1)
        )
        self.assertEqual(dimensionless_current_density_eval.evaluate(t=3), 1)

    def test_thermal_parameters(self):
        """Dimensionless densities and thermal conductivities match known values."""
        values = pybamm.lead_acid.BaseModel().default_parameter_values
        param = pybamm.LeadAcidParameters()
        T = 1  # dummy temperature as the values are constant
        # Density
        self.assertAlmostEqual(values.evaluate(param.rho_cn(T)), 0.8810, places=2)
        self.assertAlmostEqual(values.evaluate(param.rho_n(T)), 0.8810, places=2)
        self.assertAlmostEqual(values.evaluate(param.rho_s(T)), 0.7053, places=2)
        self.assertAlmostEqual(values.evaluate(param.rho_p(T)), 1.4393, places=2)
        self.assertAlmostEqual(values.evaluate(param.rho_cp(T)), 1.4393, places=2)
        self.assertAlmostEqual(values.evaluate(param.rho(T)), 1.7102, places=2)
        # Thermal conductivity
        self.assertAlmostEqual(values.evaluate(param.lambda_cn(T)), 1.6963, places=2)
        self.assertAlmostEqual(values.evaluate(param.lambda_n(T)), 1.6963, places=2)
        self.assertAlmostEqual(values.evaluate(param.lambda_s(T)), 0.0019, places=2)
        self.assertAlmostEqual(values.evaluate(param.lambda_p(T)), 1.6963, places=2)
        self.assertAlmostEqual(values.evaluate(param.lambda_cp(T)), 1.6963, places=2)

    def test_functions_lead_acid(self):
        """Electrolyte/OCV functions hit known endpoints and are monotonic."""
        # Load parameters to be tested
        param = pybamm.LeadAcidParameters()
        parameters = {
            "D_e_1": param.D_e(pybamm.Scalar(1), pybamm.Scalar(0)),
            "kappa_e_0": param.kappa_e(pybamm.Scalar(0), pybamm.Scalar(0)),
            "chi_1": param.chi(pybamm.Scalar(1), pybamm.Scalar(0)),
            "chi_0.5": param.chi(pybamm.Scalar(0.5), pybamm.Scalar(0)),
            "U_n_1": param.U_n(pybamm.Scalar(1), pybamm.Scalar(0)),
            "U_n_0.5": param.U_n(pybamm.Scalar(0.5), pybamm.Scalar(0)),
            "U_p_1": param.U_p(pybamm.Scalar(1), pybamm.Scalar(0)),
            "U_p_0.5": param.U_p(pybamm.Scalar(0.5), pybamm.Scalar(0)),
        }
        # Process
        parameter_values = pybamm.ParameterValues(
            chemistry=pybamm.parameter_sets.Sulzer2019
        )
        param_eval = parameter_values.print_parameters(parameters)
        param_eval = {k: v[0] for k, v in param_eval.items()}
        # Known values for dimensionless functions
        self.assertEqual(param_eval["D_e_1"], 1)
        self.assertEqual(param_eval["kappa_e_0"], 0)
        # Known monotonicity for dimensionless functions
        self.assertGreater(param_eval["chi_1"], param_eval["chi_0.5"])
        self.assertLess(param_eval["U_n_1"], param_eval["U_n_0.5"])
        self.assertGreater(param_eval["U_p_1"], param_eval["U_p_0.5"])

    def test_update_initial_state_of_charge(self):
        """Lowering the initial SOC moves the derived initial values as expected."""
        # Load parameters to be tested
        parameters = pybamm.LeadAcidParameters()
        parameter_values = pybamm.lead_acid.BaseModel().default_parameter_values
        param_eval = parameter_values.print_parameters(parameters)
        param_eval = {k: v[0] for k, v in param_eval.items()}
        # Update initial state of charge
        parameter_values.update({"Initial State of Charge": 0.2})
        param_eval_update = parameter_values.print_parameters(parameters)
        param_eval_update = {k: v[0] for k, v in param_eval_update.items()}
        # Test that relevant parameters have changed as expected
        self.assertLess(param_eval_update["q_init"], param_eval["q_init"])
        self.assertLess(param_eval_update["c_e_init"], param_eval["c_e_init"])
        self.assertLess(
            param_eval_update["epsilon_n_init"], param_eval["epsilon_n_init"]
        )
        self.assertEqual(
            param_eval_update["epsilon_s_init"], param_eval["epsilon_s_init"]
        )
        self.assertLess(
            param_eval_update["epsilon_p_init"], param_eval["epsilon_p_init"]
        )
        self.assertGreater(
            param_eval_update["curlyU_n_init"], param_eval["curlyU_n_init"]
        )
        self.assertGreater(
            param_eval_update["curlyU_p_init"], param_eval["curlyU_p_init"]
        )
if __name__ == "__main__":
print("Add -v for more debug output")
import sys
if "-v" in sys.argv:
debug = True
pybamm.settings.debug_mode = True
unittest.main()
| 43.77
| 85
| 0.664154
| 8,431
| 0.963103
| 0
| 0
| 0
| 0
| 0
| 0
| 1,759
| 0.200937
|
54a9266c033c65ceff0e6381eb549dcffd4ece05
| 890
|
py
|
Python
|
firmware/temphumid/timeset.py
|
schizobovine/unicorder
|
3165922c2662b1bd2c5ab1691c89e2af5ee185e7
|
[
"CC-BY-4.0"
] | null | null | null |
firmware/temphumid/timeset.py
|
schizobovine/unicorder
|
3165922c2662b1bd2c5ab1691c89e2af5ee185e7
|
[
"CC-BY-4.0"
] | null | null | null |
firmware/temphumid/timeset.py
|
schizobovine/unicorder
|
3165922c2662b1bd2c5ab1691c89e2af5ee185e7
|
[
"CC-BY-4.0"
] | null | null | null |
#!/usr/bin/env python
from datetime import datetime
import serial
import sys
import time
# Serial link settings for the target board.
SERIAL_BAUD = 9600
SERIAL_PORT = '/dev/ttyUSB0'
# Wire format of a time-set message: 'T' followed by the strftime output.
# NOTE(review): '%s' in strftime yields epoch seconds on glibc platforms,
# so the device receives "T<epoch>" -- platform-dependent; confirm.
TIME_FORMAT = "T%s"

# Reset device to activate time setting routine
DO_RST = True

# Open serial dong
print 'opening serial port %s...' % SERIAL_PORT
uart = serial.Serial(
    port=SERIAL_PORT,
    baudrate=SERIAL_BAUD,
    dsrdtr=DO_RST,
)

# Frobulate the DTR pin to reset the target
if DO_RST:
    print 'twiddling DTR to reset'
    # Pull RTS/DTR low, wait, then release -- resets Arduino-style boards.
    uart.setRTS(False)
    uart.setDTR(False)
    uart.flush()
    time.sleep(0.2)
    uart.flushInput()
    uart.setRTS(True)
    uart.setDTR(True)
    time.sleep(1)
    print 'reset done'

# Send start command to begin cycle
time.sleep(1)
# Send the current time repeatedly (30 tries, 100 ms apart) so the target
# catches at least one message after boot.
for i in xrange(0, 30):
    time.sleep(0.1)
    now = datetime.now().strftime(TIME_FORMAT)
    uart.write(now + "\r\n")
    uart.flush()
uart.close()
print 'done!'
sys.exit(0)
| 18.93617
| 47
| 0.683146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 259
| 0.291011
|
54a991a385bd9da3a9f26780efab2ed38b49007b
| 3,789
|
py
|
Python
|
setup.py
|
giampaolo/pysendfile
|
2ffdd452b03dd4b639cda985bd67b8d4c0c34a5f
|
[
"MIT"
] | 119
|
2015-01-06T10:26:35.000Z
|
2021-12-03T06:22:47.000Z
|
setup.py
|
giampaolo/pysendfile
|
2ffdd452b03dd4b639cda985bd67b8d4c0c34a5f
|
[
"MIT"
] | 11
|
2015-02-06T18:01:26.000Z
|
2022-03-14T09:51:28.000Z
|
setup.py
|
giampaolo/pysendfile
|
2ffdd452b03dd4b639cda985bd67b8d4c0c34a5f
|
[
"MIT"
] | 24
|
2015-01-13T20:08:46.000Z
|
2021-07-30T13:45:15.000Z
|
#!/usr/bin/env python
# ======================================================================
# This software is distributed under the MIT license reproduced below:
#
# Copyright (C) 2009-2014 Giampaolo Rodola' <g.rodola@gmail.com>
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Giampaolo Rodola' not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# Giampaolo Rodola' DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT Giampaolo Rodola' BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================
import sys
try:
    # Prefer setuptools; fall back to plain distutils when it is unavailable.
    from setuptools import Extension, setup
except ImportError:
    from distutils.core import Extension, setup

NAME = 'pysendfile'
VERSION = '2.0.1'

if sys.version_info < (2, 5):
    sys.exit('python version not supported (< 2.5)')

# On Solaris the sendfile symbol lives in libsendfile; link it explicitly.
if 'sunos' in sys.platform:
    libraries = ["sendfile"]
else:
    libraries = []
def main():
    """Run the setuptools/distutils setup for the pysendfile extension."""
    # Read the long description up front inside a context manager; the
    # original used open(...).read() inline and leaked the file handle.
    with open('README.rst', 'r') as f:
        long_description = f.read()
    setup(name=NAME,
          url='https://github.com/giampaolo/pysendfile',
          version=VERSION,
          description='A Python interface to sendfile(2)',
          long_description=long_description,
          author='Giampaolo Rodola',
          author_email='g.rodola@gmail.com',
          platforms='UNIX',
          license='MIT',
          keywords=['sendfile', 'python', 'performance', 'ftp'],
          classifiers=[
              'Development Status :: 5 - Production/Stable',
              'Intended Audience :: Developers',
              'Operating System :: POSIX :: Linux',
              'Operating System :: MacOS :: MacOS X',
              'Operating System :: POSIX :: BSD',
              'Operating System :: POSIX :: BSD :: FreeBSD',
              'Operating System :: POSIX :: SunOS/Solaris',
              'Operating System :: POSIX :: AIX',
              'Programming Language :: C',
              'Programming Language :: Python :: 2.5',
              'Programming Language :: Python :: 2.6',
              'Programming Language :: Python :: 2.7',
              'Programming Language :: Python :: 3',
              'Programming Language :: Python :: 3.0',
              'Programming Language :: Python :: 3.1',
              'Programming Language :: Python :: 3.2',
              'Programming Language :: Python :: 3.3',
              'Programming Language :: Python :: 3.4',
              'Programming Language :: Python :: 3.5',
              'Programming Language :: Python :: 3.6',
              'Programming Language :: Python :: 3.7',
              'Programming Language :: Python :: 3.8',
              'Programming Language :: Python :: 3.9',
              'Topic :: System :: Networking',
              'Topic :: System :: Operating System',
              'Topic :: Internet :: File Transfer Protocol (FTP)',
              'Topic :: Internet :: WWW/HTTP',
              'License :: OSI Approved :: MIT License',
          ],
          # The C extension itself; 'libraries' is set at module level.
          ext_modules=[Extension('sendfile',
                                 sources=['sendfilemodule.c'],
                                 libraries=libraries)])
# Script entry point.
if __name__ == '__main__':
    main()
| 40.308511
| 73
| 0.5801
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,590
| 0.683558
|
54a9d8c8660ee37792168966ac376aefeed7599f
| 3,248
|
py
|
Python
|
V1_backup/macro_ssh.py
|
YuanYuLin/iopcrestapi_client
|
5c1683d1b5b44bd8bb641933d9526cee97075d31
|
[
"MIT"
] | null | null | null |
V1_backup/macro_ssh.py
|
YuanYuLin/iopcrestapi_client
|
5c1683d1b5b44bd8bb641933d9526cee97075d31
|
[
"MIT"
] | null | null | null |
V1_backup/macro_ssh.py
|
YuanYuLin/iopcrestapi_client
|
5c1683d1b5b44bd8bb641933d9526cee97075d31
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python2.7
import sys
import time
import pprint
import libiopc_rest as rst
def gen_ssh_key(hostname, out_format):
    """POST an ops request asking the device at *hostname* to generate its SSH key."""
    payload = ''.join(['{', '"ops":"gen_ssh_key"', '}'])
    return rst.http_post_ops_by_pyaload(hostname, payload)
def get_status_until_key_generated(hostname, out_format):
    """Poll the device's SSH status every 2 seconds until the key-generated
    flag is set, then print the final response and return."""
    # Status slot 2 holds the SSH key-generation state on the device.
    ssh_status_id = 2
    while True :
        rsp = rst.http_get_status(hostname, ssh_status_id)
        if int(rsp.status_code) == 200 :
            obj = rsp.json()
            # NOTE(review): '(status | 0x01) == 0x01' is only true when
            # status is 0 or 1; a bit test would normally use '& 0x01'.
            # Confirm the intended semantics against the device API.
            if (obj['status'] | 0x01) == 0x01:
                rst.response_output(out_format, rsp)
                return
        time.sleep(2)
def set_env(hostname, out_format):
    """POST a setenv ops request configuring SSH_AUTH_NAME on the device."""
    parts = ['{', '"ops":"setenv",', '"env":"SSH_AUTH_NAME=mehlow"', '}']
    return rst.http_post_ops_by_pyaload(hostname, ''.join(parts))
def set_authname(hostname, out_format):
    """POST a set_authname ops request and print the formatted response."""
    payload = '{' + '"ops":"set_authname",' + '"name":"helloworld"' + '}'
    response = rst.http_post_ops_by_pyaload(hostname, payload)
    rst.response_output(out_format, response)
def set_authsalt(hostname, out_format):
    """POST a set_authsalt ops request and print the formatted response."""
    payload = '{' + '"ops":"set_authsalt",' + '"salt":"$6$01234$56789"' + '}'
    response = rst.http_post_ops_by_pyaload(hostname, payload)
    rst.response_output(out_format, response)
def set_authhash(hostname, out_format):
    """POST a set_authhash ops request and print the formatted response."""
    payload = ('{'
               + '"ops":"set_authhash",'
               + '"hash":"$6$01234$40kDc/J3OMiWCRafMKQjAU5M6wAgEnKlhpsqFn8t.koNyBcRSguYQwLkIS90F2uHIc7hBPp.HSgCNgl8F955X/"'
               + '}')
    response = rst.http_post_ops_by_pyaload(hostname, payload)
    rst.response_output(out_format, response)
def start_ssh(hostname, out_format):
    """POST a start_ssh ops request to the device.

    Equivalent curl:
      curl -d '{"ops":"start_ssh"}' \
           -H "Content-Type: application/json; charset=utf-8" \
           -A 'iopc-app' -X POST http://<IP_ADDRESS>/api/v1/ops
    """
    return rst.http_post_ops_by_pyaload(hostname, '{"ops":"start_ssh"}')
def stop_ssh(hostname, out_format):
    """POST a stop_ssh ops request to the device."""
    return rst.http_post_ops_by_pyaload(hostname, '{"ops":"stop_ssh"}')
def gen_start_ssh(hostname, out_format):
    """Generate the SSH key, wait until it is ready, then start the SSH server."""
    for step in (gen_ssh_key, get_status_until_key_generated, start_ssh):
        step(hostname, out_format)
# Dispatch table mapping CLI action names to their handler functions.
action_list=[
    {"NAME":"set_env", "FUNCTION":set_env},
    {"NAME":"gen_ssh_key", "FUNCTION":gen_ssh_key},
    {"NAME":"start_ssh", "FUNCTION":start_ssh},
    {"NAME":"stop_ssh", "FUNCTION":stop_ssh},
]
def request_list(hostname, out_format, action):
for act in action_list:
if action == act["NAME"] and act["FUNCTION"]:
status_code, json_objs = act["FUNCTION"](hostname, out_format)
if status_code == 200:
pprint.pprint(json_objs)
else:
print "sub request error: %s" % obj
else:
print ""
def help_usage():
    """Print CLI usage (via rst.out) and exit with status 1."""
    rst.out("rest_cli.py <hostname> <action>")
    rst.out("action:")
    names = [entry["NAME"] for entry in action_list]
    for action_name in names:
        rst.out(" %s," % action_name)
    sys.exit(1)
# CLI entry point: macro_ssh.py <hostname> <action>; output format is JSON.
if __name__ == '__main__':
    if len(sys.argv) < 3:
        help_usage()
    hostname = sys.argv[1]
    action = sys.argv[2]
    request_list(hostname, 'json', action)
| 29.798165
| 139
| 0.638855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 764
| 0.235222
|
54aae49452e8676142b61393e18f197e00851192
| 4,746
|
py
|
Python
|
PatternConverter.py
|
Suitceyes-Project-Code/Tactile-Brush-Python
|
12da563d0988aa3b41c547ee9e1618f30c8b805c
|
[
"MIT"
] | null | null | null |
PatternConverter.py
|
Suitceyes-Project-Code/Tactile-Brush-Python
|
12da563d0988aa3b41c547ee9e1618f30c8b805c
|
[
"MIT"
] | null | null | null |
PatternConverter.py
|
Suitceyes-Project-Code/Tactile-Brush-Python
|
12da563d0988aa3b41c547ee9e1618f30c8b805c
|
[
"MIT"
] | 1
|
2021-10-04T14:27:25.000Z
|
2021-10-04T14:27:25.000Z
|
from Stroke import Stroke
from TactileBrush import TactileBrush
import json
from sortedcontainers import SortedList
EPSILON = 0.001
class Point:
def __init__(self, x : int, y : int):
self.x = int(x)
self.y = int(y)
def __repr__(self):
return "(" + str(self.x) + ", " + str(self.y) + ")"
def __key(self):
return (self.x, self.y)
def __eq__(self, value):
if isinstance(value, Point):
return self.__key() == value.__key()
return NotImplemented
def __hash__(self):
h = hash(self.__key())
return h
class ActuatorValue:
__slots__ = ("pin", "value")
def __init__(self, pin : int, value : float):
self.pin = pin
self.value = value
class Frame:
__slots__ = ("time", "actuators")
def __init__(self, time : float):
self.time = time
self.actuators = set()
class VibrationPattern:
__slots__ = ("isLooped", "duration", "interpolation", "frames")
def __init__(self, duration : float, is_looped : bool, interpolation : int):
self.duration = duration
self.isLooped = is_looped
self.interpolation = interpolation
self.frames = SortedList(key = lambda frame: frame.time) # sort frames by time
def add_frame(self, frame : Frame):
for f in self.frames:
time = abs(f.time - frame.time)
if time < EPSILON:
f.actuators |= frame.actuators
return
self.frames.add(frame)
def to_json(self):
d = dict()
d["isLooped"] = self.isLooped
d["duration"] = self.duration / 1000.0
d["interpolation"] = self.interpolation
d["frames"] = list()
for f in self.frames:
fr = dict()
fr["time"] = f.time / 1000.0
fr["actuators"] = list()
for actuator in f.actuators:
a = dict()
a["pin"] = actuator.pin
a["value"] = actuator.value
fr["actuators"].append(a)
d["frames"].append(fr)
return json.dumps(d, indent=4, sort_keys=True)
class Config:
with open('config.json') as json_file:
config = json.load(json_file)
lines = config["grid"]["lines"]
columns = config["grid"]["columns"]
spacing = config["grid"]["spacing"]
mapping = dict()
for coord in config["mapping"]:
coords_list = coord.split(",")
mapping[Point(coords_list[0], coords_list[1])] = int(config["mapping"][coord])
def create_pattern(motion : dict):
pattern = VibrationPattern(duration, False, 0)
for activation_time, steps in motion.items():
# Create starting frame
start_frame = Frame(activation_time)
for step in steps:
# Calculate end time
end_time = max(0, min(activation_time + step.duration, pattern.duration))
point = Point(step.column, step.line)
# Get pin from config
pin = Config.mapping[point]
value = step.intensity
# Add to starting frame
start_frame.actuators.add(ActuatorValue(pin, value))
# Create end frame
end_frame = Frame(end_time)
end_frame.actuators.add(ActuatorValue(pin, 0.0))
# Add frames
pattern.add_frame(start_frame)
pattern.add_frame(end_frame)
return pattern
def get_position_from_string(s : str):
    """Parse a user-entered position of the form "x,y" into two floats.

    Args:
        s: raw input; surrounding whitespace is ignored.

    Returns:
        tuple: the (x, y) position as floats.

    Raises:
        Exception: if the string does not contain two comma-separated
            decimal values.
    """
    s = s.strip()  # remove whitespace
    try:
        split = s.split(',')
        pos_x = float(split[0])
        pos_y = float(split[1])
    except Exception as e:
        # BUG FIX: the message was missing its closing quote; also chain the
        # underlying parsing error for easier debugging.
        raise Exception("Invalid position was passed. Format must be 'x,y'.") from e
    return pos_x, pos_y
def get_duration_from_string(s : str):
    """Parse a decimal duration (msec) from user input, ignoring whitespace."""
    try:
        return float(s.strip())
    except Exception:
        raise Exception("Invalid duration was passed. A decimal value must be passed.")
if __name__ == "__main__":
    # Interactive driver: read a stroke, compute its steps, print the pattern.
    print("Enter stroke start position (x,y):")
    start_str = input()
    start_x, start_y = get_position_from_string(start_str)
    # BUG FIX: this prompt previously said "start" for the end position too.
    print("Enter stroke end position (x,y):")
    end_str = input()
    end_x, end_y = get_position_from_string(end_str)
    print("Enter duration of stroke in msec:")
    duration_str = input()
    duration = get_duration_from_string(duration_str)
    t = TactileBrush(Config.lines, Config.columns, Config.spacing)
    s = Stroke(start_x, start_y, end_x, end_y, duration, 1)
    motion = t.compute_stroke_steps(s)
    pattern = create_pattern(motion)
    print("Json Pattern:\n")
    print(pattern.to_json())
| 29.849057
| 90
| 0.588074
| 2,442
| 0.514539
| 0
| 0
| 0
| 0
| 0
| 0
| 657
| 0.138432
|
54ab3bd5170524abc405764a761515f4dbe3bb71
| 14,921
|
py
|
Python
|
ConnectedClipboard.py
|
yamanogluberk/ConnectedClipboard
|
93aa04a2075b6ed2b6d50fce39a7c26dd80e8564
|
[
"MIT"
] | null | null | null |
ConnectedClipboard.py
|
yamanogluberk/ConnectedClipboard
|
93aa04a2075b6ed2b6d50fce39a7c26dd80e8564
|
[
"MIT"
] | null | null | null |
ConnectedClipboard.py
|
yamanogluberk/ConnectedClipboard
|
93aa04a2075b6ed2b6d50fce39a7c26dd80e8564
|
[
"MIT"
] | null | null | null |
import select
import socket
import json
import threading
import time
import clipboard
import math
from datetime import datetime
# --- Network identity and protocol constants ---
ip = ""  # this host's LAN IP, filled in by get_ip()
localpart = ""  # last octet of the IP
name = ""
tcp = 5555  # TCP port for protocol messages
udp = 5556  # UDP port for room discovery broadcasts
buffer_size = 1024
broadcast_try_count = 3  # resend broadcasts for reliability over UDP
ping_try_count = 3  # number of pings used to estimate latency
# --- Room/session state ---
members = [] # item - (str) ipaddress
current_room_ip = ""  # owner IP of the room we are in ("" = lobby)
my_room_name = "" # only room owner has this data
discovered_rooms = set() # item - (roomname, roomip)
REQUESTED_ROOM = ("", "")  # room we asked to join, pending approval
# NOTE: clipboard.paste() runs at import time to seed the initial state.
CLIPBOARD_DATA = clipboard.paste()
CLIPBOARD_LOCK = threading.Lock()  # guards clipboard reads/writes
DATA_LOCK = threading.Lock()  # guards latency bookkeeping
# --- Clock-synchronization state (all in msec) ---
SHARED_TIME_BASE = 0  # room owner's clock reading at sync time
PRIVATE_TIME_BASE = 0  # our clock reading at sync time
LATENCY = 0  # accumulated round-trip deltas from pings
RECEIVED_PING_COUNTER = 0
LAST_CHANGED_TS = 0  # shared-clock timestamp of the last clipboard change
# --- UI state ---
is_main_ui = True  # True = lobby screen, False = in-room screen
input_active = True  # False while a join request is pending
def main():
    """Entry point: print the banner, start listener daemons, run the UI loop."""
    print()
    print("*****************************************")
    print("****     WELCOME TO Clipboarder      ****")
    print("*****************************************")
    print()
    get_ip()
    # daemon=True replaces the deprecated Thread.setDaemon() call; the process
    # may exit without waiting for these background listeners.
    listen_udp = threading.Thread(target=start_listening_udp, daemon=True)
    listen_udp.start()
    listen_tcp = threading.Thread(target=start_listening_tcp, daemon=True)
    listen_tcp.start()
    listen_cb = threading.Thread(target=listening_clipboard, daemon=True)
    listen_cb.start()
    send_discover()
    main_ui_info()
    input_ui()
    # Unreachable in practice (input_ui loops until terminate()); kept for parity.
    listen_cb.join()
    listen_udp.join()
    listen_tcp.join()
def input_ui():
    """Read slash-commands from stdin forever and dispatch them.

    Lobby commands: /create <name>, /join <name>, /refresh, /quit.
    In-room commands: /leave, /list. Input is ignored while a join
    request is pending (input_active False).
    """
    global is_main_ui
    global input_active
    while True:
        cmd = input()
        if not input_active:
            continue
        if is_main_ui:
            splitted = cmd.strip().split(" ")
            if len(splitted) >= 2 and splitted[0] == "/create":
                # Room names may contain spaces; rejoin the remainder.
                create_new_room(' '.join(splitted[1:]))
            elif len(splitted) >= 2 and splitted[0] == "/join":
                # Block further input until the owner approves (or join fails).
                input_active = False
                join_room(' '.join(splitted[1:]))
            elif len(splitted) == 1 and splitted[0] == "/quit":
                terminate()
            elif len(splitted) == 1 and splitted[0] == "/refresh":
                discovered_rooms.clear()
                main_ui_info()
                send_discover()
        else:
            if cmd.strip() == "/leave":
                leave_room()
            elif cmd.strip() == "/list":
                list_users()
def main_ui_info():
    """Print the lobby screen: discovered rooms plus the available commands."""
    if not discovered_rooms:
        print()
        print("There is no active rooms in the network!")
        print()
    else:
        # BUG FIX: the "Active rooms:" header used to be re-printed inside the
        # loop, once for every discovered room.
        print("Active rooms:")
        print()
        for item in discovered_rooms:
            print(item[0])
        print()
    print(" ********************************************* ")
    print()
    print("Type /create <roomname> to create a new room")
    print("Type /refresh to refresh active room list")
    print("Type /join <roomname> to join an existing room")
    print("Type /quit to exit the application")
    print()
    print(" ********************************************* ")
def room_ui_info():
    """Print the in-room screen: member count and the available commands."""
    screen = (
        "",
        f"There are {len(members)} members in the room!",
        "",
        " ********************************************* ",
        "",
        "Type /leave to leave the current room",
        "Type /list to list users in the room",
        "",
        " ********************************************* ",
    )
    for line in screen:
        print(line)
def create_new_room(room_name):
    """Become the owner of a new room named *room_name* and enter it."""
    global is_main_ui
    global my_room_name
    global current_room_ip
    my_room_name = room_name
    # The owner's own IP doubles as the room's address.
    current_room_ip = ip
    members.append(ip)
    print("New room created with name ", room_name)
    room_ui_info()
    is_main_ui = False
def join_room(room_name):
    """Request to join the discovered room named *room_name*.

    On a match, sends CONNECT to the owner and records the pending request;
    the UI stays locked until CONNECTION_APPROVED arrives. Otherwise the
    input is re-enabled immediately.
    """
    global is_main_ui
    global input_active
    global REQUESTED_ROOM
    for item in discovered_rooms:
        if room_name == item[0]:
            send_connect(item[1])
            REQUESTED_ROOM = item
            return
    print()
    print("This room doesnt exist!")
    print()
    input_active = True
def leave_room():
    """Leave the current room (or disband it when we are the owner).

    Resets all session and clock-synchronization state and returns to the
    lobby screen.
    """
    global current_room_ip
    global members
    global is_main_ui
    global my_room_name
    global SHARED_TIME_BASE
    global PRIVATE_TIME_BASE
    global LATENCY
    global RECEIVED_PING_COUNTER
    if current_room_ip == ip:
        # Owner: disband the group by kicking every other member.
        for mem in members:
            if mem != ip:
                send_kick(mem)
        my_room_name = ""
    else:
        # Regular member: tell the owner we are leaving.
        send_disconnect(current_room_ip)
    # Common teardown (previously duplicated in both branches).
    current_room_ip = ""
    members.clear()
    main_ui_info()
    is_main_ui = True
    SHARED_TIME_BASE = 0
    PRIVATE_TIME_BASE = 0
    LATENCY = 0
    RECEIVED_PING_COUNTER = 0
def list_users():
    """Print the members of the current room, numbered from 1."""
    print("Current users:")
    for index, member in enumerate(members, start=1):
        print(str(index) + " -> " + member)
def terminate():
    """Exit the application immediately (daemon listener threads die with it)."""
    exit()
def get_ip():
    """Determine this host's LAN IP and store it in the module globals.

    Uses the classic UDP-connect trick: connecting a datagram socket to a
    public address selects the outgoing interface without sending packets.
    Falls back to 127.0.0.1 if no route is available.
    """
    global ip
    global localpart
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    temp = "127.0.0.1"
    try:
        s.connect(("8.8.8.8", 80))
        temp = s.getsockname()[0]
    finally:
        s.close()
    parts = temp.split(".")
    localpart = parts[3]
    ip = temp
def start_listening_udp():
    """Receive discovery datagrams on the UDP port forever.

    BUG FIX: the socket used to be re-created, re-bound and select()ed for
    every single datagram; bind once and block on recv instead.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
        s.bind(("", udp))
        while True:
            msg = s.recv(buffer_size)
            infer_data(msg.decode())
def start_listening_tcp():
    """Accept TCP protocol messages forever and dispatch them.

    Each sender closes its socket after sendall(), so a message is read until
    EOF. Dispatch happens on a daemon thread so a slow handler cannot stall
    the accept loop.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind((ip, tcp))
        s.listen()
        while True:
            conn, addr = s.accept()
            with conn:
                # Accumulate chunks in a list to avoid quadratic str +=.
                chunks = []
                while True:
                    temp = conn.recv(buffer_size)
                    if not temp:
                        break
                    chunks.append(temp.decode())
                data = "".join(chunks)
                # daemon=True replaces the deprecated setDaemon().
                handle_tcp_req = threading.Thread(
                    target=infer_data, args=(data,), daemon=True)
                handle_tcp_req.start()
def infer_data(data):
    """Decode a JSON protocol message and dispatch it by its TYPE field.

    Messages originating from this host (same IP) are ignored. Malformed
    packets are reported but never raise.
    """
    # Dispatch table replaces the long if/elif chain.
    handlers = {
        "DISCOVER_ROOMS": discover_received,
        "RESPOND_ROOM": respond_received,
        "CONNECT": connect_received,
        "DISCONNECT": disconnect_received,
        "CONNECTION_APPROVED": connection_approved_received,
        "NEW_MEMBER": new_member_received,
        "MEMBER_DISCONNECTED": member_disconnected_received,
        "KICK": kick_received,
        "CLIPBOARD": clipboard_received,
        "PING": ping_received,
        "PING_RESPOND": ping_respond_received,
        "REQUEST_TIMESTAMP": receive_timestamp_request,
        "RECEIVE TIMESTAMP": receive_timestamp,
    }
    try:
        message = json.loads(data)
        if message["IP"] == ip:
            return
        handler = handlers.get(message["TYPE"])
        if handler is not None:
            handler(message)
    # except Exception (not bare except) so SystemExit/KeyboardInterrupt pass.
    except Exception:
        print("The received packet is not Json or not the proper practice of the protocol!")
def discover_received(data):
    """Answer a discovery broadcast with our room name, if we own a room."""
    if my_room_name.strip() != "":
        send_respond(data["IP"], my_room_name)
def respond_received(data):
    """Record a (roomname, roomip) advertisement and refresh the lobby view."""
    newroom = (data["DATA"], data["IP"])
    if newroom not in discovered_rooms:
        discovered_rooms.add(newroom)
        main_ui_info()
def connect_received(data):
    """Owner-side handler for a join request.

    Notifies existing members first, then adds the newcomer and sends back
    CONNECTION_APPROVED with the full member list.
    """
    if my_room_name.strip() == "":
        print("Received connect when there is no owned room!!!")
        return
    elif data["IP"] in members:
        # Already a member; approval was presumably lost — do nothing.
        pass
    else:
        for mem in members:
            if mem != ip:
                send_new_member(mem, data["IP"])
        members.append(data["IP"])
        send_connection_approved(data["IP"])
def disconnect_received(data):
    """Owner-side handler: drop a leaving member and inform the others."""
    if data["IP"] in members:
        members.remove(data["IP"])
        for mem in members:
            if mem != ip:
                send_member_disconnected(mem, data["IP"])
def connection_approved_received(data):
    """Finalize joining a room and start the clock-synchronization handshake.

    Sends ping_try_count pings, subtracting the send time from LATENCY for
    each (the responses add the receive time back, leaving the summed
    round-trip). Waits up to ~10s for all responses, then requests the
    owner's timestamp to establish the shared time base.
    """
    global current_room_ip
    global members
    global is_main_ui
    global input_active
    global REQUESTED_ROOM
    global LATENCY
    global RECEIVED_PING_COUNTER
    # Only honor approvals for the room we actually asked to join.
    if current_room_ip == "" and REQUESTED_ROOM[1] == data["IP"]:
        REQUESTED_ROOM = ("", "")
        current_room_ip = data["IP"]
        members = data["DATA"]
        is_main_ui = False
        input_active = True
        room_ui_info()
        for x in range(ping_try_count):
            send_ping(current_room_ip)
            with DATA_LOCK:
                LATENCY = LATENCY - get_current_timestamp()
        counter = 0
        # Poll until every ping was answered; give up after ~10 seconds.
        while RECEIVED_PING_COUNTER != ping_try_count:
            time.sleep(0.1)
            counter = counter + 1
            if counter > 100:
                return
        send_timestamp_request(current_room_ip)
def send_ping(target_ip):
    """Send a latency-probe PING to *target_ip*."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('PING'), target_ip)
def send_ping_respond(target_ip):
    """Answer a latency-probe PING from *target_ip*."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('PING_RESPOND'), target_ip)
def ping_received(data):
    """Owner-side handler: answer pings coming from room members only."""
    global current_room_ip
    if current_room_ip == ip and data["IP"] in members:
        send_ping_respond(data["IP"])
def ping_respond_received(data):
    """Accumulate a ping round-trip into LATENCY.

    send_ping subtracted the send time; adding the receive time here leaves
    one round-trip delta summed into LATENCY per ping.
    """
    global current_room_ip
    global LATENCY
    global RECEIVED_PING_COUNTER
    if current_room_ip == data["IP"]:
        with DATA_LOCK:
            LATENCY = LATENCY + get_current_timestamp()
            #print("PING RESPOND RECEIVED::PING LATENCY --> " + str(LATENCY))
            RECEIVED_PING_COUNTER = RECEIVED_PING_COUNTER + 1
def send_timestamp_request(target_ip):
    """Ask the room owner for its clock reading (time-base sync)."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('REQUEST_TIMESTAMP'), target_ip)
def receive_timestamp_request(data):
    """Owner-side handler: send our clock reading to a requesting member."""
    global current_room_ip
    if current_room_ip == ip and data["IP"] in members:
        send_timestamp(data["IP"])
def send_timestamp(target_ip):
    """Send our current clock reading (msec) for time-base synchronization."""
    ct = get_current_timestamp()
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('RECEIVE TIMESTAMP', ct), target_ip)
def receive_timestamp(data):
    """Establish the shared time base from the owner's clock reading.

    Adds half the average ping round-trip (one-way latency estimate) to the
    owner's timestamp and records our own clock at the same moment.
    """
    global SHARED_TIME_BASE
    global PRIVATE_TIME_BASE
    if current_room_ip == data["IP"]:
        SHARED_TIME_BASE = data["DATA"]
        SHARED_TIME_BASE = SHARED_TIME_BASE + (LATENCY / (ping_try_count * 2))
        PRIVATE_TIME_BASE = get_current_timestamp()
        print("LATENCY --> " + str((LATENCY / (ping_try_count * 2))))
        print("SHARED_TIME_BASE --> " + str(SHARED_TIME_BASE))
        print("PRIVATE_TIME_BASE --> " + str(PRIVATE_TIME_BASE))
def new_member_received(data):
    """Member-side handler: the owner announced a newly joined member."""
    if (data["IP"] == current_room_ip) and (data["DATA"] not in members):
        members.append(data["DATA"])
def member_disconnected_received(data):
    """Member-side handler: the owner announced that a member left."""
    if (data["IP"] == current_room_ip) and (data["DATA"] in members):
        members.remove(data["DATA"])
def kick_received(data):
    """Handle a KICK from the room owner: reset session state, back to lobby."""
    global current_room_ip
    global members
    global is_main_ui
    global my_room_name
    global RECEIVED_PING_COUNTER
    global SHARED_TIME_BASE
    global PRIVATE_TIME_BASE
    global LATENCY
    # Only the current room's owner may kick us.
    if data["IP"] == current_room_ip:
        current_room_ip = ""
        members.clear()
        main_ui_info()
        is_main_ui = True
        SHARED_TIME_BASE = 0
        PRIVATE_TIME_BASE = 0
        LATENCY = 0
        RECEIVED_PING_COUNTER = 0
def listening_clipboard():
    """Poll the OS clipboard every 100ms and broadcast local changes.

    Changes are stamped with the room's shared clock so receivers can keep
    only the newest content.
    """
    global CLIPBOARD_DATA
    global LAST_CHANGED_TS
    while True:
        with CLIPBOARD_LOCK:
            current_clipboard = clipboard.paste()
            if CLIPBOARD_DATA != current_clipboard:
                # Convert our local clock into the room's shared clock.
                clipboard_ts = SHARED_TIME_BASE + (get_current_timestamp() - PRIVATE_TIME_BASE)
                for mem in members:
                    if mem != ip:
                        send_clipboard(mem, clipboard_ts, current_clipboard)
                CLIPBOARD_DATA = current_clipboard
                LAST_CHANGED_TS = clipboard_ts
        time.sleep(0.1)
def clipboard_received(data):
    """Apply a remote clipboard update if it is newer than our last change."""
    global CLIPBOARD_DATA
    global LAST_CHANGED_TS
    with CLIPBOARD_LOCK:
        # Last-writer-wins, decided by the shared-clock timestamp.
        if LAST_CHANGED_TS < data["TIMESTAMP"]:
            CLIPBOARD_DATA = data["DATA"]
            LAST_CHANGED_TS = data["TIMESTAMP"]
            clipboard.copy(CLIPBOARD_DATA)
def send_clipboard(target_ip, clipboard_ts, clipboard_data):
    """Push a clipboard update stamped with the shared-clock timestamp."""
    # get_json_ts already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json_ts('CLIPBOARD', clipboard_ts, clipboard_data), target_ip)
def send_discover():
    """Broadcast a room-discovery request to the local network."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_broadcast(get_json('DISCOVER_ROOMS'))
def send_respond(target_ip, room_name):
    """Answer a discovery request with our room name."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('RESPOND_ROOM', room_name), target_ip)
def send_connect(target_ip):
    """Ask the owner at *target_ip* to let us join its room."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('CONNECT'), target_ip)
def send_disconnect(target_ip):
    """Tell the room owner that we are leaving."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('DISCONNECT'), target_ip)
def send_kick(target_ip):
    """Owner-only: eject the member at *target_ip* (used when disbanding)."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('KICK'), target_ip)
def send_connection_approved(target_ip):
    """Owner-only: approve a join request, sending the full member list."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('CONNECTION_APPROVED', members), target_ip)
def send_new_member(target_ip, member_ip):
    """Owner-only: announce that *member_ip* joined the room."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('NEW_MEMBER', member_ip), target_ip)
def send_member_disconnected(target_ip, member_ip):
    """Owner-only: announce that *member_ip* left the room."""
    # get_json already returns a str; the f-string wrapper was redundant.
    send_message_tcp(get_json('MEMBER_DISCONNECTED', member_ip), target_ip)
def send_broadcast(data):
    """UDP-broadcast *data* broadcast_try_count times for reliability."""
    for x in range(broadcast_try_count):
        # Context manager guarantees the socket is closed even if sendto fails
        # (the old code leaked the fd on error).
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.bind(('', 0))
            s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
            s.sendto(data.encode(), ('<broadcast>', udp))
def send_message_tcp(data, destination):
    """Send *data* to *destination* over TCP on a background daemon thread."""
    thread = threading.Thread(target=send_message_thread, args=(data, destination), daemon=True)
    thread.start()
def send_message_thread(packet, destination):
    """Open a short-lived TCP connection to *destination* and send *packet*.

    Connection failures are reported rather than raised: members may simply
    have gone offline.
    """
    # Removed an unused `global current_room_ip` declaration.
    try:
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.settimeout(1)
            s.connect((destination, tcp))
            s.sendall(packet.encode())
    # except Exception (not bare except) so SystemExit/KeyboardInterrupt pass.
    except Exception:
        print("!! Unexpected offline member detected !!")
def get_json(typename, data=None):
    """Serialize a protocol packet of *typename* carrying optional *data*."""
    return json.dumps({"IP": ip, "TYPE": typename, "DATA": data})
def get_json_ts(typename, timestamp, data):
    """Serialize a timestamped protocol packet (used for clipboard updates)."""
    return json.dumps(
        {"IP": ip, "TYPE": typename, "TIMESTAMP": timestamp, "DATA": data})
def get_current_timestamp():
    """Return the current wall-clock time in whole milliseconds."""
    return math.floor(datetime.now().timestamp() * 1000)
# Run the interactive client only when executed as a script.
if __name__ == '__main__':
    main()
| 27.079855
| 96
| 0.602171
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,296
| 0.153877
|
54ae8f3aab6c6047677661a66e0ddd7fd0d3d3e9
| 9,728
|
py
|
Python
|
paddleslim/prune/auto_pruner.py
|
liuqiaoping7/PaddleSlim
|
083003661af893e92cd7bb9017e7d4a3761c7b20
|
[
"Apache-2.0"
] | null | null | null |
paddleslim/prune/auto_pruner.py
|
liuqiaoping7/PaddleSlim
|
083003661af893e92cd7bb9017e7d4a3761c7b20
|
[
"Apache-2.0"
] | null | null | null |
paddleslim/prune/auto_pruner.py
|
liuqiaoping7/PaddleSlim
|
083003661af893e92cd7bb9017e7d4a3761c7b20
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import logging
import numpy as np
import paddle.fluid as fluid
from .pruner import Pruner
from ..core import VarWrapper, OpWrapper, GraphWrapper
from ..common import SAController
from ..common import get_logger
from ..analysis import flops
from ..common import ControllerServer
from ..common import ControllerClient
__all__ = ["AutoPruner"]
_logger = get_logger(__name__, level=logging.INFO)
class AutoPruner(object):
    """
    Search a group of ratios used to prune program.
    Args:
        program(Program): The program to be pruned.
        scope(Scope): The scope to be pruned.
        place(fluid.Place): The device place of parameters.
        params(list<str>): The names of parameters to be pruned.
        init_ratios(list<float>|float): Init ratios used to pruned parameters in `params`.
            List means ratios used for pruning each parameter in `params`.
            The length of `init_ratios` should be equal to length of params when `init_ratios` is a list.
            If it is a scalar, all the parameters in `params` will be pruned by uniform ratio.
            None means get a group of init ratios by `pruned_flops` of `pruned_latency`. Default: None.
        pruned_flops(float): The percent of FLOPS to be pruned. Default: None.
        pruned_latency(float): The percent of latency to be pruned. Default: None.
        server_addr(tuple): A tuple of server ip and server port for controller server.
        init_temperature(float): The init temperature used in simulated annealing search strategy.
        reduce_rate(float): The decay rate used in simulated annealing search strategy.
        max_try_times(int): The max number of trying to generate legal tokens.
        max_client_num(int): The max number of connections of controller server.
        search_steps(int): The steps of searching.
        max_ratios(float|list<float>): Max ratios used to pruned parameters in `params`.
            List means max ratios for each parameter in `params`.
            The length of `max_ratios` should be equal to length of params when `max_ratios` is a list.
            If it is a scalar, it will used for all the parameters in `params`.
        min_ratios(float|list<float>): Min ratios used to pruned parameters in `params`.
            List means min ratios for each parameter in `params`.
            The length of `min_ratios` should be equal to length of params when `min_ratios` is a list.
            If it is a scalar, it will used for all the parameters in `params`.
        key(str): Identity used in communication between controller server and clients.
        is_server(bool): Whether current host is controller server. Default: True.
    """

    def __init__(self,
                 program,
                 scope,
                 place,
                 params=None,
                 init_ratios=None,
                 pruned_flops=0.5,
                 pruned_latency=None,
                 server_addr=("", 0),
                 init_temperature=100,
                 reduce_rate=0.85,
                 max_try_times=300,
                 max_client_num=10,
                 search_steps=300,
                 max_ratios=[0.9],
                 min_ratios=[0],
                 key="auto_pruner",
                 is_server=True):
        self._program = program
        self._scope = scope
        self._place = place
        # Default of [] is preserved, without the shared-mutable-default pitfall.
        self._params = params if params is not None else []
        self._init_ratios = init_ratios
        self._pruned_flops = pruned_flops
        self._pruned_latency = pruned_latency
        self._reduce_rate = reduce_rate
        self._init_temperature = init_temperature
        self._max_try_times = max_try_times
        self._is_server = is_server

        self._range_table = self._get_range_table(min_ratios, max_ratios)

        self._pruner = Pruner()
        if self._pruned_flops:
            self._base_flops = flops(program)
            self._max_flops = self._base_flops * (1 - self._pruned_flops)
            _logger.info(
                "AutoPruner - base flops: {}; pruned_flops: {}; max_flops: {}".
                format(self._base_flops, self._pruned_flops, self._max_flops))
        if self._pruned_latency:
            # NOTE(review): `latency` is neither defined nor imported in this
            # module; this branch raises NameError when pruned_latency is set.
            self._base_latency = latency(program)

        if self._init_ratios is None:
            # BUG FIX: the original call was
            # `self._get_init_ratios(self, _program, ...)`, which both passed
            # `self` as the program argument and referenced an undefined bare
            # name `_program` (NameError at runtime).
            self._init_ratios = self._get_init_ratios(
                self._program, self._params, self._pruned_flops,
                self._pruned_latency)
        init_tokens = self._ratios2tokens(self._init_ratios)
        _logger.info("range table: {}".format(self._range_table))
        controller = SAController(
            self._range_table,
            self._reduce_rate,
            self._init_temperature,
            self._max_try_times,
            init_tokens,
            constrain_func=self._constrain_func)

        server_ip, server_port = server_addr
        if server_ip is None or server_ip == "":
            server_ip = self._get_host_ip()

        self._controller_server = ControllerServer(
            controller=controller,
            address=(server_ip, server_port),
            max_client_num=max_client_num,
            search_steps=search_steps,
            key=key)

        # create controller server
        if self._is_server:
            self._controller_server.start()

        self._controller_client = ControllerClient(
            self._controller_server.ip(),
            self._controller_server.port(),
            key=key)

        self._iter = 0
        self._param_backup = {}

    def _get_host_ip(self):
        """Return this host's IP for the controller server address."""
        return socket.gethostbyname(socket.gethostname())

    def _get_init_ratios(self, program, params, pruned_flops, pruned_latency):
        """Derive init ratios from the pruning targets. Not implemented yet."""
        pass

    def _get_range_table(self, min_ratios, max_ratios):
        """Build the (min_tokens, max_tokens) search range from ratio bounds."""
        assert isinstance(min_ratios, list) or isinstance(min_ratios, float)
        assert isinstance(max_ratios, list) or isinstance(max_ratios, float)
        # Broadcast scalar bounds over all parameters.
        min_ratios = min_ratios if isinstance(
            min_ratios, list) else [min_ratios] * len(self._params)
        max_ratios = max_ratios if isinstance(
            max_ratios, list) else [max_ratios] * len(self._params)
        min_tokens = self._ratios2tokens(min_ratios)
        max_tokens = self._ratios2tokens(max_ratios)
        return (min_tokens, max_tokens)

    def _constrain_func(self, tokens):
        """Return True when pruning with *tokens* satisfies the FLOPS budget."""
        ratios = self._tokens2ratios(tokens)
        # only_graph=True: evaluate FLOPS on a copy without touching weights.
        pruned_program, _, _ = self._pruner.prune(
            self._program,
            self._scope,
            self._params,
            ratios,
            place=self._place,
            only_graph=True)
        current_flops = flops(pruned_program)
        result = current_flops < self._max_flops
        if not result:
            _logger.info("Failed try ratios: {}; flops: {}; max_flops: {}".
                         format(ratios, current_flops, self._max_flops))
        else:
            _logger.info("Success try ratios: {}; flops: {}; max_flops: {}".
                         format(ratios, current_flops, self._max_flops))
        return result

    def prune(self, program, eval_program=None):
        """
        Prune program with latest tokens generated by controller.
        Args:
            program(fluid.Program): The program to be pruned.
            eval_program(fluid.Program): An optional evaluation program; it is
                pruned graph-only (weights shared with `program`).
        Returns:
            paddle.fluid.Program: The pruned program (and the pruned
            evaluation program, or None).
        """
        self._current_ratios = self._next_ratios()
        pruned_program, _, _ = self._pruner.prune(
            program,
            self._scope,
            self._params,
            self._current_ratios,
            place=self._place,
            only_graph=False,
            param_backup=self._param_backup)
        pruned_val_program = None
        if eval_program is not None:
            pruned_val_program, _, _ = self._pruner.prune(
                program,
                self._scope,
                self._params,
                self._current_ratios,
                place=self._place,
                only_graph=True)
        _logger.info("AutoPruner - pruned ratios: {}".format(
            self._current_ratios))
        return pruned_program, pruned_val_program

    def reward(self, score):
        """
        Return reward of current pruned program.
        Args:
            score(float): The score of pruned program.
        """
        # Restore the original weights before trying the next candidate.
        self._restore(self._scope)
        self._param_backup = {}
        tokens = self._ratios2tokens(self._current_ratios)
        self._controller_client.update(tokens, score, self._iter)
        self._iter += 1

    def _restore(self, scope):
        """Write the backed-up parameter tensors back into *scope*."""
        for param_name in self._param_backup.keys():
            param_t = scope.find_var(param_name).get_tensor()
            param_t.set(self._param_backup[param_name], self._place)

    def _next_ratios(self):
        """Fetch the next candidate tokens from the controller as ratios."""
        tokens = self._controller_client.next_tokens()
        return self._tokens2ratios(tokens)

    def _ratios2tokens(self, ratios):
        """Convert pruned ratios to tokens.
        """
        return [int(ratio / 0.01) for ratio in ratios]

    def _tokens2ratios(self, tokens):
        """Convert tokens to pruned ratios.
        """
        return [token * 0.01 for token in tokens]
| 39.384615
| 106
| 0.628701
| 8,706
| 0.894942
| 0
| 0
| 0
| 0
| 0
| 0
| 3,583
| 0.368318
|
54afe8421a6919e6ea315d052ac2b1d84c0d0ecd
| 387
|
py
|
Python
|
model-creator.py
|
LouisRoss/spiking-model-packager
|
de75a923e7332b73cb7252300af91d4620b6e801
|
[
"MIT"
] | null | null | null |
model-creator.py
|
LouisRoss/spiking-model-packager
|
de75a923e7332b73cb7252300af91d4620b6e801
|
[
"MIT"
] | null | null | null |
model-creator.py
|
LouisRoss/spiking-model-packager
|
de75a923e7332b73cb7252300af91d4620b6e801
|
[
"MIT"
] | null | null | null |
import sys
import json
from h5model import h5model
# CLI entry: create a model named by the first argument via h5model.
if len(sys.argv) < 2:
    print('Usage: ' + sys.argv[0] + ' ' + '<model name>')
    exit(1)
modelName = sys.argv[1]
model = h5model(modelName)
model.createModel()
# HTTP-style status: anything >= 400 is treated as failure.
if model.responseStatus >= 400:
    print("Unable to create model '" + modelName + "': " + model.errorMessage, file = sys.stderr)
    exit(1)
print(model.responseSuccessPayload)
| 22.764706
| 95
| 0.687339
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 57
| 0.147287
|
54b1f3e83d93705cfe337ba5f02b4044fdd2e4b8
| 70
|
py
|
Python
|
decimal to binary.py
|
Kshitijkrishnadas/haribol
|
ca45e633baaabaad3bb923f5633340ccf88d996c
|
[
"bzip2-1.0.6"
] | null | null | null |
decimal to binary.py
|
Kshitijkrishnadas/haribol
|
ca45e633baaabaad3bb923f5633340ccf88d996c
|
[
"bzip2-1.0.6"
] | null | null | null |
decimal to binary.py
|
Kshitijkrishnadas/haribol
|
ca45e633baaabaad3bb923f5633340ccf88d996c
|
[
"bzip2-1.0.6"
] | null | null | null |
def to_binary(n):
    """Return the binary digits of a non-negative integer as a string.

    BUG FIX: the original inline loop printed an empty string for n == 0
    and looped forever on negative input (floor division never reaches 0).

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    if n == 0:
        return '0'
    a = ''
    while n != 0:
        a = str(n % 2) + a
        n //= 2
    return a

if __name__ == "__main__":
    print(to_binary(int(input())))
| 10
| 16
| 0.457143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2
| 0.028571
|
54b2b1435e7c0cbedc57669a7f3b6443192e3d9f
| 4,887
|
py
|
Python
|
settings/base.py
|
anthill-gaming/media
|
cc3292be8bd83aba6054e420124adabcfa4e3a8b
|
[
"MIT"
] | 1
|
2018-11-30T21:56:14.000Z
|
2018-11-30T21:56:14.000Z
|
settings/base.py
|
anthill-gaming/media
|
cc3292be8bd83aba6054e420124adabcfa4e3a8b
|
[
"MIT"
] | null | null | null |
settings/base.py
|
anthill-gaming/media
|
cc3292be8bd83aba6054e420124adabcfa4e3a8b
|
[
"MIT"
] | null | null | null |
from anthill.framework.utils.translation import translate_lazy as _
from anthill.platform.conf.settings import *
import os
# Build paths inside the application like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load it
# from the environment before any production deployment.
SECRET_KEY = 'nrc_!b1_n4!7cx!4!^&hfu^5axl3_fhki)rbyavnh@mthrk@op'
DEBUG = False
ADMINS = (
    ('Lysenko Vladimir', 'wofkin@gmail.com'),
)
SQLALCHEMY_DATABASE_URI = 'postgres://anthill_media@/anthill_media'
# Public location of this service and the message broker.
LOCATION = 'http://localhost:9615'
BROKER = 'amqp://guest:guest@localhost:5672'
# ROUTES_CONF = 'media.routes'
LOCALE_PATH = os.path.join(BASE_DIR, 'locale')
# User-uploaded files live under MEDIA_ROOT and are served from MEDIA_URL.
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# APPLICATION_CLASS = 'media.apps.AnthillApplication'
APPLICATION_NAME = 'media'
APPLICATION_VERBOSE_NAME = _('Media')
APPLICATION_DESCRIPTION = _('Manage user uploaded files')
APPLICATION_ICON_CLASS = 'icon-file-media'
APPLICATION_COLOR = 'teal'
# SERVICE_CLASS = 'media.services.Service'
# Service-specific cache database and key namespace.
CACHES["default"]["LOCATION"] = "redis://localhost:6379/25"
CACHES["default"]["KEY_PREFIX"] = "media.anthill"
EMAIL_SUBJECT_PREFIX = '[Anthill: media] '
# Logging: console in DEBUG, rotating file always, admin e-mail on errors.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'anthill.framework.utils.log.RequireDebugFalse',
        },
        'require_debug_true': {
            '()': 'anthill.framework.utils.log.RequireDebugTrue',
        },
    },
    'formatters': {
        'anthill.server': {
            '()': 'anthill.framework.utils.log.ServerFormatter',
            'fmt': '%(color)s[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]%(end_color)s %(message)s',
            'datefmt': '%Y-%m-%d %H:%M:%S',
            'color': False,
        }
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'filters': ['require_debug_true'],
            'class': 'logging.StreamHandler',
            'formatter': 'anthill.server',
        },
        'anthill.server': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': os.path.join(LOGGING_ROOT_DIR, 'media.log'),
            'formatter': 'anthill.server',
            'maxBytes': 100 * 1024 * 1024,  # 100 MiB
            'backupCount': 10
        },
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'anthill.framework.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'anthill': {
            'handlers': ['console', 'mail_admins'],
            'level': 'INFO',
        },
        'anthill.application': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'tornado.access': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'tornado.application': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'tornado.general': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'celery': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'celery.worker': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'celery.task': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'celery.redirected': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
        'asyncio': {
            'handlers': ['anthill.server'],
            'level': 'INFO',
            'propagate': False
        },
    }
}
#########
# GEOIP #
#########
GEOIP_PATH = os.path.join(BASE_DIR, '../')
#########
# HTTPS #
#########
# HTTPS = {
#     'key_file': os.path.join(BASE_DIR, '../server.key'),
#     'crt_file': os.path.join(BASE_DIR, '../server.crt'),
# }
HTTPS = None
############
# GRAPHENE #
############
GRAPHENE = {
    'SCHEMA': 'media.api.v1.public.schema',
    'MIDDLEWARE': ()
}
#############
# THUMBNAIL #
#############
# Defaults applied to every generated thumbnail unless an alias overrides them.
THUMBNAIL_DEFAULT_OPTIONS = {
    'resize': 'fill',  # 'fill', 'fit', 'stretch'
    'upscale': True,
    'format': None,  # 'JPEG', 'PNG'
    'quality': 90,
    'progressive': True,
    'orientation': True,
    'optimize': False,
}
THUMBNAIL_ALIASES = {
    'test': {
        'geometry': '250x250',
        'filters': [('crop', '250x250', 'center', 'center')],
        'options': {'optimize': True, 'quality': 90, 'format': 'PNG'}
    }
}
THUMBNAIL_DIR = 'thumbs'
| 27
| 108
| 0.529568
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,618
| 0.535707
|
54b51b30bb070d1462b530e3aafb5daba4e65245
| 2,787
|
py
|
Python
|
odmltables/gui/wizutils.py
|
fabianschlebusch/python-odmltables
|
90a7833516afe8864b40947f4a1757830a0dc44c
|
[
"BSD-3-Clause"
] | 6
|
2017-10-27T16:59:53.000Z
|
2021-03-02T06:08:48.000Z
|
odmltables/gui/wizutils.py
|
fabianschlebusch/python-odmltables
|
90a7833516afe8864b40947f4a1757830a0dc44c
|
[
"BSD-3-Clause"
] | 68
|
2016-01-26T10:48:16.000Z
|
2021-11-16T10:09:49.000Z
|
odmltables/gui/wizutils.py
|
fabianschlebusch/python-odmltables
|
90a7833516afe8864b40947f4a1757830a0dc44c
|
[
"BSD-3-Clause"
] | 7
|
2015-11-24T12:40:18.000Z
|
2021-04-14T08:02:53.000Z
|
# -*- coding: utf-8 -*-
import os, sys
from PyQt5.QtWidgets import (QWizard, QMessageBox)
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import pyqtSlot, Qt
try:
import odmltables
have_odmltables = True
except:
have_odmltables = False
from .settings import Settings
class OdmltablesWizard(QWizard):
def __init__(self, wizname, parent=None):
super(OdmltablesWizard, self).__init__(parent)
self.wizname = wizname
self.settingsfile = os.path.join(os.path.expanduser("~"),
'.odmltables',
wizname.replace(' ', '').lower() + '.conf')
# initialize settings
self.settings = Settings(self.settingsfile)
# setting starting page of wizard
# self.setStartId(0)
self.setOption(self.IndependentPages, False)
# images won't show in Windows 7 if style not set
self.setWizardStyle(self.ModernStyle)
self.setOption(self.HaveHelpButton, True)
logo_filename = "odMLtables_100x100.png"
logo_dirs = [os.path.join(os.path.dirname(__file__), '..', '..', 'logo'),
os.path.join(sys.prefix, 'share/pixmaps')]
for logo_dir in logo_dirs:
filepath = os.path.join(logo_dir, logo_filename)
if os.path.exists(filepath):
self.setPixmap(QWizard.LogoPixmap, QPixmap(filepath))
# set up help messages
self._lastHelpMsg = ''
self._helpMsgs = self._createHelpMsgs()
self.helpRequested.connect(self._showHelp)
self.setWindowTitle(self.tr(wizname))
def _createHelpMsgs(self):
raise NotImplementedError()
@pyqtSlot()
def _showHelp(self):
# get the help message for the current page
msg = self._helpMsgs[self.currentId()]
# # if same as last message, display alternate message
# if msg == self._lastHelpMsg:
# msg = self._helpMsgs[self.NUM_PAGES + 1]
doc_link = "<p>For detailed information about odMLtables refer to the " \
"<a href='http://pythonhosted.org/python-odmltables'>odMLtables " \
"documentation</a>.</p>"
msgBox = QMessageBox()
msgBox.setWindowTitle("Help")
msgBox.setTextFormat(Qt.RichText)
msgBox.setText(msg + doc_link)
msgBox.exec_()
# QMessageBox.information(self,
# self.tr(self.wizname),
# msg)
# self._lastHelpMsg = msg
def get_graphic_path():
if have_odmltables:
data_path = os.path.join(os.path.dirname(odmltables.__file__),
'gui',
'graphics')
return data_path
| 32.406977
| 86
| 0.588805
| 2,272
| 0.815213
| 0
| 0
| 851
| 0.305346
| 0
| 0
| 728
| 0.261213
|
54b6c94b65480166ee80c689e0b477e97f134499
| 25,440
|
py
|
Python
|
trainLib.py
|
dorukb/ceng445-trainSim
|
01af1c556dbce4e3f1c07fc16a21cd94cdeb7884
|
[
"MIT"
] | null | null | null |
trainLib.py
|
dorukb/ceng445-trainSim
|
01af1c556dbce4e3f1c07fc16a21cd94cdeb7884
|
[
"MIT"
] | null | null | null |
trainLib.py
|
dorukb/ceng445-trainSim
|
01af1c556dbce4e3f1c07fc16a21cd94cdeb7884
|
[
"MIT"
] | null | null | null |
import math
#constants and globals
background = '0'
NORTH = 0
EAST = 1
SOUTH = 2
WEST = 3
dirs = {0 : "NORTH", 1 : "EAST", 2 : "SOUTH", 3 : "WEST"}
class CellElement(): #CellElement interface for the subclasses
    """Interface implemented by every grid tile.

    Concrete subclasses: RegularRoad, SwitchRoad, LevelCrossing,
    BridgeCrossing, Station. All methods here are no-op stubs that
    subclasses override.
    """
    def setPosition(self, x, y):
        # Record the tile's (row, col) grid coordinates.
        return
    def setOrientation(self, a):
        # Rotate the tile ``a`` times 90 degrees clockwise.
        return
    def switchState(self):
        # Toggle internal state (meaningful only for switch tiles).
        return
    def getDuration(self, entdir):
        # Ticks needed to traverse the tile when entered from ``entdir``.
        return
    def getStop(self, entdir):
        # Extra waiting time on this tile (e.g. at stations).
        return
    def nextCell(self,entdir):
        # Neighbouring cell a train exits into when entered from ``entdir``.
        return
    # NOTE(review): ``self`` is missing here; calling getView() on a bare
    # CellElement instance would raise TypeError. Harmless as long as every
    # subclass overrides it — confirm before fixing.
    def getView():
        return
    # Additional interface methods added by us
    def setCwRot(self):
        # Single 90-degree clockwise rotation step.
        return
    def canEnter(self, entdir): # it checks the availability of the next cell in case of there is another train.
        return
    def getPos(self):
        # Return the tile's (row, col) position.
        return
class GameGrid():
    """Board of tiles plus a printable view and the list of active trains."""

    def __init__(self, row, col):
        self.row = row
        self.col = col
        self.grid = []
        self.view = []
        # Train references, drawn on top of the tile view during simulation.
        self.activeTrains = []
        # Fill the board with background-looking RegularRoad tiles. Reusing
        # RegularRoad with '_' visuals avoids a dedicated "empty" subclass.
        for r in range(row):
            tile_row = []
            view_row = []
            for c in range(col):
                tile = RegularRoad(True, self.grid)
                tile.visuals = '_'
                tile.setPosition(r, c)
                tile_row.append(tile)
                # The view grid mirrors the tiles' symbols for display.
                view_row.append(tile.visuals)
            self.grid.append(tile_row)
            self.view.append(view_row)

    def addElement(self, cellElm, row, col):
        """Place ``cellElm`` at (row, col), keeping grid and view in sync."""
        cellElm.setPosition(row, col)
        self.grid[row][col] = cellElm
        self.view[row][col] = cellElm.visuals

    def removeElement(self, row, col):
        """Reset (row, col) back to a background tile."""
        filler = RegularRoad(True, self.grid)
        filler.visuals = '_'
        self.grid[row][col] = filler
        self.view[row][col] = '_'

    def display(self):
        """Print the current view grid to stdout, one row per line."""
        for symbols in self.view:
            for symbol in symbols:
                print(symbol, end=' ')
            print('\n')

    def isOutOfBounds(self, i, j):
        """Return True when (i, j) lies outside the board."""
        return i < 0 or j < 0 or i >= self.row or j >= self.col

    def updateView(self):
        # The view is kept in sync incrementally by add/removeElement.
        return

    def startSimulation(self):
        return

    def setPauseResume(self):
        return

    def stopSimulation(self):
        return

    def spawnTrain(self, wagonCount, row, col):
        """Create and register a train at (row, col); returns the Train."""
        if self.isOutOfBounds(row, col):
            print("invalid spawn pos for train.", row, col)
            return
        new_train = Train(wagonCount, self.grid[row][col], self)
        # Registration matters once multiple trains exist in later phases.
        self.registerTrain(new_train)
        return new_train

    def registerTrain(self, train):
        """Track ``train`` as active on this grid."""
        self.activeTrains.append(train)

    def trainDisappear(self, train):
        """Drop ``train`` from the active list (left the board)."""
        self.activeTrains.remove(train)

    def hasTrain(self, row, col):
        """Return True when any active train engine occupies (row, col)."""
        return any(t.enginePosRow == row and t.enginePosCol == col
                   for t in self.activeTrains)
class RegularRoad(CellElement):
    """A road with one entrance and one exit: straight or a right turn.

    A left turn is represented as a right turn rotated once clockwise (see
    makeLeftTurn), so no separate class is needed.
    """

    def __init__(self, isStraight, gridRef):
        self.visuals = '_'
        self.rotationCount = 0
        # Grid reference, needed by nextCell() to fetch neighbours.
        self.myGrid = gridRef
        self.row = -1
        self.col = -1
        # If not straight, this piece is a right turn by default.
        self.isRegular = isStraight
        if isStraight:
            self.dir1, self.dir2 = SOUTH, NORTH
            self.visuals = '|'
        else:
            # Default curved piece is a right turn; rotate once CW to get
            # a left turn when needed.
            self.visuals = 'R'
            self.dir1, self.dir2 = SOUTH, EAST

    def makeLeftTurn(self):
        """Convert this right-turn piece into a left turn; returns self."""
        self.visuals = 'L'
        # Reset the counter so the left turn counts as a base orientation.
        self.rotationCount = 0
        self.setOrientation(1, False)
        return self

    def setPosition(self, row, col):
        """Record the tile's grid coordinates."""
        self.row, self.col = row, col

    def setCwRot(self):
        """Rotate both connection directions 90 degrees clockwise."""
        self.dir1 = (self.dir1 + 1) % 4
        self.dir2 = (self.dir2 + 1) % 4

    def setOrientation(self, rotationAmount, incrRot : bool = True):
        """Apply ``rotationAmount`` CW rotations; incrRot=False skips the counter."""
        if incrRot:
            self.rotationCount = (self.rotationCount + rotationAmount) % 4
        for _ in range(rotationAmount):
            self.setCwRot()

    def switchState(self):
        return

    def getDuration(self, entdir):
        # A plain road always takes one tick to traverse.
        return 1

    def getStop(self, entdir):
        # Trains never stop on a plain road.
        return 0

    def nextCell(self, entdir):
        """Return the neighbouring cell a train exits into, or None.

        Also records the exit direction in ``self.exitDir``, which
        Train.advance() reads afterwards.
        """
        self.exitDir = None
        if entdir == self.dir1:
            self.exitDir = self.dir2
        elif entdir == self.dir2:
            self.exitDir = self.dir1
        else:
            # Entering from a non-connected side: dead end.
            return None
        # Map the exit direction onto a (row, col) offset and fetch the cell.
        step_row, step_col = {NORTH: (-1, 0), SOUTH: (1, 0),
                              WEST: (0, -1), EAST: (0, 1)}[self.exitDir]
        target_row = self.row + step_row
        target_col = self.col + step_col
        if self.myGrid.isOutOfBounds(target_row, target_col):
            return None
        return self.myGrid.grid[target_row][target_col]

    def getPos(self):
        return self.row, self.col

    def getView(self):
        return self.visuals

    def canEnter(self, entdir):
        """A train may enter only through one of the two connected sides."""
        return entdir in (self.dir1, self.dir2)
class SwitchRoad(CellElement):
    """A switch tile assembled from RegularRoad pieces.

    switchType 1: straight + right turn
    switchType 2: straight + left turn
    switchType 3: straight + right turn + left turn
    switchState() changes which piece is currently active.
    """
    def __init__(self, typeofSwitch, gridRef):
        # A switch is modelled as a combination of RegularRoad 'pieces'.
        self.visuals = 'S'
        self.myGrid = gridRef
        self.rotationCount = 0
        self.switchType = typeofSwitch # int value 1, 2 or 3
        # Every switch type contains a straight piece, so it is added to
        # the pieces dictionary by default.
        self.pieces = {'direct' : RegularRoad(True, gridRef)}
        # Tracks which part of the switch is active; changed by
        # switchState(). The straight piece starts active.
        self.activePiece = self.pieces['direct']
        self.enter = SOUTH #default entrance side for all switch types
        self.switchDelay = 2 #extra traversal time on a switch
        if(self.switchType == 1):
            # straight + right turn
            self.pieces['rightTurn'] = RegularRoad(False, gridRef)
        elif(self.switchType == 2):
            # straight + left turn. A turn piece starts as a right turn and
            # is rotated once CW (without counting the rotation) to become
            # a left turn.
            self.pieces['leftTurn'] = RegularRoad(False, gridRef)
            self.pieces['leftTurn'].setOrientation(1, False)
        elif(self.switchType == 3):
            # straight + right turn + left turn
            self.pieces['rightTurn'] = RegularRoad(False, gridRef)
            self.pieces['leftTurn'] = RegularRoad(False, gridRef)
            self.pieces['leftTurn'].setOrientation(1, False)
        return
    def setPosition(self, row, col):
        # Record the switch's grid coordinates.
        self.row = row
        self.col = col
        return
    def setCwRot(self):
        # 90-degree CW rotation: S->W, W->N and so on, applied to the
        # entrance side and to every piece.
        self.enter = (self.enter + 1) % 4
        if(self.switchType == 1):
            self.pieces['rightTurn'].setOrientation(1)
            self.pieces['direct'].setOrientation(1)
        elif(self.switchType == 2):
            self.pieces['leftTurn'].setOrientation(1)
            self.pieces['direct'].setOrientation(1)
        else: #switchType is 3
            self.pieces['rightTurn'].setOrientation(1)
            self.pieces['direct'].setOrientation(1)
            self.pieces['leftTurn'].setOrientation(1)
        return
    def setOrientation(self, rotationAmount):
        """Rotate the whole switch ``rotationAmount`` times 90 degrees CW."""
        self.rotationCount = (self.rotationCount + rotationAmount) % 4
        for i in range(0, rotationAmount):
            self.setCwRot()
        return
    def switchState(self):
        """Cycle the active piece.

        Type 1 toggles direct <-> rightTurn; type 2 toggles direct <->
        leftTurn; type 3 cycles direct -> rightTurn -> leftTurn -> direct.
        """
        if(self.switchType == 1):
            if(self.activePiece == self.pieces['direct']):
                self.activePiece = self.pieces['rightTurn']
            else:
                self.activePiece = self.pieces['direct']
        elif(self.switchType == 2):
            if(self.activePiece == self.pieces['direct']):
                self.activePiece = self.pieces['leftTurn']
            else:
                self.activePiece = self.pieces['direct']
        elif(self.switchType == 3):
            if(self.activePiece == self.pieces['direct']):
                self.activePiece = self.pieces['rightTurn']
            elif(self.activePiece == self.pieces['rightTurn']):
                self.activePiece = self.pieces['leftTurn']
            else:
                self.activePiece = self.pieces['direct']
        return
    def getDuration(self, entdir):
        # Traversal time of the active piece plus the switch delay.
        return self.activePiece.getDuration(entdir) + self.switchDelay
    def getStop(self, entdir):
        # Trains do not stop on a switch.
        return self.activePiece.getStop(entdir)
    def nextCell(self,entdir):
        """Return the neighbouring cell the train exits into, or None.

        Entering through the switch's entrance side routes the train along
        the active piece; entering from any other side sends it back out
        through the entrance side.
        """
        if(entdir == self.enter):
            self.exitDir = None
            if(self.activePiece.dir1 == entdir):
                self.exitDir = self.activePiece.dir2
            elif(self.activePiece.dir2 == entdir):
                self.exitDir = self.activePiece.dir1
            else:
                print("invalid entry direction for this cell.")
                return None
        else:
            self.exitDir = self.enter
        # According to exitDir, return the neighbouring cell when in bounds.
        if(self.exitDir == NORTH and self.myGrid.isOutOfBounds(self.row-1, self.col) == False):
            # row-1, col unchanged
            return(self.myGrid.grid[self.row-1][self.col] )
        elif(self.exitDir == SOUTH and self.myGrid.isOutOfBounds(self.row+1, self.col) == False):
            # row+1, col unchanged
            return(self.myGrid.grid[self.row+1][self.col])
        elif(self.exitDir == WEST and self.myGrid.isOutOfBounds(self.row, self.col-1) == False):
            # col-1, row unchanged
            return(self.myGrid.grid[self.row][self.col-1])
        elif(self.exitDir == EAST and self.myGrid.isOutOfBounds(self.row, self.col+1) == False):
            # col+1, row unchanged
            return(self.myGrid.grid[self.row][self.col+1])
        else: #no available cell is found
            return None
    def getView(self):
        # Printable symbol of the switch.
        return self.visuals
    def getPos(self):
        return self.row, self.col
    def canEnter(self, entdir):
        # A train may enter only through a side connected to the active piece.
        canEnter = False
        res = self.activePiece.canEnter(entdir)
        canEnter = canEnter or res
        return canEnter
class LevelCrossing(CellElement):
    """A four-way '+' crossing; trains always pass straight through.

    The tile is symmetric, so rotation has no visible effect.
    """

    def __init__(self, gridRef):
        self.visuals = '+'
        self.rotationCount = 0
        self.myGrid = gridRef
        self.row = -1
        self.col = -1
        # Connected on all four sides; the exit is always (entdir + 2) % 4,
        # so no direction state needs to be stored.

    def setPosition(self, row, col):
        """Record the tile's grid coordinates."""
        self.row, self.col = row, col

    def setOrientation(self, rotationAmount, incrRot : bool = True):
        # Rotation is meaningless for a symmetric crossing; only the
        # counter is tracked for consistency with other tiles.
        if incrRot:
            self.rotationCount = (self.rotationCount + rotationAmount) % 4

    def getDuration(self, entdir):
        # Crossing takes one tick.
        return 1

    def getStop(self, entdir):
        # Phase 1: never wait here. Later phases may wait for other trains
        # currently occupying the crossing (possible deadlock to handle).
        return 0

    def nextCell(self, entdir):
        """Exit straight across; return the target cell, or None at an edge."""
        self.exitDir = (entdir + 2) % 4
        step_row, step_col = {NORTH: (-1, 0), SOUTH: (1, 0),
                              WEST: (0, -1), EAST: (0, 1)}[self.exitDir]
        target_row = self.row + step_row
        target_col = self.col + step_col
        if self.myGrid.isOutOfBounds(target_row, target_col):
            return None
        return self.myGrid.grid[target_row][target_col]

    def getPos(self):
        return self.row, self.col

    def getView(self):
        return self.visuals

    def canEnter(self, entdir):
        """Enterable from any side unless another train occupies the cell."""
        return not self.myGrid.hasTrain(self.row, self.col)
class BridgeCrossing(CellElement):
    """A bridge tile: two road segments cross without interacting.

    All four sides are connected and a train always exits straight across
    ((entdir + 2) % 4); which segment carries the bridge only matters for
    rotation and, in later phases, for display.
    """
    def __init__(self, gridRef):
        self.visuals = '\u03A9' #visual is the omega sign
        self.rotationCount = 0
        self.myGrid = gridRef
        self.row = -1
        self.col = -1
        # The bridge lies on the West-East segment by default; the other
        # (underpass) road direction can be deduced from these two.
        self.bridgeDir1 = WEST
        self.bridgeDir2 = EAST
        # All 4 directions connected; exit is always entdir+2 in mod 4.
        return
    def setPosition(self, row, col):
        # Record the tile's grid coordinates.
        self.row = row
        self.col = col
        return
    def setCwRot(self):
        # Rotate the bridge segment 90 degrees clockwise.
        self.bridgeDir1 = (self.bridgeDir1 + 1) % 4
        self.bridgeDir2 = (self.bridgeDir2 + 1) % 4
        return
    def setOrientation(self, rotationAmount, incrRot : bool = True):
        # Rotation matters here: it changes which segment carries the bridge.
        if(incrRot):
            self.rotationCount = (self.rotationCount + rotationAmount) % 4
        for i in range(0, rotationAmount):
            self.setCwRot()
        return
    def getDuration(self, entdir):
        # Crossing the bridge takes one tick.
        return 1
    def getStop(self, entdir):
        # Trains never stop on a bridge.
        return 0
    def nextCell(self,entdir):
        """Exit straight across; return the target cell, or None at an edge."""
        self.exitDir = (entdir + 2) % 4
        # According to exitDir, return the neighbouring cell when in bounds.
        if(self.exitDir == NORTH and self.myGrid.isOutOfBounds(self.row-1, self.col) == False):
            # row-1, col unchanged
            return(self.myGrid.grid[self.row-1][self.col] )
        elif(self.exitDir == SOUTH and self.myGrid.isOutOfBounds(self.row+1, self.col) == False):
            # row+1, col unchanged
            return(self.myGrid.grid[self.row+1][self.col])
        elif(self.exitDir == WEST and self.myGrid.isOutOfBounds(self.row, self.col-1) == False):
            # col-1, row unchanged
            return(self.myGrid.grid[self.row][self.col-1])
        elif(self.exitDir == EAST and self.myGrid.isOutOfBounds(self.row, self.col+1) == False):
            # col+1, row unchanged
            return(self.myGrid.grid[self.row][self.col+1])
        else: #no available cell is found
            return None
    def getPos(self):
        return self.row, self.col
    def getView(self):
        return self.visuals
    def canEnter(self, entdir):
        # All four sides connected; the bridge separates the two roads, so
        # entry is always allowed (no collision possible here).
        return True
class Station(CellElement):
    """A straight road segment where trains stop for a fixed dwell time.

    Behaves like a straight RegularRoad except that getStop() is non-zero.
    """

    def __init__(self, gridRef):
        self.visuals = '\u0394'  # Greek capital delta marks a station
        self.rotationCount = 0
        self.myGrid = gridRef
        self.row = -1
        self.col = -1
        # Connected north-south by default, like a straight road.
        self.dir1 = SOUTH
        self.dir2 = NORTH

    def setPosition(self, row, col):
        """Record the tile's grid coordinates."""
        self.row, self.col = row, col

    def setCwRot(self):
        """Rotate both connection directions 90 degrees clockwise."""
        self.dir1 = (self.dir1 + 1) % 4
        self.dir2 = (self.dir2 + 1) % 4

    def setOrientation(self, rotationAmount, incrRot : bool = True):
        """Apply ``rotationAmount`` CW rotations; incrRot=False skips the counter."""
        if incrRot:
            self.rotationCount = (self.rotationCount + rotationAmount) % 4
        for _ in range(rotationAmount):
            self.setCwRot()

    def switchState(self):
        return

    def getDuration(self, entdir):
        # Traversal time plus the mandatory station stop.
        return 1 + self.getStop(entdir)

    def getStop(self, entdir):
        # Fixed dwell time at a station.
        return 10

    def nextCell(self, entdir):
        """Pass straight through; records ``self.exitDir``, returns cell or None."""
        self.exitDir = None
        if entdir == self.dir1:
            self.exitDir = self.dir2
        elif entdir == self.dir2:
            self.exitDir = self.dir1
        else:
            # Entering from a non-connected side: dead end.
            return None
        step_row, step_col = {NORTH: (-1, 0), SOUTH: (1, 0),
                              WEST: (0, -1), EAST: (0, 1)}[self.exitDir]
        target_row = self.row + step_row
        target_col = self.col + step_col
        if self.myGrid.isOutOfBounds(target_row, target_col):
            return None
        return self.myGrid.grid[target_row][target_col]

    def getPos(self):
        return self.row, self.col

    def getView(self):
        return self.visuals

    def canEnter(self, entdir):
        """A train may enter only through one of the two connected sides."""
        return entdir in (self.dir1, self.dir2)
class Train():
    """A train (engine + wagons) moving across a GameGrid.

    The GameGrid keeps references to created trains and reflects their
    effects in the grid view.
    """
    def __init__(self, nWagons, cell : CellElement, gridRef : GameGrid):
        # nWagons: wagons behind the engine; cell: spawn tile.
        self.wagonCount = nWagons
        self.totalLength = nWagons+1 # cars + train engine
        self.currCell = cell
        self.wagonCountPerCell = 2 # effectively, each 'car' takes 1/2 of a cell.
        self.gridRef = gridRef # ref to GameGrid to be in communication.
        self.coveredCellCount = math.ceil(self.totalLength / self.wagonCountPerCell)
        # one of: "moving", "movingReverse", "stopped"
        self.status = "moving"
        self.enginePosRow, self.enginePosCol = cell.getPos()
        # NOTE(review): self.currDir is first assigned in enterCell();
        # advance() raises AttributeError if called before enterCell() —
        # confirm callers always call enterCell() first.
        return
    def enterCell(self, nextCell : CellElement, entdir):
        """Place the train on ``nextCell``, entering from direction ``entdir``."""
        self.currDir = entdir
        self.enginePosRow, self.enginePosCol = nextCell.getPos()
        self.currCell = nextCell
    def advance(self):
        """Move one cell forward; returns False when the train cannot proceed.

        After nextCell() runs, the previous cell's exitDir becomes this
        train's new entry direction: exiting south means entering the next
        cell from its north side, i.e. (exitDir + 2) % 4.
        """
        nextCell = self.currCell.nextCell(self.currDir)
        self.currDir = (self.currCell.exitDir + 2) % 4
        if(nextCell is None):
            # self.gridRef.trainDisappear(self), will be implemented
            return False
        elif(nextCell.visuals == '_'):
            # next cell is background: nothing to drive onto
            return False
        else:
            # update engine position
            self.currCell = nextCell
            self.enginePosRow, self.enginePosCol = nextCell.getPos()
            return True
    def getEnginePos(self):
        # Current (row, col) of the engine.
        return self.enginePosRow, self.enginePosCol
    def getStatus(self):
        # One of "moving", "movingReverse", "stopped".
        return self.status
    def getGeometry(self):
        # Geometry of the train path, engine and cars. Implemented in later
        # phases where the full train is displayed on a curve.
        return
| 39.564541
| 190
| 0.596502
| 25,256
| 0.992767
| 0
| 0
| 0
| 0
| 0
| 0
| 8,329
| 0.327398
|
54b7f3a8b8887e8d822b83c326d0049cfae95c7f
| 25,083
|
py
|
Python
|
nar_module/nar/preprocessing/nar_preprocess_cafebiz_2.py
|
13520505/bigdataproj
|
09202c7e13366726415b1111cc93d3083d102cb3
|
[
"MIT"
] | null | null | null |
nar_module/nar/preprocessing/nar_preprocess_cafebiz_2.py
|
13520505/bigdataproj
|
09202c7e13366726415b1111cc93d3083d102cb3
|
[
"MIT"
] | 9
|
2020-01-28T23:07:43.000Z
|
2022-02-10T00:36:23.000Z
|
nar_module/nar/preprocessing/nar_preprocess_cafebiz_2.py
|
13520505/bigdataproj
|
09202c7e13366726415b1111cc93d3083d102cb3
|
[
"MIT"
] | null | null | null |
import argparse
import glob
import json
import os
import os.path
import re
import sys
from collections import Counter, defaultdict
from datetime import datetime
from os import path
import numpy as np
import pandas as pd
import tensorflow as tf
from acr_module.acr.acr_module_service import get_all_file, load_json_config
from nar_module.nar.tf_records_management import (make_sequential_feature,
save_rows_to_tf_record_file)
from nar_module.nar.utils import (deserialize, extract_local_hour_weekday,
gini_index, serialize)
# sys.path.append("/home/tungtv/Documents/Code/News/newsrecomdeepneural")
from pick_singleton.pick_singleton import ACR_Pickle_Singleton
from redis_connector.RedisClient import PageView, RedisClient, Session
sys.path.append("/data/tungtv/Code/NewsRecomDeepLearning")
# from ..tf_records_management import save_rows_to_tf_record_file, make_sequential_feature
# from ..utils import serialize, deserialize, hash_str_to_int, extract_local_hour_weekday, gini_index
def create_args_parser():
    """Build the command-line argument parser for the NAR preprocessing job."""
    arg_specs = [
        ('--input_sessions_json_folder_path', dict(
            default='',
            help='Input path of the folder with sessions in JSON lines file, organized by hour (exported by the Spark script - nar_preprocessing_addressa_01_dataproc.ipynb).')),
        ('--input_acr_metadata_embeddings_path', dict(
            default='',
            help='Input path for a pickle with articles metadata and content embeddings, generated by ACR module.')),
        ('--input_nar_encoders_dict_path', dict(
            default='',
            help='Input path for a pickle with the dictionary encoders for categorical features (exported by the Spark script - nar_preprocessing_addressa_01_dataproc.ipynb)')),
        ('--number_hours_to_preprocess', dict(
            type=int, default=-1,
            help='Number of hours to preprocess')),
        ('--output_nar_preprocessing_resources_path', dict(
            default='',
            help='Output path for a pickle with label encoders and num scalers of clicks data.')),
        ('--output_sessions_tfrecords_path', dict(
            default='',
            help='Output path for TFRecords generated with user sessions')),
    ]
    parser = argparse.ArgumentParser()
    for flag, options in arg_specs:
        parser.add_argument(flag, **options)
    return parser
def load_acr_module_resources(acr_module_resources_path):
    """Load the pickled ACR resources and return two lookup helpers.

    Parameters
    ----------
    acr_module_resources_path : str
        Path of the pickle produced by the ACR module, containing
        (label encoders, articles metadata dataframe, content embeddings).

    Returns
    -------
    tuple of callables
        ``(get_article_text_length, get_article_id_encoded)``.
    """
    (acr_label_encoders, articles_metadata_df, content_article_embeddings) = \
        deserialize(acr_module_resources_path)

    # NOTE(review): with inplace=False the returned frame is discarded, so
    # this call has no effect; the lookup below uses a boolean mask rather
    # than the index, so this is harmless dead code — confirm before removing.
    articles_metadata_df.set_index('article_id', inplace=False)

    def get_article_text_length(article_id):
        """Text length (#words) of an article; average for the padding id 0."""
        if article_id == 0:
            # Padding article: fall back to the global average text length.
            # (Fix: the original computed this value but did not return it,
            # falling through to a mask lookup that finds no row for id 0.)
            return numeric_scalers['text_length']['avg']
        return articles_metadata_df[
            articles_metadata_df['article_id'] == article_id]['text_length'].values[0]

    def get_article_id_encoded(article_id):
        """Map a raw article id to its ACR label-encoded id."""
        return acr_label_encoders['article_id'][article_id]

    return get_article_text_length, get_article_id_encoded
def load_nar_module_resources(nar_encoders_dict_path):
    """Deserialize and return the NAR categorical-feature encoders dict."""
    encoders = deserialize(nar_encoders_dict_path)
    print("Read NAR label encoders dict for: {}".format(encoders.keys()))
    return encoders
def load_sessions_json_file(json_path):
    """Yield one parsed session dict per JSON line of the given file."""
    with open(json_path, 'r') as session_file:
        for raw_line in session_file:
            yield json.loads(raw_line)
def load_sessions_hour(session_hour_path):
    """Collect every session from every JSON-lines file of one hour folder."""
    sessions = []
    for file_name in os.listdir(session_hour_path):
        file_path = os.path.join(session_hour_path, file_name)
        # load_sessions_json_file is a generator; extend consumes it fully.
        sessions.extend(load_sessions_json_file(file_path))
    return sessions
def load_sessions_hours(folder_path):
    """Generate ``(hour_index, sessions)`` tuples in ascending hour order.

    Hour folders are named like ``session_hour=<n>``; the sort key pads the
    numeric part so e.g. hour 3 precedes hour 20 despite fewer digits.
    """
    def hour_sort_key(folder_name):
        return "{:0>5}".format(folder_name.split('=')[1])

    hour_folders = [entry for entry in os.listdir(folder_path)
                    if os.path.isdir(os.path.join(folder_path, entry))]
    for hour_folder in sorted(hour_folders, key=hour_sort_key):
        hour_index = int(hour_folder.split('=')[1])
        hour_folder_path = os.path.join(folder_path, hour_folder)
        yield (hour_index, load_sessions_hour(hour_folder_path))
# Precomputed statistics used to clip and standardize numeric click
# features: values are capped at 'valid_max' before z-scoring with
# 'avg' and 'stddev' (see standardize_num_feature).
numeric_scalers = {
    '_elapsed_ms_since_last_click': {
        # Cap at 60 min, just to separate returning users, whose elapsed
        # time since last click exceeds the 30-min session limit.
        'valid_max': 60 * 60 * 1000.0,
        'avg': 789935.7,
        'stddev': 1371436.0},
    'active_time_secs': {
        'valid_max': 900.0,
        'avg': 65.0,
        'stddev': 69.37},
    'active_time_secs_by_word': {
        'valid_max': 10.0,
        'avg': 1.854,
        'stddev': 1.474},
    'text_length':{
        # Average article length; used as fallback for the padding article.
        'avg':728
    }
}
def standardize_num_feature(feature, values):
    """Clip each value at the feature's valid_max and z-score it.

    Uses the precomputed mean/stddev from ``numeric_scalers``.
    NOTE(review): ``int(x)`` truncates fractional inputs (e.g.
    active_time_secs_by_word values) before clipping — confirm that this
    truncation is intended.
    """
    scaler_config = numeric_scalers[feature]
    normalizer = lambda x: (min(int(x), scaler_config['valid_max']) - scaler_config['avg']) / scaler_config['stddev']
    return list([normalizer(value) for value in values])
def get_cicled_feature_value(value, max_value):
    """Project a cyclic feature (hour, weekday, ...) onto the unit circle.

    Returns the (sin, cos) pair of the value scaled into [0, 1), so the
    model can see that e.g. hour 23 and hour 0 are adjacent. The tiny
    epsilon keeps a zero value slightly off the axis.
    """
    value_scaled = (value + 0.000001) / max_value
    angle = 2 * np.pi * value_scaled
    return np.sin(angle), np.cos(angle)
def process_session_clicks_features(sessions_hour, get_article_text_length_fn):
    """Normalize click features for one hour of sessions.

    For every click: derives local time features, standardizes elapsed time
    and reading-time-per-word, then flattens the per-click values into list
    columns on the session. Returns
    ``(sessions_df, stats, clicks_by_articles_counter)`` where sessions_df
    is sorted by session_id, stats is a dict of hour-level metrics, and the
    counter maps article_id -> click count.
    """
    sessions = []
    session_count = 0
    clicked_articles_ids = []
    unique_clicked_articles = set()
    # Normalizing numerical features (standardization) and creating time features
    for session in sessions_hour:
        session_count += 1
        #TODO add session view here
        for click in session['clicks']:
            # NOTE: timezone was originally "Europe/Oslo" (Adressa dataset).
            local_hour, local_weekday = extract_local_hour_weekday(click['timestamp']//1000,
                                                                   "Asia/Ho_Chi_Minh")
            # Normalizing weekday feature
            click['weekday'] = (local_weekday+1-3.5)/7
            # Transforming the hour into two "cyclic" features, so the network
            # understands there is one hour between 11pm-0am and 0am-1am alike.
            click['time_hour_sin'], click['time_hour_cos'] = get_cicled_feature_value(local_hour, 24)
            # Applying standardization on elapsed time
            click['_elapsed_ms_since_last_click'] = standardize_num_feature('_elapsed_ms_since_last_click', [click['_elapsed_ms_since_last_click']])[0]
            # If active_time_secs is not available, use the average
            if 'active_time_secs' not in click:
                click['active_time_secs'] = numeric_scalers['active_time_secs']['avg']
            # Normalizing reading time by article length (#words)
            click['active_time_secs_by_word'] = click['active_time_secs'] / get_article_text_length_fn(click['article_id'])
            # Applying standardization
            click['active_time_secs_by_word'] = standardize_num_feature('active_time_secs_by_word', [click['active_time_secs_by_word']])[0]
            # Removing unnormalized feature
            del click['active_time_secs']
            # Copying click attributes as list columns on the session
            for key in click:
                if key != "user_id":
                    if key not in session:
                        session[key] = [click[key]]
                    else:
                        session[key].append(click[key])
            clicked_articles_ids.append(click['article_id'])
            unique_clicked_articles.add(click['article_id'])
        # Removing clicks property, as its values were copied to list columns
        del session['clicks']
        sessions.append(session)
    # Ensuring sessions within the hour are sorted by session id (time)
    sessions_df = pd.DataFrame(sessions).sort_values('session_id')
    # Hour-level popularity stats
    clicks_by_articles_counter = dict(Counter(clicked_articles_ids))
    clicks_by_articles = np.array(list(clicks_by_articles_counter.values()))
    total_clicks = np.sum(clicks_by_articles)
    clicks_by_articles_norm = clicks_by_articles / total_clicks
    clicks_by_articles_norm_mean = np.mean(clicks_by_articles_norm)
    clicks_by_articles_norm_median = np.median(clicks_by_articles_norm)
    stats = {'session_count': session_count,
             'clicks': total_clicks,
             'clicks_by_session': total_clicks / session_count,
             'unique_articles': len(unique_clicked_articles),
             'clicks_by_article':float(total_clicks)/len(unique_clicked_articles),
             'norm_pop_mean': clicks_by_articles_norm_mean,
             'norm_pop_median': clicks_by_articles_norm_median,
             'gini_index': gini_index(clicks_by_articles.astype(np.float32))
            }
    print("Stats :{}".format(stats))
    # sessions_df: pandas dataframe; stats: dict; clicks_by_articles_counter: dict
    return sessions_df, stats, clicks_by_articles_counter
def make_sequence_example(row):
    """Convert one session row (pandas Series) into a tf.train.SequenceExample.

    Scalar session attributes go into the context; per-click list columns
    become sequential feature lists.
    """
    context_features = {
        'session_id': tf.train.Feature(int64_list=tf.train.Int64List(value=[row['session_id']])),
        'session_size': tf.train.Feature(int64_list=tf.train.Int64List(value=[row['session_size']])),
        'session_start': tf.train.Feature(int64_list=tf.train.Int64List(value=[row['session_start']])),
        'user_id': tf.train.Feature(bytes_list=tf.train.BytesList(value=[row['user_id'].encode()])),
    }
    context = tf.train.Features(feature=context_features)
    sequence_features = {
        'event_timestamp': make_sequential_feature(row["timestamp"]),
        #Categorical features
        'item_clicked': make_sequential_feature(row["article_id"]),
        'city': make_sequential_feature(row["city"]),
        # 'region': make_sequential_feature(row["region"]),
        # 'country': make_sequential_feature(row["country"]),
        # 'device': make_sequential_feature(row["device"]),
        'os': make_sequential_feature(row["os"]),
        # 'referrer_class': make_sequential_feature(row["referrer_class"]),
        #Numerical features (already normalized upstream)
        'weekday': make_sequential_feature(row["weekday"], vtype=float),
        'local_hour_sin': make_sequential_feature(row["time_hour_sin"], vtype=float),
        'local_hour_cos': make_sequential_feature(row["time_hour_cos"], vtype=float),
        'user_elapsed_ms_since_last_click': make_sequential_feature(row["_elapsed_ms_since_last_click"], vtype=float),
        'active_time_secs_by_word': make_sequential_feature(row["active_time_secs_by_word"], vtype=float),
        #To debug
        'url': make_sequential_feature(row["url"], vtype=str),
    }
    sequence_feature_lists = tf.train.FeatureLists(feature_list=sequence_features)
    return tf.train.SequenceExample(feature_lists=sequence_feature_lists,
                                    context=context
                                    )
def export_sessions_hour_to_tf_records(hour_index, sessions_df, output_path):
    """Write one hour of sessions to a TFRecord file.

    ``output_path`` must contain a ``*`` placeholder, replaced by the
    zero-padded hour index (e.g. ``0003``).
    """
    export_file_template = output_path.replace('*', '{0:04d}')
    print("Exporting hour {} (# sessions: {})".format(hour_index, len(sessions_df)))
    # iterrows() yields (index, row); keep only the row Series.
    save_rows_to_tf_record_file(map(lambda x: x[1], sessions_df.iterrows()),
                                make_sequence_example,
                                export_filename=export_file_template.format(hour_index))
def save_nar_preprocessing_resources(output_path, nar_label_encoders_dict, nar_numeric_scalers):
    """Pickle the NAR label encoders and numeric scalers to ``output_path``."""
    payload = {
        'nar_label_encoders': nar_label_encoders_dict,
        'nar_numeric_scalers': nar_numeric_scalers,
    }
    serialize(output_path, payload)
def compute_total_clicks_by_article_stats(clicks_by_articles_counters):
    """Merge per-hour click counters into one total-clicks-per-article map."""
    totals = defaultdict(int)
    for hourly_counter in clicks_by_articles_counters:
        for article_id, clicks in hourly_counter.items():
            totals[article_id] += clicks
    return totals
def delete_all_file_in_path(path):
    """Remove every filesystem entry matching the glob pattern ``path*``."""
    for matched_path in glob.glob(path + '*'):
        os.remove(matched_path)
def get_date_time_current():
    """Return the current wall-clock time as an integer Unix timestamp (seconds)."""
    return int(datetime.now().timestamp())
def parse_newsId_from_url(url):
    """Extract the numeric news id embedded in a cafebiz article URL.

    Matches ``-<id>.chn`` or the popup form ``-<id1>rf<id2>.chn`` (returns
    id1). Returns the literal string "'<PAD>'" when no id is present.
    Example popup URL:
    cafebiz.vn/...-201908121346105rf20190925103622081.chn -> 201908121346105
    """
    match = re.search('(?<=-)([\d]+|[\d]+rf[\d]+)(?=.chn)', url)
    if not match:
        return "'<PAD>'"
    news_id = match.group()
    if "rf" in news_id:
        # Popup URL "<id1>rf<id2>": the first id is the article shown.
        news_id = news_id.split("rf")[0]
    return int(news_id)
def preprocess_for_predict(user_id,news_id, get_article_text_length_fn):
    """Build a serialized tf.SequenceExample for one user's click history.

    Reads the user's page views from Redis, appends the article currently
    being viewed (``news_id``), normalizes the click features the same way
    the training pipeline does, and returns the serialized SequenceExample
    for the first (only) session row.

    Args:
        user_id: user identifier used as the Redis key.
        news_id: id of the article being viewed now (string form).
        get_article_text_length_fn: maps encoded article id -> word count,
            used to normalize reading time.

    Returns:
        bytes: the serialized tf.SequenceExample for the session.

    NOTE(review): relies on module-level globals (``numeric_scalers``,
    ``RedisClient``, ``PageView``, ``Session``, ``ACR_Pickle_Singleton``,
    ``extract_local_hour_weekday`` ...) — confirm they are initialized
    before this is called.
    """
    # print("==========> Test into preprocess_for_predict")
    session = {}
    redis = RedisClient("localhost")
    page_view_list = redis.getPageView(user_id)
    if len(page_view_list) == 0: # empty, new user, have not log redis
        # print("=>>>>>>>pageview is empty")
        # No history: fall back to the average time-on-read from the scalers.
        tor = numeric_scalers['active_time_secs']['avg']# i give agv
        # Synthesize a single page view for the article being opened now.
        page_view = PageView("-" + news_id + ".chn", get_date_time_current(), 0, tor)
        page_view_list.append(page_view)
        user_info = Session(user_id, 0,get_date_time_current(), 1)
        # user_info.guid = user_id
        # user_info.locId = 0
        # user_info.osCode = 1
        # user_info.timeNow = get_date_time_current()
        session['session_size'] = len(page_view_list)
        session['session_id'] = user_info.timeNow
        session['session_start'] = user_info.timeNow
        session['user_id'] = user_info.guid
    else:
        # count agv tor pageview
        # print("=>>>>>>>pageview is no empty")
        # Existing history: use the user's average time-on-read for the new view.
        tor = 0
        for i in range(0, len(page_view_list)):
            tor += page_view_list[i].timeOnRead
        tor = tor/len(page_view_list)
        page_view = PageView("-"+news_id+".chn",get_date_time_current(),0,tor)
        page_view_list.append(page_view)
        # print("<<<<<<<<<<<,,page_view_list>>>>>>>>>>>>")
        # for i in range(0, len(page_view_list)):
        #     print(page_view_list[i])
        # print(page_view_list)
        user_info = redis.getUserInfo(user_id)
        session['session_size'] = len(page_view_list)
        session['session_id'] = user_info.timeNow
        session['session_start'] = user_info.timeNow
        session['user_id'] = user_info.guid
    #Get output filename
    output_file_name = str(user_info.timeNow)+"_"+str(user_info.guid)+".tfrecord.gz"
    clicks = []
    pickle =ACR_Pickle_Singleton.getInstance()
    # Turn each page view into a raw click record (same schema as training).
    for pv in page_view_list:
        click = {}
        click['_elapsed_ms_since_last_click'] = (pv.timeNow - user_info.timeNow)*1000
        click['active_time_secs'] = pv.timeOnRead
        # print("============================================="+ str(parse_newsId_from_url(pv.url)))
        click['article_id'] = pickle.get_article_id_encoded(parse_newsId_from_url(pv.url))
        click['city'] = user_info.locId
        click['os'] = user_info.osCode
        click['timestamp'] = pv.timeNow * 1000
        click['url'] = pv.url
        click['user_id'] = user_info.guid
        # test tungtv
        # print(" click['user_id'] {}:".format(click['user_id']))
        # print(" click['article_id'] {}".format(click['article_id']))
        clicks.append(click)
    session['clicks'] = clicks
    sessions = []
    session_count = 0
    clicked_articles_ids = []
    unique_clicked_articles = set()
    #Normalizing numerical features (standardization) and creating time features
    #TODO add session view here
    for click in session['clicks']:
        # local_hour, local_weekday = extract_local_hour_weekday(click['timestamp']//1000,
        #                                                        "Europe/Oslo")
        local_hour, local_weekday = extract_local_hour_weekday(click['timestamp']//1000,
                                                               "Asia/Ho_Chi_Minh")
        #Normalizing weekday feature
        click['weekday'] = (local_weekday+1-3.5)/7
        #Transforming the hour in two "cyclic" features, so that the network
        #can understand, for example, that there is one hour of difference between both 11pm to 0am and from 0am to 1am
        click['time_hour_sin'], click['time_hour_cos'] = get_cicled_feature_value(local_hour, 24)
        #Applying standardization on elapsed time
        click['_elapsed_ms_since_last_click'] = standardize_num_feature('_elapsed_ms_since_last_click', [click['_elapsed_ms_since_last_click']])[0]
        #If active_time_secs is not available, use the average
        if 'active_time_secs' not in click:
            click['active_time_secs'] = numeric_scalers['active_time_secs']['avg']
        #Normalizing reading time by article length (#words)
        click['active_time_secs_by_word'] = click['active_time_secs'] / get_article_text_length_fn(click['article_id'])
        #Applying standardization
        click['active_time_secs_by_word'] = standardize_num_feature('active_time_secs_by_word', [click['active_time_secs_by_word']])[0]
        #Removing unnormalized feature
        del click['active_time_secs']
        #Applying standardization in this feature
        #click['active_time_secs'] = standardize_num_feature('active_time_secs', [click['active_time_secs']])[0]
        #Copying click attributes as lists in the session
        for key in click:
            if key != "user_id":
                if key not in session:
                    session[key] = [click[key]]
                else:
                    session[key].append(click[key])
        clicked_articles_ids.append(click['article_id'])
        unique_clicked_articles.add(click['article_id'])
    #Removing clicks property, as its values were copied to individual list columns
    del session['clicks']
    sessions.append(session)
    #Ensuring sessions within the hour are sorted by session id (time)
    sessions_df = pd.DataFrame(sessions).sort_values('session_id')
    output_file = "./nardata/tmp/"+output_file_name
    os.makedirs("./nardata/tmp/", exist_ok=True)
    # save_rows_to_tf_record_file(map(lambda x: x[1], sessions_df.iterrows()), make_sequence_example, output_file)
    # return output_file
    a = map(lambda x: make_sequence_example(x[1]), sessions_df.iterrows())
    # NOTE(review): the loop returns on its first iteration, so only the
    # first session row is ever serialized and `return a` below is dead
    # code — confirm this is intentional before cleaning up.
    for row in sessions_df.iterrows():
        seq_example = make_sequence_example(row[1])
        return seq_example.SerializeToString()
    return a
def split_string(path):
    """Return the hour index parsed from the newest file name under ``path``.

    File names are expected to look like ``<prefix>_<hour>.<ext>``; the
    lexicographically last name is used and the integer after its final
    underscore (before the first '.') is returned.
    """
    names = []
    for _, _, files in os.walk(path):
        names.extend(files)
    names.sort()
    stem = names[-1].split('.')[0]
    return int(stem.split('_')[-1])
def delete_file_keep_in_two_week(path, num_hour):
    """Keep only the ``num_hour`` lexicographically newest files in ``path``.

    All files except the last ``num_hour`` names (sorted ascending) are
    removed from disk.  Despite the name, the retention window is whatever
    ``num_hour`` the caller passes (e.g. 24*14 for two weeks).
    """
    names = []
    for _, _, files in os.walk(path):
        names.extend(files)
    names.sort()
    # a = 24*14
    for stale in names[:-num_hour]:
        os.remove(path + "/" + stale)
def main_nar_preprocess_2():
    """Second NAR preprocessing step: sessions JSON -> hourly TFRecord files.

    Loads paths from ``./parameter.json``, loads the ACR article metadata
    and the NAR categorical encoders, converts each hour of session JSON
    into TFRecords, saves the preprocessing resources pickle, and prunes
    old TFRecord files beyond the retention window.
    """
    #def main():
    # parser = create_args_parser()
    # args = parser.parse_args()
    print("<=== STARTING NAR PREPROCESS 2 ===> ")
    # parameter = load_json_config("./parameter.json")
    parameter = load_json_config("./parameter.json")
    list_args = parameter["nar_preprocess_2"]
    DATA_DIR = parameter["DATA_DIR"]
    num_day = list_args["num_day"]
    input_sessions_json_folder_path = DATA_DIR + list_args["input_sessions_json_folder_path"]
    input_acr_metadata_embeddings_path = DATA_DIR + list_args["input_acr_metadata_embeddings_path"]
    input_nar_encoders_dict_path = DATA_DIR + list_args["input_nar_encoders_dict_path"]
    number_hours_to_preprocess = list_args["number_hours_to_preprocess"]
    output_nar_preprocessing_resources_path = DATA_DIR + list_args["output_nar_preprocessing_resources_path"]
    output_sessions_tfrecords_path = DATA_DIR + list_args["output_sessions_tfrecords_path"]
    # Create the resources output folder on first run.
    if path.exists(output_nar_preprocessing_resources_path):
        pass
    else:
        import os
        os.makedirs(output_nar_preprocessing_resources_path)
    print('Loading resources generated ACR module (articles metadata)')
    # truyen file  (English: pass the file)
    get_article_text_length_fn, get_article_id_encoded_fn = load_acr_module_resources(get_all_file(input_acr_metadata_embeddings_path)[0])
    #get_article_text_length_fn = None
    # # degub
    # print(get_article_text_length_fn)
    print('Loading resources generated by the first step of NAR preprocessing (cat. features dict encoders)')
    nar_encoders_dict = load_nar_module_resources(get_all_file(input_nar_encoders_dict_path)[0])
    print('Loading sessions from folder: {}'.format(input_sessions_json_folder_path))
    print('Exporting TFRecords to: {}'.format(output_sessions_tfrecords_path))
    # delete file .part*
    # from subprocess import Popen
    # var1 = DATA_DIR+input_sessions_json_folder_path+"session_hour=*/.*"
    # Process = Popen(['./nar_module/scripts/remove_hiden_file.sh %s' % str(var1)], shell=True)
    import os
    # Remove hidden Spark part-files left in the session folders.
    var1 ='rm -rf '+ input_sessions_json_folder_path + "/session_hour=*/.*"
    print(var1)
    myCmd = var1
    # NOTE(review): os.system returns 0 on success, so this prints
    # "Xoa thanh cong" ("deleted successfully") on a NON-zero exit status —
    # the success/failure messages look inverted; confirm intent.
    if os.system(myCmd) !=0 :
        print("Xoa thanh cong")
    else:
        print("Xoa That bai")
    # split path output_sessions_tfrecords_path
    path_tf = DATA_DIR +'/'+list_args["output_sessions_tfrecords_path"].split('/')[1]
    if path.exists(path_tf):
        pass
    else:
        import os
        os.makedirs(path_tf)
    clicks_by_articles_counters = []
    #a = preprocess_for_predict("2265891616712405988", get_article_text_length_fn, get_article_id_encoded_fn)
    for (hour_index, sessions_hour) in load_sessions_hours(input_sessions_json_folder_path):
        # check directory empty:
        # Resume numbering after the newest TFRecord already on disk.
        if len(os.listdir(DATA_DIR+"/sessions_tfrecords_by_hour/")) != 0:
            hour_index = split_string(DATA_DIR+"/sessions_tfrecords_by_hour/")+1
        print('Processing hour {}'.format(hour_index))
        ####compute_global_metrics(sessions_hour)
        sessions_hour_df, hour_stats, hour_clicks_by_articles_counter = process_session_clicks_features(sessions_hour, get_article_text_length_fn)
        #sessions_hour_df.to_csv('hour-{}-to-debug.csv'.format(hour_index))
        hour_stats['_hour_index'] = hour_index
        #stats.append(hour_stats)
        clicks_by_articles_counters.append(hour_clicks_by_articles_counter)
        # sessions_hour_df.to_csv(DATA_DIR+"/sessions_tfrecords_by_hour/sessions_hour_df.csv", index=False)
        export_sessions_hour_to_tf_records(hour_index, sessions_hour_df,
                                           output_path=output_sessions_tfrecords_path)
        # print('')
        # if number_hours_to_preprocess >= 0 and hour_index == number_hours_to_preprocess:
        #     break
    print()
    print('Exporting Categorical Feature encoders and Numeric scalers dicts: {}'.format(output_nar_preprocessing_resources_path))
    save_nar_preprocessing_resources(output_nar_preprocessing_resources_path + "nar_preprocessing_resources.pickle",
                                     nar_encoders_dict,
                                     numeric_scalers)
    # delete to keep tf record in 2 week nearest
    # after export tfrecord for trainning, delete all file in input_sessions_json_folder_path
    if len(os.listdir(DATA_DIR + "/sessions_tfrecords_by_hour/")) > 24*num_day:
        delete_file_keep_in_two_week(DATA_DIR+"/sessions_tfrecords_by_hour/", 24*num_day)
    # delete_all_file_in_path(input_sessions_json_folder_path)
    print("<=== END NAR PREPROCESS 2 ===> ")
if __name__ == '__main__':
    # Script entry point: run the full hour-by-hour preprocessing pipeline.
    main_nar_preprocess_2()
| 42.950342
| 176
| 0.670653
| 0
| 0
| 783
| 0.031216
| 0
| 0
| 0
| 0
| 9,385
| 0.374158
|
54b976c7100ab785c654b0c7ca7597f8b6235530
| 2,979
|
py
|
Python
|
tests/integration/test_labels.py
|
spmistry/crux-python
|
15a6b705d1eec7e789f6f62819429f93e02349c1
|
[
"MIT"
] | null | null | null |
tests/integration/test_labels.py
|
spmistry/crux-python
|
15a6b705d1eec7e789f6f62819429f93e02349c1
|
[
"MIT"
] | null | null | null |
tests/integration/test_labels.py
|
spmistry/crux-python
|
15a6b705d1eec7e789f6f62819429f93e02349c1
|
[
"MIT"
] | null | null | null |
import pytest
@pytest.mark.usefixtures("dataset", "helpers")
def test_add_get_label(dataset, helpers):
    """A label added to a file is readable back via ``labels``."""
    random_path = "/test_file_" + helpers.generate_random_string(16) + ".csv"
    new_file = dataset.create_file(path=random_path)
    assert new_file.add_label("label1", "value1") is True
    assert new_file.labels.get("label1") == "value1"
@pytest.mark.usefixtures("dataset", "helpers")
def test_add_labels_set_labels(dataset, helpers):
    """Adding a label dict in one call stores every key/value pair."""
    random_path = "/test_file_" + helpers.generate_random_string(16) + ".csv"
    new_file = dataset.create_file(path=random_path)
    expected = {"label1": "value1", "label2": "value2"}
    assert new_file.add_labels(expected) is True
    assert new_file.labels == expected
# Negative Test case which verifies label search by searching unset labels without pagination.
@pytest.mark.usefixtures("dataset", "helpers")
def test_search_label(dataset, helpers):
    """Searching for a label that was never set yields no resources."""
    first = dataset.create_file(
        path="/test_file_" + helpers.generate_random_string(16) + ".csv"
    )
    second = dataset.create_file(
        path="/test_file_" + helpers.generate_random_string(16) + ".csv"
    )
    assert first.add_label("label1", "value1") is True
    assert second.add_label("label1", "value1") is True
    predicates = [{"op": "eq", "key": "label4", "val": "value4"}]
    found = dataset.find_resources_by_label(predicates=predicates)
    assert [resource.id for resource in found] == []
# Negative Test case which verifies label search by searching unset labels with pagination.
@pytest.mark.usefixtures("dataset", "helpers")
def test_search_label_page(dataset, helpers):
    """Paginated search for a label that was never set yields no resources."""
    first = dataset.create_file(
        path="/test_file_" + helpers.generate_random_string(16) + ".csv"
    )
    second = dataset.create_file(
        path="/test_file_" + helpers.generate_random_string(16) + ".csv"
    )
    assert first.add_label("label2", "value2") is True
    assert second.add_label("label2", "value2") is True
    predicates = [{"op": "eq", "key": "label3", "val": "value3"}]
    found = dataset.find_resources_by_label(predicates=predicates, max_per_page=1)
    assert [resource.id for resource in found] == []
@pytest.mark.usefixtures("dataset", "helpers")
def test_delete_label(dataset, helpers):
    """Deleting a label succeeds on every file that carries it."""
    first = dataset.create_file(
        path="/test_file_" + helpers.generate_random_string(16) + ".csv"
    )
    second = dataset.create_file(
        path="/test_file_" + helpers.generate_random_string(16) + ".csv"
    )
    first.add_label("label1", "value1")
    second.add_label("label1", "value1")
    assert first.delete_label(label_key="label1") is True
    assert second.delete_label(label_key="label1") is True
| 35.891566
| 94
| 0.700906
| 0
| 0
| 0
| 0
| 2,763
| 0.927492
| 0
| 0
| 671
| 0.225243
|
54b9924021536e75d5d98199ebdf2f58b7c84e9c
| 15,384
|
py
|
Python
|
bindings/python/cntk/utils/__init__.py
|
MSXC/CNTK
|
d223d48b411bc994acd465ed333c9f6bed64dd7f
|
[
"RSA-MD"
] | null | null | null |
bindings/python/cntk/utils/__init__.py
|
MSXC/CNTK
|
d223d48b411bc994acd465ed333c9f6bed64dd7f
|
[
"RSA-MD"
] | null | null | null |
bindings/python/cntk/utils/__init__.py
|
MSXC/CNTK
|
d223d48b411bc994acd465ed333c9f6bed64dd7f
|
[
"RSA-MD"
] | null | null | null |
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
import sys
import numbers
import collections
import copy
import numpy as np
from numbers import Number
from scipy import sparse
from .. import cntk_py
from ..device import use_default_device, cpu
from ..axis import Axis
from cntk.internal import typemap
# To __remove__
from cntk.logging import *
# End to remove
_VARIABLE_OR_FUNCTION = (cntk_py.Variable, cntk_py.Function)
# To __remove__
def one_hot(batch, num_classes, dtype=None, device=None):
    """Thin forwarding wrapper for ``cntk.Value.one_hot``.

    Kept only for backward compatibility (see the surrounding
    "To __remove__" markers).
    """
    from cntk import Value
    return Value.one_hot(batch, num_classes, dtype, device)
# End to remove
def get_data_type(*args):
    """
    Calculates the highest precision numpy data type of the provided parameters.
    If the parameter is a Function instance, it calculates it based on its
    inputs. Placeholders are ignored in the type determination.
    Args:
        args (number, list, NumPy array, :class:`~cntk.ops.variables.Variable`, or :class:`~cntk.ops.functions.Function`): input
    Returns:
        np.float32, np.float64, or None
    """
    from ..ops.variables import Variable
    # CNTK-reported dtypes take precedence over dtypes inferred from NumPy data.
    cntk_dtypes = set()
    numpy_dtypes = set()
    # NOTE(review): `args` is always a tuple here, so this isinstance check is
    # always False and the branch is dead — likely meant isinstance(args[0], ...).
    if len(args) == 1 and isinstance(args, _VARIABLE_OR_FUNCTION):
        args = [args]
    for arg in args:
        # Placeholders carry no meaningful dtype; skip them.
        if isinstance(arg, Variable) and arg.is_placeholder == True:
            continue
        if isinstance(arg,
                      (cntk_py.Variable, cntk_py.Value, cntk_py.NDArrayView)):
            if cntk_py.DataType_Double == arg.get_data_type():
                cntk_dtypes.add(np.float64)
            elif cntk_py.DataType_Float == arg.get_data_type():
                cntk_dtypes.add(np.float32)
        elif isinstance(arg, np.ndarray):
            if arg.dtype not in (np.float32, np.float64):
                raise ValueError(
                    'NumPy type "%s" is not supported' % arg.dtype)
            numpy_dtypes.add(arg.dtype.type)
        elif isinstance(arg, _VARIABLE_OR_FUNCTION):
            var_outputs = arg.outputs
            if len(var_outputs) > 1:
                raise ValueError(
                    'expected single output, but got %i' % len(var_outputs))
            var_type = var_outputs[0].get_data_type()
            if cntk_py.DataType_Double == var_type:
                cntk_dtypes.add(np.float64)
            else:
                cntk_dtypes.add(np.float32)
        else:
            # We don't know anything so we convert everything to float32. If it
            # works, we know the type.
            # TODO figure out a better/faster way.
            np.asarray(arg, dtype=np.float32)
            numpy_dtypes.add(np.float32)
    # Prefer the widest dtype seen; CNTK dtypes win over NumPy-derived ones.
    # Falls through to an implicit None when neither set has a match.
    if cntk_dtypes:
        if np.float64 in cntk_dtypes:
            return np.float64
        elif np.float32 in cntk_dtypes:
            return np.float32
    else:
        if np.float64 in numpy_dtypes:
            return np.float64
        elif np.float32 in numpy_dtypes:
            return np.float32
def _is_dense(batch):
if isinstance(batch, np.ndarray):
return True
elif sparse.issparse(batch):
return False
is_dense = True
b = batch
while isinstance(b, list):
b = b[0]
if sparse.issparse(b):
return False
return True
def _ones_like(batch, precision):
    '''
    Build a batch with the same per-sample shapes as ``batch`` but every
    value set to 1.
    Args:
        batch (list of NumPy arrays): a list of sequences, which are NumPy arrays
    '''
    from cntk.internal import sanitize_precision
    dtype = sanitize_precision(precision)
    return [np.ones_like(sample, dtype=dtype) for sample in batch]
def get_train_loss(trainer):
    '''
    Fetch the train loss from the last minibatch and copy it to the CPU in case it is on the GPU.
    Args:
        trainer (:class:`~cntk.train.trainer.Trainer`): the trainer used.
    Returns:
        the loss value
    '''
    # copy so SWIG does not destroy the value when we leave this scope
    loss = trainer.previous_minibatch_loss_average
    return copy.copy(loss)
def get_train_eval_criterion(trainer):
    '''
    Fetch the train evaluation criterion (e.g., classification error) from the last minibatch and copy it to the CPU in case it is on the GPU.
    Args:
        trainer (:class:`Trainer`): the trainer used.
    Returns:
        the criterion value
    '''
    # copy so SWIG does not destroy the value when we leave this scope
    criterion = trainer.previous_minibatch_evaluation_average
    return copy.copy(criterion)
# Obsolete: All usages should be replaced with the variable_value_to_seq
# procedure below
def value_to_seq(value):
    '''
    Convert a Value to a sequence of NumPy arrays that have their masked
    entries removed.
    Args:
        value (:class:`~cntk.core.Value`): Value as it is returned by Swig
    Returns:
        a list of NumPy arrays
    '''
    data = np.asarray(value)
    mask = value.mask()
    if not mask:
        return data
    mask = np.asarray(mask)
    # drop the positions the mask marks as invalid, per sequence
    return [seq[mask[idx] != cntk_py.MaskKind_Invalid]
            for idx, seq in enumerate(data)]
def variable_value_to_seq(value, variable):
    '''
    Convert a Value to a sequence of NumPy arrays that have their masked
    entries removed.
    Args:
        value (:class:`~cntk.core.Value`): Value as it is returned by Swig
    Returns:
        a list of NumPy arrays
    '''
    if not value.mask():
        # no mask: the whole value converts directly
        return np.asarray(value)
    sequences = value.unpack_variable_value(variable, True, cpu())
    return [np.asarray(seq) for seq in sequences[0]]
def eval(op, arguments=None, precision=None, device=None, backward_pass=False, expected_backward=None):
    '''
    It evaluates ``op`` on the data provided by the reader. This is useful
    mainly to explore the operators and for convenient unit testing.
    Args:
        op (:class:`Function`): operation to evaluate
        arguments: maps variables to their input data. The
            interpretation depends on the input type:
            * `dict`: keys are input variable or names, and values are the input data.
            * any other type: if node has a unique input, ``arguments`` is mapped to this input.
            For nodes with more than one input, only `dict` is allowed.
            In both cases, every sample in the data will be interpreted
            as a new sequence. To mark samples as continuations of the
            previous sequence, specify ``arguments`` as `tuple`: the
            first element will be used as ``arguments``, and the second one will
            be used as a list of bools, denoting whether a sequence is a new
            one (`True`) or a continuation of the previous one (`False`).
            Data should be either NumPy arrays or a
            :class:`~cntk.io.MinibatchData` instance.
        seq_starts (list of bools or None): if None, every sequence is
            treated as a new sequence. Otherwise, it is interpreted as a list of
            Booleans that tell whether a sequence is a new sequence (`True`) or a
            continuation of the sequence in the same slot of the previous
            minibatch (`False`)
        precision (str or None): precision being 'float32', 'float64', or
            None, in which case it will be determined by inspecting the operator
            (costly)
        device (:class:`~cntk.device.DeviceDescriptor`, default None): device
            this value should be put on
        backward_pass (`bool`, optional): whether a backward pass is performed
        expected_backward (`dict` or None): keys are variables for which to
            compute a backward ouptut. By default (None) all entries from
            'arguments' are used
    Returns:
        mapping of output variables to their values.
    '''
    if backward_pass:
        # keep forward state so the backward pass can reuse it
        state, forward_output = op.forward(arguments, op.outputs, op.outputs,
                                           device=device)
        if expected_backward is None:
            expected_backward = arguments
        # seed the backward pass with all-ones root gradients
        root_gradients = {v: _ones_like(o, precision) for v, o in
                          forward_output.items()}
        backward_output = op.backward(state, root_gradients, expected_backward)
        return forward_output, backward_output
    else:
        state, forward_output = op.forward(
            arguments, op.outputs, None, device=device)
        return forward_output, None
class Record(dict):
    '''
    Easy construction of a record (=immutable singleton class) from keyword arguments.
    e.g. r = Record(x = 13, y = 42) ; x = r.x
    Args:
        kwargs: keyword arguments to turn into the record members
    Returns:
        A singleton class instance that has all passed kw args as immutable class members.
    '''
    def __init__(self, **members):
        super(Record, self).__init__(members)
        # write to __dict__ directly to bypass the raising __setattr__
        self.__dict__.update(members)
    def __getattr__(self, key):
        if key in self:
            return self[key]
        raise AttributeError("record has no attribute '{}'".format(key))
    def __setattr__(self, key, value):
        # records are frozen once constructed
        raise AttributeError('record is immutable')
    def updated_with(self, **kwargs):
        '''
        Create a new Record from an existing one with members modified or added.
        e.g. r = Record(x = 13) ; print(r.x) ; r2 = r.updated_with(x = 42) ; print(r2.x)
        Args:
            kwargs: keyword arguments to turn into the record members
        Returns:
            A singleton class instance that has all passed kw args as immutable class members.
        '''
        merged = dict(**self)    # mutable copy of the current members
        merged.update(kwargs)    # overlay the new/changed members
        return Record(**merged)  # freeze again
def get_python_function_arguments(f):
    '''
    Helper to get the parameter names and annotations of a Python function.
    Only non-optional (no-default) positional parameters are returned; this
    allows, e.g., max(a, b, *more, name='') to be accepted as a binary function.
    '''
    import sys
    if sys.version_info.major >= 3:
        from inspect import getfullargspec
    else:
        # Python 2 shim: emulate getfullargspec on top of getargspec
        def getfullargspec(f):
            from inspect import getargspec
            annotations = getattr(f, '__annotations__', {})
            #f.__annotations__ = None # needed when faking it under Python 3 for debugging purposes
            a = getargspec(f)
            #f.__annotations__ = annotations
            return Record(args=a.args, varargs=a.varargs, varkw=a.keywords, defaults=a.defaults, kwonlyargs=[], kwonlydefaults=None, annotations=annotations)
    spec = getfullargspec(f)
    names = spec.args
    # "defaults" aligns with the LAST len(defaults) entries of args; drop
    # them, since CNTK Functions do not support optional parameters
    if spec.defaults:
        names = names[:-len(spec.defaults)]
    return (names, spec.annotations)
def map_function_arguments(params, params_dict, *args, **kwargs):
    '''
    Helper to determine the argument map for use with various call operations.
    Returns a dictionary from parameters to whatever arguments are passed.
    Accepted are both positional and keyword arguments.
    This mimics Python's argument interpretation, except that keyword arguments are not optional.
    This does not require the arguments to be Variables or Functions. It is also called by train_minibatch() and @Signature.
    '''
    # positional arguments bind in declaration order
    arg_map = dict(zip(params, args))
    # keyword arguments are matched by name
    for name, arg in kwargs.items():
        if name not in params_dict:
            raise TypeError("got an unexpected keyword argument '%s'" % name)
        param = params_dict[name]
        if param in arg_map:
            raise SyntaxError("got multiple values for argument '%s'" % name)
        arg_map[param] = arg
    assert len(arg_map) == len(params)
    return arg_map
def Signature(*args, **kwargs):
    '''
    ``@Signature`` is a decorator to implement the function-argument annotations in Python-2.7,
    as needed by the ``@Function`` decorator.
    This is only needed when you have not yet migrated to Python 3.x.
    Note: Although this is aimed at enabling ``@Function`` syntax with type annotations
    in Python 2.7, ``@Signature`` is independent of CNTK and can be used for any argument annotation.
    Args:
        *args: types of arguments of the function that this decorator is applied to, in the same order.
        **kwargs: types of arguments with optional names, e.g. `x=Tensor[42]`. Use this second form for
           longer argument lists.
    Example::
     # Python 3:
     @Function
     def f(x: Tensor[42]):
         return sigmoid(x)
     # Python 2.7:
     @Function
     @Signature(Tensor[42])
     def f(x):
         return sigmoid(x)
     # note that this:
     @Function
     @Signature(x:int)
     def sqr(x):
         return x*x
     # is identical to:
     def sqr(x):
         return x*x
     sqr.__annotations__ = {'x': int}``
    '''
    # the actual decorator applied to the def:
    def add_annotations(f):
        param_names, annotations = get_python_function_arguments(f)
        if annotations:
            raise ValueError('@Signature cannot be applied to functions that already have annotations')
        annotations = {}
        provided = len(args) + len(kwargs)
        if provided != len(param_names):
            raise TypeError("{} annotations provided for function to be decorated, but function has {} parameters".format(provided, len(param_names)))
        # map the given types onto the parameter names and implant them into f
        params_dict = {name: name for name in param_names}
        f.__annotations__ = map_function_arguments(param_names, params_dict, *args, **kwargs)
        return f
    return add_annotations
def start_profiler(dir='profiler', sync_gpu=True, reserve_mem=cntk_py.default_profiler_buffer_size):
    '''
    Start profiler to prepare performance statistics gathering. Note that
    the profiler is not enabled after start
    (`example
    <https://github.com/Microsoft/CNTK/wiki/Performance-Profiler#for-python>`_).
    Args:
        dir: directory for profiler output
        sync_gpu: whether profiler syncs CPU with GPU when timing
        reserve_mem: size in byte for profiler memory reserved
    '''
    # thin wrapper over the native API; call enable_profiler() to actually collect
    cntk_py.start_profiler(dir, sync_gpu, reserve_mem)
def stop_profiler():
    '''
    Stop profiler from gathering performance statistics and flush them to file
    '''
    # thin wrapper over the native profiler API
    cntk_py.stop_profiler()
def enable_profiler():
    '''
    Enable profiler to gather data. Note that in training_session, profiler would be enabled automatically after the first check point
    '''
    # thin wrapper over the native profiler API
    cntk_py.enable_profiler()
def disable_profiler():
    '''
    Disable profiler from gathering data.
    '''
    # thin wrapper over the native profiler API
    cntk_py.disable_profiler()
| 35.528868
| 189
| 0.651586
| 1,307
| 0.084947
| 0
| 0
| 0
| 0
| 0
| 0
| 8,569
| 0.556935
|
54b9d0d77aa935ba65cfcd82b3fdde8db5a12f2f
| 1,457
|
py
|
Python
|
data/data_utils.py
|
ivankreso/LDN
|
76740ef77fcec851f8abc2380251a9491dc0cdc3
|
[
"MIT"
] | 8
|
2020-03-28T15:42:39.000Z
|
2021-07-26T17:40:59.000Z
|
data/data_utils.py
|
ivankreso/LDN
|
76740ef77fcec851f8abc2380251a9491dc0cdc3
|
[
"MIT"
] | 1
|
2021-08-19T08:52:19.000Z
|
2021-08-19T08:52:19.000Z
|
data/data_utils.py
|
ivankreso/LDN
|
76740ef77fcec851f8abc2380251a9491dc0cdc3
|
[
"MIT"
] | 1
|
2021-12-06T08:05:59.000Z
|
2021-12-06T08:05:59.000Z
|
import math
def oversample(all_paths, per_class_split, oversample_ids, class_names):
    """Append duplicates of samples containing rare classes to each path list.

    Args:
        all_paths: list of parallel path lists; duplicates are appended to all.
        per_class_split: maps class id -> list of sample indices containing it.
        oversample_ids: class ids to oversample.
        class_names: class id -> readable name (logging only).

    Each sample index is duplicated at most once overall, even if it appears
    under several oversampled classes.
    """
    union = set()
    all_sum = 0
    print('Oversample stats:')
    print('Total images before =', len(all_paths[0]))
    for i in oversample_ids:
        duplicates = 1
        print(f'id = {i} -> {class_names[i]} : num of oversampled =', len(per_class_split[i]))
        all_sum += len(per_class_split[i])
        for idx in per_class_split[i]:
            if idx in union:
                continue  # already duplicated via an earlier class
            union.add(idx)
            for _ in range(duplicates):
                for paths in all_paths:
                    paths.append(paths[idx])
    print('Total oversampled =', all_sum, '/ union =', len(union))
    print('Total images after =', len(all_paths[0]))
def oversample_end(all_paths, num):
    """Duplicate the last ``num`` entries (in reverse order) onto each list."""
    for paths in all_paths:
        tail = [paths[-1 - k] for k in range(num)]
        paths.extend(tail)
def print_class_colors(dataset):
    """Print each class color next to its class name, one pair per line."""
    pairs = zip(dataset.class_colors, dataset.class_names)
    for color, name in pairs:
        print(color, '\t', name)
def get_pyramid_loss_scales(downsampling_factor, upsampling_factor):
    """Return the pyramid of loss scales, halving from ``downsampling_factor``.

    The list starts at ``downsampling_factor`` and has
    ``log2(downsampling_factor // upsampling_factor)`` entries in total;
    every intermediate scale must be even so it can be halved.
    """
    num_scales = int(math.log2(downsampling_factor // upsampling_factor))
    scales = [downsampling_factor]
    while len(scales) < num_scales:
        assert scales[-1] % 2 == 0
        scales.append(scales[-1] // 2)
    return scales
def get_data_bound(dataset):
    """Return (min, max) of normalized pixel values for ``dataset``.

    Bounds assume raw values in [0, 255] normalized as (x - mean) / std.
    Note both bounds divide by the *smallest* std channel, matching the
    original implementation.
    """
    smallest_std = dataset.std.min()
    lower = (-dataset.mean.max()) / smallest_std
    upper = (255 - dataset.mean.min()) / smallest_std
    return float(lower), float(upper)
| 30.354167
| 90
| 0.680165
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 154
| 0.105697
|
54bbf057df21a564d7a670875ca4d351e87df738
| 1,181
|
py
|
Python
|
src/leetcode_932_beautiful_array.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
src/leetcode_932_beautiful_array.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
src/leetcode_932_beautiful_array.py
|
sungho-joo/leetcode2github
|
ce7730ef40f6051df23681dd3c0e1e657abba620
|
[
"MIT"
] | null | null | null |
# @l2g 932 python3
# [932] Beautiful Array
# Difficulty: Medium
# https://leetcode.com/problems/beautiful-array
#
# An array nums of length n is beautiful if:
#
# nums is a permutation of the integers in the range [1, n].
# For every 0 <= i < j < n, there is no index k with i < k < j where 2 * nums[k] == nums[i] + nums[j].
#
# Given the integer n,return any beautiful array nums of length n.
# There will be at least one valid answer for the given n.
#
# Example 1:
# Input: n = 4
# Output: [2,1,4,3]
# Example 2:
# Input: n = 5
# Output: [3,1,2,5,4]
#
#
# Constraints:
#
# 1 <= n <= 1000
#
#
from typing import List
class Solution:
    def beautifulArray(self, n: int) -> List[int]:
        """Return a beautiful permutation of 1..n.

        Divide and conquer: recursively separate the elements at odd
        indices from those at even indices, so no arithmetic triple
        2*nums[k] == nums[i] + nums[j] (i < k < j) survives.
        """
        def interleave(arr):
            # one or two elements can never form a violating triple
            if len(arr) <= 2:
                return arr
            odds = arr[1::2]   # elements at odd positions
            evens = arr[0::2]  # elements at even positions
            return interleave(odds) + interleave(evens)
        return interleave(list(range(1, n + 1)))
if __name__ == "__main__":
import os
import pytest
pytest.main([os.path.join("tests", "test_932.py")])
| 21.87037
| 102
| 0.556308
| 439
| 0.371719
| 0
| 0
| 0
| 0
| 0
| 0
| 599
| 0.507197
|
54bc320185cf4b126b5fbdb33a31e831a7364c2c
| 1,209
|
py
|
Python
|
objectModel/Python/tests/cdm/cdm_collection/cdm_collection_helper_functions.py
|
aaron-emde/CDM
|
9472e9c7694821ac4a9bbe608557d2e65aabc73e
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
objectModel/Python/tests/cdm/cdm_collection/cdm_collection_helper_functions.py
|
aaron-emde/CDM
|
9472e9c7694821ac4a9bbe608557d2e65aabc73e
|
[
"CC-BY-4.0",
"MIT"
] | 3
|
2021-05-11T23:57:12.000Z
|
2021-08-04T05:03:05.000Z
|
objectModel/Python/tests/cdm/cdm_collection/cdm_collection_helper_functions.py
|
aaron-emde/CDM
|
9472e9c7694821ac4a9bbe608557d2e65aabc73e
|
[
"CC-BY-4.0",
"MIT"
] | null | null | null |
from cdm.objectmodel import CdmCorpusDefinition, CdmManifestDefinition
from cdm.storage import LocalAdapter
from cdm.enums import CdmObjectType
def generate_manifest(local_root_path: str) -> 'CdmManifestDefinition':
    """
    Creates a manifest used for the tests.

    Both the 'local' and 'cdm' namespaces are mounted on the same local
    folder adapter rooted at ``local_root_path``.
    """
    corpus = CdmCorpusDefinition()
    corpus.storage.default_namespace = 'local'
    local_adapter = LocalAdapter(root=local_root_path)
    for namespace in ('local', 'cdm'):
        corpus.storage.mount(namespace, local_adapter)
    manifest = CdmManifestDefinition(corpus.ctx, 'manifest')
    manifest.folder_path = '/'
    manifest.namespace = 'local'
    return manifest
def create_document_for_entity(cdm_corpus: 'CdmCorpusDefinition', entity: 'CdmEntityDefinition', nameSpace: str = 'local'):
    """
    For an entity, it creates a document that will contain the entity.

    The document is named ``<entity_name>.cdm.json``, appended to the root
    folder of ``nameSpace``, and returned with the entity in its definitions.
    """
    root_folder = cdm_corpus.storage.fetch_root_folder(nameSpace)
    doc_name = '{}.cdm.json'.format(entity.entity_name)
    entity_doc = cdm_corpus.ctx.corpus.make_object(CdmObjectType.DOCUMENT_DEF, doc_name, False)
    root_folder.documents.append(entity_doc)
    entity_doc.definitions.append(entity)
    return entity_doc
| 35.558824
| 127
| 0.746071
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 287
| 0.237386
|
54bc883a34e91f4283ceaf8207e99c37307465c6
| 894
|
py
|
Python
|
asynchronous/py27/asynchronous/producer_consumer/async_eventlet.py
|
fs714/concurrency-example
|
fbff041804b9c46fb7f21ebbae22acff745c7b0c
|
[
"Apache-2.0"
] | null | null | null |
asynchronous/py27/asynchronous/producer_consumer/async_eventlet.py
|
fs714/concurrency-example
|
fbff041804b9c46fb7f21ebbae22acff745c7b0c
|
[
"Apache-2.0"
] | null | null | null |
asynchronous/py27/asynchronous/producer_consumer/async_eventlet.py
|
fs714/concurrency-example
|
fbff041804b9c46fb7f21ebbae22acff745c7b0c
|
[
"Apache-2.0"
] | 1
|
2020-03-10T15:47:05.000Z
|
2020-03-10T15:47:05.000Z
|
import eventlet
from eventlet.green import urllib2
import logging
logging.basicConfig()
logger = logging.getLogger(__file__)
logger.setLevel(logging.DEBUG)
def consumer(task_queue):
    """Green-thread worker: loop forever, pulling tasks off the queue,
    running each one, and acknowledging it so ``join()`` can complete."""
    while True:
        task = task_queue.get()
        task()
        task_queue.task_done()
class Task(object):
    """Callable unit of work: fetch ``self.url`` and log the response body."""

    def __init__(self, url):
        self.url = url

    def __call__(self):
        body = urllib2.urlopen(self.url).read()
        logger.info('In green thread: ' + body)
        return body
if __name__ == '__main__':
    # Producer/consumer demo: green threads consume HTTP-fetch tasks from
    # an eventlet queue (xrange: this file deliberately targets Python 2).
    url = 'http://127.0.0.1/1'
    num_consumers = 10
    num_tasks = 100
    task_queue = eventlet.Queue()
    pool = eventlet.GreenPool()
    # Start the consumers first; they block on task_queue.get().
    for i in xrange(num_consumers):
        pool.spawn(consumer, task_queue)
    # Produce the tasks; consumers pick them up concurrently.
    for i in xrange(num_tasks):
        task_queue.put(Task(url))
        logger.info('async_call finish loop ' + str(i))
    # Block until every queued task has been marked done.
    task_queue.join()
| 21.285714
| 55
| 0.644295
| 209
| 0.233781
| 0
| 0
| 0
| 0
| 0
| 0
| 74
| 0.082774
|
54bcc1399279abf79ea8c42b52f38e4ad74979ae
| 1,155
|
py
|
Python
|
models.py
|
zhangjingqiang/qiang-tools
|
73fcb896bfec14f1ed668a1ef81526d80c80082f
|
[
"MIT"
] | null | null | null |
models.py
|
zhangjingqiang/qiang-tools
|
73fcb896bfec14f1ed668a1ef81526d80c80082f
|
[
"MIT"
] | null | null | null |
models.py
|
zhangjingqiang/qiang-tools
|
73fcb896bfec14f1ed668a1ef81526d80c80082f
|
[
"MIT"
] | null | null | null |
from flask.ext.login import UserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from app import db
class User(UserMixin, db.Model):
    """Application user; the password is stored only as a werkzeug hash."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), unique=True, index=True)
    password_hash = db.Column(db.String(128))

    def __init__(self, username, password):
        self.username = username
        # Goes through the write-only property below, which hashes it.
        self.password = password

    @property
    def password(self):
        # Write-only: the plaintext is never retained.
        raise AttributeError('password is not readable')

    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Return True when *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def __repr__(self):
        return '<User {!r}>'.format(self.username)
class Tool(db.Model):
    """A single tool record."""
    __tablename__ = 'tools'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String())

    def __repr__(self):
        return '<Tool %s>' % self.id
| 26.25
| 73
| 0.665801
| 1,019
| 0.882251
| 0
| 0
| 202
| 0.174892
| 0
| 0
| 150
| 0.12987
|
54bd473259faa4301d10d34795bb5bf05e6048e5
| 32,426
|
py
|
Python
|
sysinv/sysinv/sysinv/sysinv/api/controllers/v1/controller_fs.py
|
etaivan/stx-config
|
281e1f110973f96e077645fb01f67b646fc253cc
|
[
"Apache-2.0"
] | null | null | null |
sysinv/sysinv/sysinv/sysinv/api/controllers/v1/controller_fs.py
|
etaivan/stx-config
|
281e1f110973f96e077645fb01f67b646fc253cc
|
[
"Apache-2.0"
] | null | null | null |
sysinv/sysinv/sysinv/sysinv/api/controllers/v1/controller_fs.py
|
etaivan/stx-config
|
281e1f110973f96e077645fb01f67b646fc253cc
|
[
"Apache-2.0"
] | null | null | null |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 UnitedStack Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2013-2018 Wind River Systems, Inc.
#
import jsonpatch
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from sysinv.api.controllers.v1 import base
from sysinv.api.controllers.v1 import collection
from sysinv.api.controllers.v1 import link
from sysinv.api.controllers.v1 import types
from sysinv.api.controllers.v1 import utils
from sysinv.common import constants
from sysinv.common import exception
from sysinv.common import health
from sysinv.common import utils as cutils
from sysinv import objects
from sysinv.openstack.common import log
from sysinv.openstack.common.gettextutils import _
from fm_api import constants as fm_constants
from sysinv.common.storage_backend_conf import StorageBackendConfig
LOG = log.getLogger(__name__)
class ControllerFsPatchType(types.JsonPatchType):
    """JSON-patch type for controller_fs; no attribute is mandatory."""

    @staticmethod
    def mandatory_attrs():
        return []
class ControllerFs(base.APIBase):
    """API representation of a controller_fs.

    This class enforces type checking and value constraints, and converts
    between the internal object model and the API representation of
    a ControllerFs.

    The database GiB of controller_fs - maps to
        /var/lib/postgresql (pgsql-lv)
    The image GiB of controller_fs - maps to
        /opt/cgcs (cgcs-lv)
    The image conversion GiB of controller_fs - maps to
        /opt/img-conversions (img-conversions-lv)
    The backup GiB of controller_fs - maps to
        /opt/backups (backup-lv)
    The scratch GiB of controller_fs - maps to
        /scratch (scratch-lv)
    The extension GiB of controller_fs - maps to
        /opt/extension (extension-lv)
    The gnocchi GiB of controller_fs - maps to
        /opt/gnocchi (gnocchi-lv)
    """
    # NOTE: wsme introspects these class attributes to build the API schema;
    # the bare strings after some attributes are wsme attribute docs.
    uuid = types.uuid
    "Unique UUID for this controller_fs"
    name = wsme.wsattr(wtypes.text, mandatory=True)
    size = int
    logical_volume = wsme.wsattr(wtypes.text)
    replicated = bool
    state = wtypes.text
    "The state of controller_fs indicates a drbd file system resize operation"
    forisystemid = int
    "The isystemid that this controller_fs belongs to"
    isystem_uuid = types.uuid
    "The UUID of the system this controller_fs belongs to"
    action = wtypes.text
    "Represent the action on the controller_fs"
    links = [link.Link]
    "A list containing a self link and associated controller_fs links"
    created_at = wtypes.datetime.datetime
    updated_at = wtypes.datetime.datetime
    def __init__(self, **kwargs):
        # Mirror every field of the RPC-layer object onto this API object.
        self.fields = list(objects.controller_fs.fields.keys())
        for k in self.fields:
            setattr(self, k, kwargs.get(k))
        # 'action' is API-only: it is not part of the DB/RPC object fields.
        self.fields.append('action')
        setattr(self, 'action', kwargs.get('action', None))
    @classmethod
    def convert_with_links(cls, rpc_controller_fs, expand=True):
        """Build an API object from an RPC object, optionally collapsed to
        the summary field set, with self/bookmark links attached."""
        controller_fs = ControllerFs(**rpc_controller_fs.as_dict())
        if not expand:
            controller_fs.unset_fields_except(['created_at',
                                               'updated_at',
                                               'uuid',
                                               'name',
                                               'size',
                                               'logical_volume',
                                               'replicated',
                                               'state',
                                               'isystem_uuid'])
        # never expose the isystem_id attribute
        controller_fs.isystem_id = wtypes.Unset
        # we display the cgcs file system as glance to the customer
        if controller_fs.name == constants.FILESYSTEM_NAME_CGCS:
            controller_fs.name = constants.FILESYSTEM_DISPLAY_NAME_CGCS
        # never expose the isystem_id attribute, allow exposure for now
        # controller_fs.forisystemid = wtypes.Unset
        controller_fs.links = [
            link.Link.make_link('self', pecan.request.host_url,
                                'controller_fs', controller_fs.uuid),
            link.Link.make_link('bookmark', pecan.request.host_url,
                                'controller_fs', controller_fs.uuid,
                                bookmark=True)
        ]
        return controller_fs
class ControllerFsCollection(collection.Collection):
    """API representation of a collection of ControllerFs."""
    # wsme introspects this class attribute; the bare string is its doc.
    controller_fs = [ControllerFs]
    "A list containing ControllerFs objects"
    def __init__(self, **kwargs):
        # The collection key under which items are serialized.
        self._type = 'controller_fs'
    @classmethod
    def convert_with_links(cls, rpc_controller_fs, limit, url=None,
                           expand=False, **kwargs):
        """Wrap each RPC-layer object in the API type and attach the
        pagination 'next' link."""
        collection = ControllerFsCollection()
        collection.controller_fs = [ControllerFs.convert_with_links(p, expand)
                                    for p in rpc_controller_fs]
        collection.next = collection.get_next(limit, url=url, **kwargs)
        return collection
def _total_size_controller_multi_fs(controller_fs_new_list):
    """Sum the sizes (GiB) of the given controller filesystems.

    The database filesystem is counted twice (it is doubled for upgrades);
    every other filesystem contributes its plain size.
    """
    total_size = 0
    for fs in controller_fs_new_list:
        weight = 2 if fs.name == constants.FILESYSTEM_NAME_DATABASE else 1
        total_size += weight * fs.size
    return total_size
def _total_size_controller_fs(controller_fs_new, controller_fs_list):
    """Sum configured controller filesystem sizes in GiB.

    When *controller_fs_new* names one of the entries in
    *controller_fs_list*, its new size is used instead of the stored one.
    The database filesystem is counted twice (doubled for upgrades).
    """
    total_size = 0
    for fs in controller_fs_list:
        effective = fs['size']
        if controller_fs_new and fs['name'] == controller_fs_new['name']:
            effective = controller_fs_new['size']
        if fs['name'] == "database":
            effective = effective * 2
        total_size += effective
    LOG.info(
        "_total_size_controller_fs total filesysem size %s" % total_size)
    return total_size
def _check_relative_controller_multi_fs(controller_fs_new_list):
    """
    This function verifies the relative controller_fs sizes.
    :param controller_fs_new_list:
    :return: None. Raise Client exception on failure.
    """
    if cutils.is_virtual():
        return
    # Initialize all sizes up front so a missing filesystem entry cannot
    # leave these names unbound: the original code raised NameError when,
    # e.g., the backup filesystem was absent from the list (the sibling
    # _check_relative_controller_fs already initializes them to 0).
    backup_gib = 0
    database_gib = 0
    cgcs_gib = 0
    backup_gib_min = constants.BACKUP_OVERHEAD
    for fs in controller_fs_new_list:
        if fs.name == constants.FILESYSTEM_NAME_DATABASE:
            database_gib = fs.size
            backup_gib_min += fs.size
        elif fs.name == constants.FILESYSTEM_NAME_CGCS:
            cgcs_gib = fs.size
            backup_gib_min += fs.size
        elif fs.name == constants.FILESYSTEM_NAME_BACKUP:
            backup_gib = fs.size
    # The backup filesystem must be able to hold glance + database plus a
    # fixed overhead.
    if backup_gib < backup_gib_min:
        raise wsme.exc.ClientSideError(_("backup size of %d is "
                                         "insufficient. "
                                         "Minimum backup size of %d is "
                                         "required based upon glance size %d "
                                         "and database size %d. "
                                         "Rejecting modification "
                                         "request." %
                                         (backup_gib,
                                          backup_gib_min,
                                          cgcs_gib,
                                          database_gib
                                          )))
def _check_controller_multi_fs(controller_fs_new_list,
                               ceph_mon_gib_new=None,
                               cgtsvg_growth_gib=None):
    """Validate a multi-filesystem resize against the cgts-vg free space.

    Raises ClientSideError when the relative sizes are inconsistent or the
    requested growth exceeds the smallest free space across controllers.
    """
    ceph_mons = pecan.request.dbapi.ceph_mon_get_list()
    # Default the ceph-mon size from the first configured monitor, if any.
    if not ceph_mon_gib_new:
        if ceph_mons:
            ceph_mon_gib_new = ceph_mons[0].ceph_mon_gib
        else:
            ceph_mon_gib_new = 0
    LOG.info("_check_controller__multi_fs ceph_mon_gib_new = %s" % ceph_mon_gib_new)
    cgtsvg_max_free_GiB = _get_controller_cgtsvg_limit()
    LOG.info("_check_controller_multi_fs cgtsvg_max_free_GiB = %s " %
             cgtsvg_max_free_GiB)
    _check_relative_controller_multi_fs(controller_fs_new_list)
    LOG.info("_check_controller_multi_fs ceph_mon_gib_new = %s" % ceph_mon_gib_new)
    rootfs_configured_size_GiB = \
        _total_size_controller_multi_fs(controller_fs_new_list) + ceph_mon_gib_new
    LOG.info("_check_controller_multi_fs rootfs_configured_size_GiB = %s" %
             rootfs_configured_size_GiB)
    # Reject the resize if the requested growth does not fit in the free
    # space of the more constrained controller.
    if cgtsvg_growth_gib and (cgtsvg_growth_gib > cgtsvg_max_free_GiB):
        if ceph_mon_gib_new:
            msg = _(
                "Total target growth size %s GiB for database "
                "(doubled for upgrades), glance, img-conversions, "
                "scratch, backup, extension and ceph-mon exceeds "
                "growth limit of %s GiB." %
                (cgtsvg_growth_gib, cgtsvg_max_free_GiB)
            )
        else:
            msg = _(
                "Total target growth size %s GiB for database "
                "(doubled for upgrades), glance, img-conversions, scratch, "
                "backup and extension exceeds growth limit of %s GiB." %
                (cgtsvg_growth_gib, cgtsvg_max_free_GiB)
            )
        raise wsme.exc.ClientSideError(msg)
def _check_relative_controller_fs(controller_fs_new, controller_fs_list):
    """
    This function verifies the relative controller_fs sizes.
    :param controller_fs_new: dict with the proposed 'name'/'size' change
    :param controller_fs_list: current filesystem rows (dict-like)
    :return: None. Raise Client exception on failure.
    """
    if cutils.is_virtual():
        return
    backup_gib = 0
    database_gib = 0
    cgcs_gib = 0
    for fs in controller_fs_list:
        # Substitute the proposed new size for the filesystem being changed.
        if controller_fs_new and fs['name'] == controller_fs_new['name']:
            fs['size'] = controller_fs_new['size']
        if fs['name'] == "backup":
            backup_gib = fs['size']
        elif fs['name'] == constants.DRBD_CGCS:
            cgcs_gib = fs['size']
        elif fs['name'] == "database":
            database_gib = fs['size']
    # Before the backup filesystem exists there is nothing to validate.
    if backup_gib == 0:
        LOG.info(
            "_check_relative_controller_fs backup filesystem not yet setup")
        return
    # Required mininum backup filesystem size
    backup_gib_min = cgcs_gib + database_gib + constants.BACKUP_OVERHEAD
    if backup_gib < backup_gib_min:
        raise wsme.exc.ClientSideError(_("backup size of %d is "
                                         "insufficient. "
                                         "Minimum backup size of %d is "
                                         "required based on upon "
                                         "glance=%d and database=%d and "
                                         "backup overhead of %d. "
                                         "Rejecting modification "
                                         "request." %
                                         (backup_gib,
                                          backup_gib_min,
                                          cgcs_gib,
                                          database_gib,
                                          constants.BACKUP_OVERHEAD
                                          )))
def _check_controller_state():
    """
    This function verifies the administrative, operational, availability of
    each controller.

    Returns True when all controllers are unlocked/enabled/available, or
    degraded solely by filesystem-usage alarms (which the resize itself
    will clear). Raises ClientSideError otherwise.
    """
    chosts = pecan.request.dbapi.ihost_get_by_personality(
        constants.CONTROLLER)
    for chost in chosts:
        if (chost.administrative != constants.ADMIN_UNLOCKED or
                chost.availability != constants.AVAILABILITY_AVAILABLE or
                chost.operational != constants.OPERATIONAL_ENABLED):
            # A node can become degraded due to not free space available in a FS
            # and thus block the resize operation. If the only alarm that degrades
            # a controller node is a filesystem alarm, we shouldn't block the resize
            # as the resize itself will clear the degrade.
            health_helper = health.Health(pecan.request.dbapi)
            degrade_alarms = health_helper.get_alarms_degrade(
                pecan.request.context,
                alarm_ignore_list=[fm_constants.FM_ALARM_ID_FS_USAGE],
                entity_instance_id_filter="controller-")
            allowed_resize = False
            # Degraded with no non-FS alarms: allow the resize to proceed.
            if (not degrade_alarms and
                    chost.availability == constants.AVAILABILITY_DEGRADED):
                allowed_resize = True
            if not allowed_resize:
                alarm_explanation = ""
                if degrade_alarms:
                    alarm_explanation = "Check alarms with the following IDs: %s" % str(degrade_alarms)
                raise wsme.exc.ClientSideError(
                    _("This operation requires controllers to be %s, %s, %s. "
                      "Current status is %s, %s, %s. %s." %
                      (constants.ADMIN_UNLOCKED, constants.OPERATIONAL_ENABLED,
                       constants.AVAILABILITY_AVAILABLE,
                       chost.administrative, chost.operational,
                       chost.availability, alarm_explanation)))
    return True
def _get_controller_cgtsvg_limit():
    """Calculate space for controller fs

    returns: cgtsvg_max_free_GiB — the free cgts-vg space (GiB) of the more
    constrained controller (the minimum when both report free space).

    The original implementation duplicated the per-controller probing logic
    line-for-line for controller-0 and controller-1; it is factored into a
    single nested helper here.
    """
    def _cgtsvg_free_mib(chost, hostname_label):
        # Free space (MiB) in cgts-vg on one controller host. Raises
        # ClientSideError while any cgts-vg physical volume on the host is
        # still unprovisioned; returns 0 when the volume group is not found.
        ipvs = pecan.request.dbapi.ipv_get_by_ihost(chost.uuid)
        for ipv in ipvs:
            if (ipv.lvm_vg_name == constants.LVG_CGTS_VG and
                    ipv.pv_state != constants.PROVISIONED):
                msg = _("Cannot resize filesystem. There are still "
                        "unprovisioned physical volumes on %s." %
                        hostname_label)
                raise wsme.exc.ClientSideError(msg)
        ilvgs = pecan.request.dbapi.ilvg_get_by_ihost(chost.uuid)
        for ilvg in ilvgs:
            if (ilvg.lvm_vg_name == constants.LVG_CGTS_VG and
                    ilvg.lvm_vg_size and ilvg.lvm_vg_total_pe):
                # free = total_size * free_pe / total_pe, converted to MiB.
                return (int(ilvg.lvm_vg_size) *
                        int(ilvg.lvm_vg_free_pe) / int(
                            ilvg.lvm_vg_total_pe)) / (1024 * 1024)
        return 0

    cgtsvg0_free_mib = 0
    cgtsvg1_free_mib = 0
    cgtsvg_max_free_GiB = 0
    chosts = pecan.request.dbapi.ihost_get_by_personality(
        constants.CONTROLLER)
    for chost in chosts:
        if chost.hostname == constants.CONTROLLER_0_HOSTNAME:
            cgtsvg0_free_mib = _cgtsvg_free_mib(chost, 'controller-0')
        else:
            cgtsvg1_free_mib = _cgtsvg_free_mib(chost, 'controller-1')
    LOG.info("_get_controller_cgtsvg_limit cgtsvg0_free_mib=%s, "
             "cgtsvg1_free_mib=%s" % (cgtsvg0_free_mib, cgtsvg1_free_mib))
    if cgtsvg0_free_mib > 0 and cgtsvg1_free_mib > 0:
        # Both controllers report free space: the smaller one is the limit.
        cgtsvg_max_free_GiB = min(cgtsvg0_free_mib, cgtsvg1_free_mib) / 1024
        LOG.info("min of cgtsvg0_free_mib=%s and cgtsvg1_free_mib=%s is "
                 "cgtsvg_max_free_GiB=%s" %
                 (cgtsvg0_free_mib, cgtsvg1_free_mib, cgtsvg_max_free_GiB))
    elif cgtsvg1_free_mib > 0:
        cgtsvg_max_free_GiB = cgtsvg1_free_mib / 1024
    else:
        cgtsvg_max_free_GiB = cgtsvg0_free_mib / 1024
    LOG.info("SYS_I filesystem limits cgtsvg0_free_mib=%s, "
             "cgtsvg1_free_mib=%s, cgtsvg_max_free_GiB=%s"
             % (cgtsvg0_free_mib, cgtsvg1_free_mib, cgtsvg_max_free_GiB))
    return cgtsvg_max_free_GiB
def _check_controller_fs(controller_fs_new=None,
                         ceph_mon_gib_new=None,
                         cgtsvg_growth_gib=None,
                         controller_fs_list=None):
    """Validate a single controller filesystem (or ceph-mon) resize against
    the relative-size rules and the cgts-vg free-space limit.

    Raises ClientSideError when the requested growth exceeds the free space
    of the more constrained controller.
    """
    ceph_mons = pecan.request.dbapi.ceph_mon_get_list()
    if not controller_fs_list:
        controller_fs_list = pecan.request.dbapi.controller_fs_get_list()
    if not ceph_mon_gib_new:
        # No new ceph-mon size requested: keep the current one (if any).
        if ceph_mons:
            ceph_mon_gib_new = ceph_mons[0].ceph_mon_gib
        else:
            ceph_mon_gib_new = 0
    else:
        # A ceph-mon resize: the growth is the delta against the current size.
        if ceph_mons:
            cgtsvg_growth_gib = ceph_mon_gib_new - ceph_mons[0].ceph_mon_gib
        else:
            cgtsvg_growth_gib = ceph_mon_gib_new
    cgtsvg_max_free_GiB = _get_controller_cgtsvg_limit()
    LOG.info("_check_controller_fs ceph_mon_gib_new = %s" % ceph_mon_gib_new)
    LOG.info("_check_controller_fs cgtsvg_growth_gib = %s" % cgtsvg_growth_gib)
    LOG.info("_check_controller_fs cgtsvg_max_free_GiB = %s" % cgtsvg_max_free_GiB)
    _check_relative_controller_fs(controller_fs_new, controller_fs_list)
    rootfs_configured_size_GiB = \
        _total_size_controller_fs(controller_fs_new,
                                  controller_fs_list) + ceph_mon_gib_new
    LOG.info("_check_controller_fs rootfs_configured_size_GiB = %s" %
             rootfs_configured_size_GiB)
    if cgtsvg_growth_gib and (cgtsvg_growth_gib > cgtsvg_max_free_GiB):
        if ceph_mon_gib_new:
            msg = _(
                "Total target growth size %s GiB for database "
                "(doubled for upgrades), glance, img-conversions, "
                "scratch, backup, extension and ceph-mon exceeds "
                "growth limit of %s GiB." %
                (cgtsvg_growth_gib, cgtsvg_max_free_GiB)
            )
        else:
            msg = _(
                "Total target growth size %s GiB for database "
                "(doubled for upgrades), glance, img-conversions, scratch, "
                "backup and extension exceeds growth limit of %s GiB." %
                (cgtsvg_growth_gib, cgtsvg_max_free_GiB)
            )
        raise wsme.exc.ClientSideError(msg)
def _check_controller_multi_fs_data(context, controller_fs_list_new,
                                    modified_fs):
    """ Check controller filesystem data and return growth

    returns: cgtsvg_growth_gib — total GiB of additional cgts-vg space the
    requested sizes need (database deltas counted twice).
    Raises ClientSideError when a current LV size cannot be determined, a
    requested size shrinks an LV, or img-conversions is modified without a
    cinder backend.
    """
    cgtsvg_growth_gib = 0
    # Check if we need img_conversions
    img_conversion_required = False
    lvdisplay_keys = [constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_DATABASE],
                      constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_CGCS],
                      constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_BACKUP],
                      constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_SCRATCH],
                      constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_GNOCCHI]]
    # On primary region, img-conversions always exists in controller_fs DB table.
    # On secondary region, if both glance and cinder are sharing from the primary
    # region, img-conversions won't exist in controller_fs DB table. We already
    # have semantic check not to allow img-conversions resizing.
    if (StorageBackendConfig.has_backend(pecan.request.dbapi, constants.SB_TYPE_LVM) or
            StorageBackendConfig.has_backend(pecan.request.dbapi, constants.SB_TYPE_CEPH)):
        img_conversion_required = True
        lvdisplay_keys.append(constants.FILESYSTEM_LV_DICT[constants.FILESYSTEM_NAME_IMG_CONVERSIONS])
    if (constants.FILESYSTEM_NAME_IMG_CONVERSIONS in modified_fs and
            not img_conversion_required):
        raise wsme.exc.ClientSideError(
            _("%s is not modifiable: no cinder backend is "
              "currently configured.") % constants.FILESYSTEM_NAME_IMG_CONVERSIONS)
    # Current LV sizes as reported by the conductor.
    lvdisplay_dict = pecan.request.rpcapi.get_controllerfs_lv_sizes(context)
    for key in lvdisplay_keys:
        if not lvdisplay_dict.get(key, None):
            raise wsme.exc.ClientSideError(_("Unable to determine the "
                                             "current size of %s. "
                                             "Rejecting modification "
                                             "request." % key))
    for fs in controller_fs_list_new:
        lv = fs.logical_volume
        if lvdisplay_dict.get(lv, None):
            orig = int(float(lvdisplay_dict[lv]))
            new = int(fs.size)
            # The database LV is physically double the configured size.
            if fs.name == constants.FILESYSTEM_NAME_DATABASE:
                orig = orig / 2
            # Shrinking is not supported.
            if orig > new:
                raise wsme.exc.ClientSideError(_("'%s' must be at least: "
                                                 "%s" % (fs.name, orig)))
            if fs.name == constants.FILESYSTEM_NAME_DATABASE:
                cgtsvg_growth_gib += 2 * (new - orig)
            else:
                cgtsvg_growth_gib += (new - orig)
    LOG.info("_check_controller_multi_fs_data cgtsvg_growth_gib=%s" %
             cgtsvg_growth_gib)
    return cgtsvg_growth_gib
LOCK_NAME = 'ControllerFsController'
class ControllerFsController(rest.RestController):
    """REST controller for ControllerFs."""
    # Extra routes beyond the standard REST verbs.
    _custom_actions = {
        'detail': ['GET'],
        'update_many': ['PUT'],
    }
    def __init__(self, from_isystems=False):
        # True when mounted under /isystems/<uuid>/..., which makes the
        # isystem_uuid argument mandatory for collection lookups.
        self._from_isystems = from_isystems
    def _get_controller_fs_collection(self, isystem_uuid, marker, limit,
                                      sort_key, sort_dir, expand=False,
                                      resource_url=None):
        # Shared implementation behind get_all() and detail().
        if self._from_isystems and not isystem_uuid:
            raise exception.InvalidParameterValue(_(
                "System id not specified."))
        limit = utils.validate_limit(limit)
        sort_dir = utils.validate_sort_dir(sort_dir)
        marker_obj = None
        if marker:
            marker_obj = objects.controller_fs.get_by_uuid(
                pecan.request.context, marker)
        if isystem_uuid:
            controller_fs = pecan.request.dbapi.controller_fs_get_by_isystem(
                isystem_uuid, limit,
                marker_obj,
                sort_key=sort_key,
                sort_dir=sort_dir)
        else:
            controller_fs = \
                pecan.request.dbapi.controller_fs_get_list(limit, marker_obj,
                                                           sort_key=sort_key,
                                                           sort_dir=sort_dir)
        return ControllerFsCollection.convert_with_links(controller_fs, limit,
                                                         url=resource_url,
                                                         expand=expand,
                                                         sort_key=sort_key,
                                                         sort_dir=sort_dir)
    @wsme_pecan.wsexpose(ControllerFsCollection, types.uuid, types.uuid, int,
                         wtypes.text, wtypes.text)
    def get_all(self, isystem_uuid=None, marker=None, limit=None,
                sort_key='id', sort_dir='asc'):
        """Retrieve a list of controller_fs."""
        return self._get_controller_fs_collection(isystem_uuid, marker, limit,
                                                  sort_key, sort_dir)
    @wsme_pecan.wsexpose(ControllerFsCollection, types.uuid, types.uuid, int,
                         wtypes.text, wtypes.text)
    def detail(self, isystem_uuid=None, marker=None, limit=None,
               sort_key='id', sort_dir='asc'):
        """Retrieve a list of controller_fs with detail."""
        # Only reachable via the .../controller_fs/detail route.
        parent = pecan.request.path.split('/')[:-1][-1]
        if parent != "controller_fs":
            raise exception.HTTPNotFound
        expand = True
        resource_url = '/'.join(['controller_fs', 'detail'])
        return self._get_controller_fs_collection(isystem_uuid, marker, limit,
                                                  sort_key, sort_dir,
                                                  expand, resource_url)
    @wsme_pecan.wsexpose(ControllerFs, types.uuid)
    def get_one(self, controller_fs_uuid):
        """Retrieve information about the given controller_fs."""
        if self._from_isystems:
            raise exception.OperationNotPermitted
        rpc_controller_fs = \
            objects.controller_fs.get_by_uuid(pecan.request.context,
                                              controller_fs_uuid)
        return ControllerFs.convert_with_links(rpc_controller_fs)
    @cutils.synchronized(LOCK_NAME)
    @wsme.validate(types.uuid, [ControllerFsPatchType])
    @wsme_pecan.wsexpose(ControllerFs, types.uuid,
                         body=[ControllerFsPatchType])
    def patch(self, controller_fs_uuid, patch):
        """Update the current controller_fs configuration."""
        # Single-resource PATCH is disabled; use update_many (PUT) instead.
        raise exception.OperationNotPermitted
    @cutils.synchronized(LOCK_NAME)
    @wsme.validate(types.uuid, [ControllerFsPatchType])
    @wsme_pecan.wsexpose(ControllerFs, types.uuid, body=[[ControllerFsPatchType]])
    def update_many(self, isystem_uuid, patch):
        """Update the current controller_fs configuration."""
        if self._from_isystems and not isystem_uuid:
            raise exception.InvalidParameterValue(_(
                "System id not specified."))
        # Validate input filesystem names
        controller_fs_list = pecan.request.dbapi.controller_fs_get_list()
        valid_fs_list = []
        if controller_fs_list:
            valid_fs_list = {fs.name: fs.size for fs in controller_fs_list}
        reinstall_required = False
        reboot_required = False
        force_resize = False
        modified_fs = []
        # First pass: extract (and strip from the patch) the optional
        # '/action' entry that requests a forced resize.
        for p_list in patch:
            p_obj_list = jsonpatch.JsonPatch(p_list)
            for p_obj in p_obj_list:
                if p_obj['path'] == '/action':
                    value = p_obj['value']
                    patch.remove(p_list)
                    if value == constants.FORCE_ACTION:
                        force_resize = True
                        LOG.info("Force action resize selected")
                    break
        # Second pass: semantic validation of each name/size pair. The
        # display name 'glance' maps back to the internal 'cgcs' name.
        for p_list in patch:
            p_obj_list = jsonpatch.JsonPatch(p_list)
            for p_obj in p_obj_list:
                if p_obj['path'] == '/name':
                    fs_display_name = p_obj['value']
                    if fs_display_name == constants.FILESYSTEM_DISPLAY_NAME_CGCS:
                        fs_name = constants.FILESYSTEM_NAME_CGCS
                    else:
                        fs_name = fs_display_name
                elif p_obj['path'] == '/size':
                    size = p_obj['value']
            if fs_name not in valid_fs_list.keys() or fs_display_name == constants.FILESYSTEM_NAME_CGCS:
                msg = _("ControllerFs update failed: invalid filesystem "
                        "'%s' " % fs_display_name)
                raise wsme.exc.ClientSideError(msg)
            elif not cutils.is_int_like(size):
                msg = _("ControllerFs update failed: filesystem '%s' "
                        "size must be an integer " % fs_display_name)
                raise wsme.exc.ClientSideError(msg)
            elif int(size) <= int(valid_fs_list[fs_name]):
                # Only growing a filesystem is supported.
                msg = _("ControllerFs update failed: size for filesystem '%s' "
                        "should be bigger than %s " % (
                            fs_display_name, valid_fs_list[fs_name]))
                raise wsme.exc.ClientSideError(msg)
            elif (fs_name == constants.FILESYSTEM_NAME_CGCS and
                  StorageBackendConfig.get_backend(pecan.request.dbapi,
                                                   constants.CINDER_BACKEND_CEPH)):
                if force_resize:
                    LOG.warn("Force resize ControllerFs: %s, though Ceph "
                             "storage backend is configured" % fs_display_name)
                else:
                    raise wsme.exc.ClientSideError(
                        _("ControllerFs %s size is not modifiable as Ceph is "
                          "configured. Update size via Ceph Storage Pools." %
                          fs_display_name))
            if fs_name in constants.SUPPORTED_REPLICATED_FILEYSTEM_LIST:
                # Replicated (drbd-backed) filesystems cannot be resized
                # while a previous resize is still syncing.
                if utils.is_drbd_fs_resizing():
                    raise wsme.exc.ClientSideError(
                        _("A drbd sync operation is currently in progress. "
                          "Retry again later.")
                    )
            modified_fs += [fs_name]
        # Build the prospective filesystem list with the patches applied.
        controller_fs_list_new = []
        for fs in controller_fs_list:
            replaced = False
            for p_list in patch:
                p_obj_list = jsonpatch.JsonPatch(p_list)
                for p_obj in p_obj_list:
                    if p_obj['path'] == '/name':
                        if p_obj['value'] == constants.FILESYSTEM_DISPLAY_NAME_CGCS:
                            p_obj['value'] = constants.FILESYSTEM_NAME_CGCS
                        if p_obj['value'] == fs['name']:
                            try:
                                controller_fs_list_new += [ControllerFs(
                                    **jsonpatch.apply_patch(fs.as_dict(), p_obj_list))]
                                replaced = True
                                break
                            except utils.JSONPATCH_EXCEPTIONS as e:
                                raise exception.PatchError(patch=p_list, reason=e)
                if replaced:
                    break
            if not replaced:
                controller_fs_list_new += [fs]
        cgtsvg_growth_gib = _check_controller_multi_fs_data(
            pecan.request.context,
            controller_fs_list_new,
            modified_fs)
        if _check_controller_state():
            _check_controller_multi_fs(controller_fs_list_new,
                                       cgtsvg_growth_gib=cgtsvg_growth_gib)
            # Persist the new sizes; replicated filesystems are flagged as
            # resizing until drbd finishes syncing.
            for fs in controller_fs_list_new:
                if fs.name in modified_fs:
                    value = {'size': fs.size}
                    if fs.replicated:
                        value.update({'state': constants.CONTROLLER_FS_RESIZING_IN_PROGRESS})
                    pecan.request.dbapi.controller_fs_update(fs.uuid, value)
        try:
            # perform rpc to conductor to perform config apply
            pecan.request.rpcapi.update_storage_config(
                pecan.request.context,
                update_storage=False,
                reinstall_required=reinstall_required,
                reboot_required=reboot_required,
                filesystem_list=modified_fs
            )
        except Exception as e:
            msg = _("Failed to update filesystem size ")
            LOG.error("%s with patch %s with exception %s" % (msg, patch, e))
            raise wsme.exc.ClientSideError(msg)
    @wsme_pecan.wsexpose(None, types.uuid, status_code=204)
    def delete(self, controller_fs_uuid):
        """Delete a controller_fs."""
        raise exception.OperationNotPermitted
    @cutils.synchronized(LOCK_NAME)
    @wsme_pecan.wsexpose(ControllerFs, body=ControllerFs)
    def post(self, controllerfs):
        """Create a new controller_fs."""
        raise exception.OperationNotPermitted
| 40.481898
| 104
| 0.587923
| 14,463
| 0.446031
| 0
| 0
| 10,425
| 0.321501
| 0
| 0
| 7,804
| 0.240671
|
54bd765684733907c0e0f4fdff1bc9c5e51272ef
| 1,298
|
py
|
Python
|
tests/test_label_smoothing_ce.py
|
waking95/easy-bert
|
576678343c251a134748941d1aa5e3368786337e
|
[
"MIT"
] | 12
|
2021-12-15T06:08:28.000Z
|
2022-03-25T06:27:38.000Z
|
tests/test_label_smoothing_ce.py
|
waking95/easy-bert
|
576678343c251a134748941d1aa5e3368786337e
|
[
"MIT"
] | null | null | null |
tests/test_label_smoothing_ce.py
|
waking95/easy-bert
|
576678343c251a134748941d1aa5e3368786337e
|
[
"MIT"
] | 1
|
2022-02-10T02:59:51.000Z
|
2022-02-10T02:59:51.000Z
|
import unittest
import torch
from easy_bert.losses.label_smoothing_loss import LabelSmoothingCrossEntropy
class MyTestCase(unittest.TestCase):
    """Smoke tests for LabelSmoothingCrossEntropy.

    The loss values are printed rather than asserted, since inputs are random.
    """

    def test(self):
        """Default settings on a (batch_size=4, label_size=2) batch."""
        print('test~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
        criterion = LabelSmoothingCrossEntropy()
        scores = torch.randn(4, 2)  # (batch_size=4, label_size=2)
        labels = torch.tensor([0, 1, 1, 0])
        print(criterion(scores, labels))

    def test_ignore_index(self):
        """ignore_index=-1 skips padded positions when averaging the loss."""
        print('test_ignore_index~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
        criterion = LabelSmoothingCrossEntropy(ignore_index=-1)
        scores = torch.randn(6, 2)  # (seq_len=4, label_size=2)
        # Sequence labelling usually pads both ends ([CLS]/[SEP]) with -1;
        # those positions are ignored when computing the loss.
        labels = torch.tensor([-1, 0, 1, 1, 0, -1])
        print(criterion(scores, labels))

    def test_reduction(self):
        """reduction='sum' returns the summed (not averaged) loss."""
        print('test_reduction~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
        criterion = LabelSmoothingCrossEntropy(reduction='sum')
        scores = torch.randn(4, 2)  # (batch_size=4, label_size=2)
        labels = torch.tensor([0, 1, 1, 0])
        print(criterion(scores, labels))
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| 36.055556
| 101
| 0.523112
| 1,184
| 0.880952
| 0
| 0
| 0
| 0
| 0
| 0
| 447
| 0.332589
|
54be3891db6fb2756f21aef061add0f576fa4d9b
| 747
|
py
|
Python
|
Algorithms/Sort/Merge Sort/src.py
|
NikhilCodes/DSA-Warehouse
|
f68c3c7c092dc624381e956b065f849d738b5359
|
[
"MIT"
] | null | null | null |
Algorithms/Sort/Merge Sort/src.py
|
NikhilCodes/DSA-Warehouse
|
f68c3c7c092dc624381e956b065f849d738b5359
|
[
"MIT"
] | null | null | null |
Algorithms/Sort/Merge Sort/src.py
|
NikhilCodes/DSA-Warehouse
|
f68c3c7c092dc624381e956b065f849d738b5359
|
[
"MIT"
] | null | null | null |
"""
ALGORITHM : Merge Sort
WORST CASE => {
PERFORMANCE: O(n log(n))
SPACE: O(n)
}
"""
def merge_sort(arr):
    """Return a new list containing the elements of ``arr`` in ascending order.

    Fixes over the original implementation:
    - an empty input no longer recurses forever (``merge_sort([])`` used to
      exhaust the recursion limit because ``arr[:0]`` is again empty);
    - the merge step uses index cursors instead of ``list.pop(0)``, keeping
      the merge O(n) (pop(0) made it O(n^2), contradicting the advertised
      O(n log n) worst case);
    - the redundant size-2 fast path is removed; the general split/merge
      handles it correctly.
    The sort is stable: ties are taken from the left half first.
    """
    if len(arr) <= 1:
        return list(arr)
    mid = len(arr) // 2
    left = merge_sort(arr[:mid])
    right = merge_sort(arr[mid:])
    merged = []
    i = j = 0
    # Merge the two sorted halves with two cursors.
    while i < len(left) and j < len(right):
        if left[i] <= right[j]:
            merged.append(left[i])
            i += 1
        else:
            merged.append(right[j])
            j += 1
    # One of the halves is exhausted; append the remainder of the other.
    merged.extend(left[i:])
    merged.extend(right[j:])
    return merged
if __name__ == '__main__':
    # Demo: sort a small sample and print the result.
    sorted_arr = merge_sort([8, 4, 2, 9, 1, 3])
    print(sorted_arr)
| 20.189189
| 48
| 0.497992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 117
| 0.156627
|
54bf36b4e97ce13f93c4eda7288e2207a9d1c577
| 2,295
|
py
|
Python
|
locations/spiders/dollarama.py
|
cmecklenborg/alltheplaces
|
e62b59fb0071b6e289c4622d368fdb203a28347e
|
[
"MIT"
] | null | null | null |
locations/spiders/dollarama.py
|
cmecklenborg/alltheplaces
|
e62b59fb0071b6e289c4622d368fdb203a28347e
|
[
"MIT"
] | null | null | null |
locations/spiders/dollarama.py
|
cmecklenborg/alltheplaces
|
e62b59fb0071b6e289c4622d368fdb203a28347e
|
[
"MIT"
] | null | null | null |
import scrapy
from locations.items import GeojsonPointItem
from urllib.parse import urlencode
from scrapy.selector import Selector
from locations.hours import OpeningHours
Days = ["Su", "Mo", "Tu", "We", "Th", "Fr", "Sa"]
class DollaramaSpider(scrapy.Spider):
    """Scrape Dollarama store locations via the dollarama.com anydata API,
    sweeping Canada with 100-mile-radius searches around precomputed
    centroid points."""
    name = "dollarama"
    item_attributes = {"brand": "Dollarama"}
    allowed_domains = ["dollarama.com"]
    def start_requests(self):
        """Issue one API request per Canadian centroid point."""
        base_url = "https://www.dollarama.com/en-CA/locations/anydata-api?"
        params = {"distance": "100", "units": "miles"}
        with open(
            "./locations/searchable_points/ca_centroids_100mile_radius.csv"
        ) as points:
            next(points)  # skip the CSV header row
            for point in points:
                _, lat, lon = point.strip().split(",")
                params.update({"latitude": lat, "longitude": lon})
                yield scrapy.Request(url=base_url + urlencode(params))
    def parse_hours(self, hours):
        """Convert the API's pipe-separated weekly hours string (one
        '<open>-<close>' segment per day, Sunday first, or 'Closed') into
        an OpeningHours spec string."""
        hrs = hours.split("|")
        opening_hours = OpeningHours()
        for day, hour in zip(Days, hrs):
            if hour == "Closed":
                continue
            open_time, close_time = hour.split("-")
            opening_hours.add_range(
                day=day,
                open_time=open_time,
                close_time=close_time,
                time_format="%I:%M%p",
            )
        return opening_hours.as_opening_hours()
    def parse(self, response):
        """Yield one GeojsonPointItem per store in the JSON response."""
        data = response.json()
        for row in data.get("StoreLocations", []):
            properties = {
                "ref": row["LocationNumber"],
                "name": row["Name"],
                "addr_full": row["ExtraData"]["Address"]["AddressNonStruct_Line1"],
                "city": row["ExtraData"]["Address"]["Locality"],
                "state": row["ExtraData"]["Address"]["Region"],
                "postcode": row["ExtraData"]["Address"]["PostalCode"],
                "lat": row["Location"]["coordinates"][1],
                "lon": row["Location"]["coordinates"][0],
                "phone": row["ExtraData"]["Phone"],
            }
            hours = self.parse_hours(row["ExtraData"]["Hours of operations"])
            if hours:
                properties["opening_hours"] = hours
            yield GeojsonPointItem(**properties)
| 33.26087
| 83
| 0.547277
| 2,069
| 0.901525
| 1,414
| 0.616122
| 0
| 0
| 0
| 0
| 612
| 0.266667
|
54c063aa9c40b1e765ddd298550866419dd317e0
| 4,614
|
py
|
Python
|
faces/recognize_faces_video.py
|
rummens1337/vision-assignment
|
8735e95224be702f1bb33066eef80f098b347b1f
|
[
"MIT"
] | null | null | null |
faces/recognize_faces_video.py
|
rummens1337/vision-assignment
|
8735e95224be702f1bb33066eef80f098b347b1f
|
[
"MIT"
] | null | null | null |
faces/recognize_faces_video.py
|
rummens1337/vision-assignment
|
8735e95224be702f1bb33066eef80f098b347b1f
|
[
"MIT"
] | 1
|
2020-01-06T09:55:35.000Z
|
2020-01-06T09:55:35.000Z
|
# import the necessary packages
from imutils.video import VideoStream
import face_recognition
import imutils
import pickle
import time
import cv2
import os

# https://www.pyimagesearch.com/2018/06/18/face-recognition-with-opencv-python-and-deep-learning/
# https://www.pyimagesearch.com/2018/06/11/how-to-build-a-custom-face-recognition-dataset/

# Runtime configuration, kept in a dict to mirror an argparse result.
args = {}
# path to serialized db of facial encodings
args['encodings'] = os.path.join(os.path.dirname(__file__), 'encodings.pickle')
# path to output video (None disables writing)
args['output'] = None
# whether or not to display output frame to screen
args['display'] = 1
# face detection model to use: either `hog` or `cnn`
args['detection_method'] = 'hog'

# load the known faces and embeddings
print("[INFO] loading encodings...")
data = pickle.loads(open(args["encodings"], "rb").read())

# initialize the video stream and pointer to output video file, then
# allow the camera sensor to warm up
print("[INFO] starting video stream...")
vs = VideoStream(src=0).start()
writer = None
time.sleep(2.0)

# loop over frames from the video file stream
while True:
    # grab the frame from the threaded video stream
    frame = vs.read()

    # convert the input frame from BGR to RGB then resize it to have
    # a width of 750px (to speedup processing)
    rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    # BUG FIX: the original resized `frame` here, which silently discarded
    # the BGR->RGB conversion above; face_recognition expects RGB input.
    rgb = imutils.resize(rgb, width=750)
    r = frame.shape[1] / float(rgb.shape[1])

    # detect the (x, y)-coordinates of the bounding boxes
    # corresponding to each face in the input frame, then compute
    # the facial embeddings for each face
    boxes = face_recognition.face_locations(rgb,
                                            model=args["detection_method"])
    encodings = face_recognition.face_encodings(rgb, boxes)
    names = []

    # loop over the facial embeddings
    for encoding in encodings:
        # attempt to match each face in the input image to our known
        # encodings
        matches = face_recognition.compare_faces(data["encodings"],
                                                 encoding)
        name = "Unknown"

        # check to see if we have found a match
        if True in matches:
            # find the indexes of all matched faces then initialize a
            # dictionary to count the total number of times each face
            # was matched
            matchedIdxs = [i for (i, b) in enumerate(matches) if b]
            counts = {}

            # loop over the matched indexes and maintain a count for
            # each recognized face
            for i in matchedIdxs:
                name = data["names"][i]
                counts[name] = counts.get(name, 0) + 1

            # determine the recognized face with the largest number
            # of votes (note: in the event of an unlikely tie Python
            # will select first entry in the dictionary)
            name = max(counts, key=counts.get)

        # update the list of names
        names.append(name)

    # loop over the recognized faces
    for ((top, right, bottom, left), name) in zip(boxes, names):
        # rescale the face coordinates back to the full-size frame
        top = int(top * r)
        right = int(right * r)
        bottom = int(bottom * r)
        left = int(left * r)

        # draw the predicted face name on the image
        cv2.rectangle(frame, (left, top), (right, bottom),
                      (0, 255, 0), 2)
        y = top - 15 if top - 15 > 15 else top + 15
        cv2.putText(frame, name, (left, y), cv2.FONT_HERSHEY_SIMPLEX,
                    0.75, (0, 255, 0), 2)

    # if the video writer is None *AND* we are supposed to write
    # the output video to disk initialize the writer
    # if writer is None and args["output"] is not None:
    #     fourcc = cv2.VideoWriter_fourcc(*"MJPG")
    #     writer = cv2.VideoWriter(args["output"], fourcc, 20,
    #                              (frame.shape[1], frame.shape[0]), True)
    #
    # # if the writer is not None, write the frame with recognized
    # # faces to disk
    # if writer is not None:
    #     writer.write(frame)

    # check to see if we are supposed to display the output frame to
    # the screen
    if args["display"] > 0:
        cv2.imshow("Frame", frame)
        key = cv2.waitKey(1) & 0xFF

        # if the `q` key was pressed, break from the loop
        if key == ord("q"):
            break

# do a bit of cleanup
cv2.destroyAllWindows()
vs.stop()

# check to see if the video writer point needs to be released
if writer is not None:
    writer.release()
| 36.912
| 97
| 0.6114
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 2,393
| 0.518639
|
54c067d1a064f439200439939a3af3a79e1fca5f
| 3,298
|
py
|
Python
|
pytint/machine_io.py
|
semicolonTransistor/PyTint
|
0f70fe756c285cda38b3a91318af02382a505263
|
[
"MIT"
] | 1
|
2020-08-14T19:41:45.000Z
|
2020-08-14T19:41:45.000Z
|
pytint/machine_io.py
|
semicolonTransistor/PyTint
|
0f70fe756c285cda38b3a91318af02382a505263
|
[
"MIT"
] | null | null | null |
pytint/machine_io.py
|
semicolonTransistor/PyTint
|
0f70fe756c285cda38b3a91318af02382a505263
|
[
"MIT"
] | null | null | null |
from pytint.interpreters import FiniteAutomaton
from typing import List, Union, Dict, Iterable
import collections
import yaml
class IncompleteMachine(Exception):
    """Raised when a machine definition is missing a required field."""

    def __init__(self, missing: str, machine_type: str):
        self.missing = missing
        self.machine_type = machine_type

    def __str__(self):
        return '"{}" is required for {} but not provided'.format(
            self.missing, self.machine_type)
class UnsupportedMachine(Exception):
    """Raised when the requested machine type has no interpreter implementation."""
    pass
def _as_str_list(value):
    """Coerce a scalar or iterable YAML value into a list of strings.

    Strings count as scalars (they are not iterated character by character).
    """
    import collections.abc  # collections.Iterable alias was removed in Python 3.10
    if isinstance(value, str) or not isinstance(value, collections.abc.Iterable):
        value = [value]
    return [str(item) for item in value]


def load_machine(yaml_input: str, machine_type: str = "", name: str = ""):
    """Build a machine object from a YAML description.

    Arguments:
        yaml_input: YAML text describing the machine.
        machine_type: Optional override for the YAML "type" field.
        name: Optional override for the YAML "name" field.

    Raises:
        IncompleteMachine: if a required field is missing.
        UnsupportedMachine: if the machine type is not implemented.
    """
    # loads yaml from input
    data = yaml.safe_load(yaml_input)

    # if no type override, attempt to load type from data
    if not machine_type:
        if "type" in data:
            machine_type = str(data["type"]).lower()
        else:
            # can't find machine type
            raise IncompleteMachine("type", "machine")

    if not name and "name" in data:
        name = data["name"]

    if "start" in data:
        start = str(data["start"])
    else:
        raise IncompleteMachine("start", machine_type)

    if machine_type == "dfa" or machine_type == "nfa":
        machine = FiniteAutomaton(name)
        machine.set_start_state(start)

        if "accept-states" not in data:
            raise IncompleteMachine("accept-states", machine_type)
        for accept_state in _as_str_list(data["accept-states"]):
            machine.add_accepting_state(accept_state)

        if "transitions" not in data:
            raise IncompleteMachine("transitions", machine_type)
        for transition in data["transitions"]:
            if len(transition) < 3:
                raise Exception("Transitions are 3-tuples!")
            state: str = str(transition[0])
            # BUG FIX: the original wrapped transition[1] in str() *before*
            # the iterable check, so a list of symbols was stringified to
            # e.g. "['a', 'b']" instead of being expanded; pass the raw
            # value, as the next-state handling already did.
            symbols = _as_str_list(transition[1])
            next_states = _as_str_list(transition[2])
            for symbol in symbols:
                # Normalize either spelling of the epsilon (empty) symbol.
                if symbol.lower() == "epsilon" or symbol.lower() == "ε":
                    symbol = "ε"
                for next_state in next_states:
                    machine.add_transition(state, symbol, next_state)

        return machine
    else:
        raise UnsupportedMachine("{} is not a supported machine type!".format(machine_type))
def load_machine_from_file(path: str, machine_type: str = "", name: str = ""):
    """Read a YAML machine description from *path* and build the machine."""
    with open(path, "r") as machine_file:
        contents = machine_file.read()
    return load_machine(contents, machine_type, name)
| 36.644444
| 109
| 0.608854
| 333
| 0.100909
| 0
| 0
| 0
| 0
| 0
| 0
| 408
| 0.123636
|
54c1abcc8ecb4f60275606b22bbb22422b5b3be6
| 1,021
|
py
|
Python
|
dashboard/frontend/callbacks.py
|
AndreWohnsland/CocktailBerry
|
60b2dfc3a4a6f3ef9ab2d946a97d14829e575a9d
|
[
"MIT"
] | 1
|
2022-03-06T23:50:34.000Z
|
2022-03-06T23:50:34.000Z
|
dashboard/frontend/callbacks.py
|
AndreWohnsland/CocktailBerry
|
60b2dfc3a4a6f3ef9ab2d946a97d14829e575a9d
|
[
"MIT"
] | 4
|
2022-03-03T11:16:17.000Z
|
2022-03-20T15:53:37.000Z
|
dashboard/frontend/callbacks.py
|
AndreWohnsland/CocktailBerry
|
60b2dfc3a4a6f3ef9ab2d946a97d14829e575a9d
|
[
"MIT"
] | null | null | null |
import dash
from dash.dependencies import Input, Output # type: ignore
import datetime
from treemap import generate_treemap, get_plot_data
from app import app
from store import store
@app.callback(Output('treemap', 'figure'),
              Output('timeclock', "children"),
              Input('interval-component', 'n_intervals'),
              Input('url', 'pathname'))
def update_plot(n, pathname):
    """Refresh the treemap figure and the clock label on a timer tick or URL change."""
    # Map each dashboard route to its graph type; unknown routes default to 1.
    store.current_graph_type = {
        "/n_today": 1,
        "/vol_today": 2,
        "/n_all": 3,
        "/vol_all": 4,
    }.get(pathname, 1)

    df = get_plot_data(store.current_graph_type)
    now_time = datetime.datetime.now().strftime('%H:%M')

    # Skip the expensive redraw when a pure timer tick brought no new data.
    trigger = dash.callback_context.triggered[0]["prop_id"]
    if trigger == "interval-component.n_intervals" and df.equals(store.last_data):
        return [dash.no_update, now_time]

    store.last_data = df
    return [generate_treemap(df), now_time]
| 31.90625
| 70
| 0.663075
| 0
| 0
| 0
| 0
| 833
| 0.815867
| 0
| 0
| 188
| 0.184133
|
54c3ac280575bb0ee6051627754ebf1784317751
| 4,095
|
py
|
Python
|
tms/useraccount/views.py
|
csagar131/TicketManagementSystem
|
d2c6b340dcb1d7607257d88dc5b931a0624a774b
|
[
"Apache-2.0"
] | null | null | null |
tms/useraccount/views.py
|
csagar131/TicketManagementSystem
|
d2c6b340dcb1d7607257d88dc5b931a0624a774b
|
[
"Apache-2.0"
] | 4
|
2021-06-04T23:51:17.000Z
|
2022-02-10T10:41:21.000Z
|
tms/useraccount/views.py
|
csagar131/TicketManagementSystem
|
d2c6b340dcb1d7607257d88dc5b931a0624a774b
|
[
"Apache-2.0"
] | 1
|
2020-06-04T11:44:42.000Z
|
2020-06-04T11:44:42.000Z
|
from django.shortcuts import render
from rest_framework.viewsets import ModelViewSet
from useraccount.serializer import UserSerializer,AgentUserSerializer
from rest_framework.views import APIView
from useraccount.models import User
from django.http.response import JsonResponse
from django.template.loader import render_to_string
from django.core.mail import send_mail
from rest_framework.authtoken.models import Token
from rest_framework.authentication import TokenAuthentication
from ticket.models import Organization
import random
import array
def username_generator(email):
    """Derive a username from an email address: everything before the first '@'."""
    return email.split('@')[0]
def password_generator():
    """Generate a random 12-character password.

    The result is guaranteed to contain at least one digit, one uppercase
    letter, one lowercase letter and one symbol; the remaining characters
    are drawn from the combined pool, and the whole string is shuffled.

    Returns:
        str: the generated password.
    """
    MAX_LEN = 12
    DIGITS = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9']
    # Note: the letter sets deliberately omit 'l'/'L' (easily confused glyphs).
    LOCASE_CHARACTERS = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',
                         'i', 'j', 'k', 'm', 'n', 'o', 'p', 'q',
                         'r', 's', 't', 'u', 'v', 'w', 'x', 'y',
                         'z']
    # BUG FIX: the original contained a lowercase 'p' in the uppercase set.
    UPCASE_CHARACTERS = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H',
                         'I', 'J', 'K', 'M', 'N', 'O', 'P', 'Q',
                         'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y',
                         'Z']
    # BUG FIX: the original list was corrupted by HTML-entity escaping
    # ('<&# 039;'); restored to the plain '<' symbol.
    SYMBOLS = ['@', '#', '$', '%', '=', ':', '?', '.', '/', '|', '~', '>',
               '*', '(', ')', '<']

    # combines all the character arrays above to form one array
    COMBINED_LIST = DIGITS + UPCASE_CHARACTERS + LOCASE_CHARACTERS + SYMBOLS

    # randomly select at least one character from each character set above
    temp_pass = (random.choice(DIGITS) + random.choice(UPCASE_CHARACTERS)
                 + random.choice(LOCASE_CHARACTERS) + random.choice(SYMBOLS))

    # fill the rest of the password from the combined pool
    for _ in range(MAX_LEN - 4):
        temp_pass += random.choice(COMBINED_LIST)

    # BUG FIX: the original called array.array with a garbled, quoted format
    # string ("'&# 039;u&# 039;, temp_pass'"), which raises at runtime; a
    # plain list of characters is all shuffle needs.
    temp_pass_list = list(temp_pass)
    random.shuffle(temp_pass_list)
    return ''.join(temp_pass_list)
class UserModelViewset(ModelViewSet):
    # Admin-user endpoints: creating an admin also creates its Organization,
    # emails the new user, and issues a DRF auth token.
    serializer_class = UserSerializer
    authentication_classes = [TokenAuthentication]
    queryset = User.objects.all()

    def create(self,request,*args,**kwargs):
        """Create an admin user plus its organization.

        Returns a JsonResponse with the auth token and serialized user data,
        or the serializer errors when validation fails.
        """
        ser_data = self.get_serializer(data = request.data)
        if ser_data.is_valid():
            # Create the organization first so the user can reference it.
            org=Organization.objects.create(name = request.data.get('org_name'))
            user = User.objects.create_user(request.data.get('username'), request.data.get('email'),
            request.data.get('password'),is_admin = True,organization = org)
            usr = request.data['username']
            # Render and send the welcome email.
            # NOTE(review): sender and CC addresses are hard-coded — confirm intended.
            msg_html = render_to_string('email_template.html',{'usr':usr})
            send_mail('Subject here','Here is the message.','chouhansagar131@gmail.com',
            [request.data['email'],'chouhansagar131@gmail.com'],html_message=msg_html,
            fail_silently=False,
                )
            # Issue a token for the freshly created account.
            token = str(Token.objects.create(user=user))
            return JsonResponse({'token':token,'user':ser_data.data})
        else:
            return JsonResponse(ser_data.errors)
class AgentUserViewSet(ModelViewSet):
    """Endpoints for non-admin (agent) users."""

    serializer_class = AgentUserSerializer
    queryset = User.objects.filter(is_admin = False)

    def create(self,request,*args,**kwargs):
        """Create an agent account with a derived username and a fixed password."""
        serializer = self.get_serializer(data=request.data)
        if not serializer.is_valid():
            return JsonResponse(serializer.errors)

        agent_email = request.data.get('email')
        agent_username = username_generator(agent_email)
        # NOTE(review): hard-coded default password — confirm this is intended.
        agent_password = '12345678'
        organization = Organization.objects.get(name=request.data.get('org_name'))
        new_user = User.objects.create_user(username=agent_username, password=agent_password,
                                            email=agent_email, organization=organization)
        user_serializer = UserSerializer(new_user)  # kept from original; result unused
        auth_token = str(Token.objects.create(user=new_user))
        return JsonResponse({'token': auth_token, 'username': agent_username,
                             'password': agent_password})
| 36.238938
| 114
| 0.60464
| 1,955
| 0.477411
| 0
| 0
| 0
| 0
| 0
| 0
| 636
| 0.155311
|
54c4b203b6a2600da692213b5eb8857816d71318
| 2,203
|
py
|
Python
|
ppocr/utils/special_character.py
|
ZacksTsang/PaddleOCR
|
c716553f6f369d191b91690a81936a19173a7c33
|
[
"Apache-2.0"
] | 1
|
2021-08-12T17:16:02.000Z
|
2021-08-12T17:16:02.000Z
|
ppocr/utils/special_character.py
|
ZacksTsang/PaddleOCR
|
c716553f6f369d191b91690a81936a19173a7c33
|
[
"Apache-2.0"
] | null | null | null |
ppocr/utils/special_character.py
|
ZacksTsang/PaddleOCR
|
c716553f6f369d191b91690a81936a19173a7c33
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class SpecialCharacter(object):
    """
    Special Sign Converter: maps full-width (or otherwise special)
    characters to their normal ASCII equivalents.
    """

    def __init__(self, config):
        # Each mapping comes from an optional dict file whose lines are
        # "<special>,<normal>"; otherwise a built-in full-width fallback is used.
        self.special_char = []
        self.normal_char = []
        if "special_character_dict_path" in config:
            dict_path = config['special_character_dict_path']
            with open(dict_path, "rb") as fin:
                for raw_line in fin.readlines():
                    parts = raw_line.decode('utf-8').strip("\n").strip("\r\n").split(',')
                    if len(parts) == 2:
                        self.special_char.append(parts[0])
                        self.normal_char.append(parts[1])
        else:
            # Fallback: full-width digits and uppercase letters -> ASCII.
            self.special_char = [u'0',u'1',u'2',u'3',u'4',u'5',u'6',u'7',u'8',u'9',u'A',u'B',u'C',u'D',u'E',u'F',u'G',u'H',u'I',u'J',u'K',u'L',u'M',u'N',u'O',u'P',u'Q',u'R',u'S',u'T',u'U',u'V',u'W',u'X',u'Y',u'Z']
            self.normal_char = ['0','1','2','3','4','5','6','7','8','9','A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z']

    def normalText(self, text):
        """
        Replace every known special character in *text* with its normal form.
        """
        for special, normal in zip(self.special_char, self.normal_char):
            if text.find(special) >= 0:
                text = text.replace(special, normal)
        return text
if __name__ == "__main__":
    # Smoke test using a character-mapping dictionary file.
    sp = SpecialCharacter({'special_character_dict_path': './special_character_dict.txt'})
    # BUG FIX: the original called '2021'.decode('utf-8'), which is
    # Python 2 syntax — str objects have no .decode method in Python 3.
    # The literal is already a (full-width) unicode string; pass it directly.
    print(sp.normalText('2021'))
| 43.196078
| 213
| 0.576033
| 1,472
| 0.645897
| 0
| 0
| 0
| 0
| 0
| 0
| 1,211
| 0.531373
|
54c4dc3efeaaf5e89758e47b3cc255b10a88682a
| 1,160
|
py
|
Python
|
setup.py
|
ionata/django-unique-uploadto
|
da66ed30d6abd86566d9b141e3c48b10340740a2
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
ionata/django-unique-uploadto
|
da66ed30d6abd86566d9b141e3c48b10340740a2
|
[
"BSD-3-Clause"
] | 1
|
2017-11-21T22:11:24.000Z
|
2017-11-22T00:38:17.000Z
|
setup.py
|
ionata/django-unique-uploadto
|
da66ed30d6abd86566d9b141e3c48b10340740a2
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
from __future__ import absolute_import, print_function, unicode_literals

from setuptools import setup, find_packages

from unique_uploadto import __version__

# Use the README as the long description shown on PyPI.
with open('README.rst', 'r') as f:
    readme = f.read()

# Packaging metadata for django-unique-uploadto.
setup(
    name='django-unique-uploadto',
    version=__version__,
    description='Use a unique filename for django uploads',
    long_description=readme,
    author='Ionata Digital',
    author_email='webmaster@ionata.com.au',
    url='https://github.com/ionata/django-unique-uploadto',
    license='BSD',
    packages=find_packages(),
    install_requires=[
        'django>=1.8.0',
    ],
    package_data={},
    include_package_data=True,
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Framework :: Django',
    ],
)
| 27.619048
| 72
| 0.64569
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 558
| 0.481034
|
54c84616a029f134346dc45645dd043f6f816a04
| 793
|
py
|
Python
|
scripts/python/helper/decoration.py
|
sulthonzh/zaruba
|
ec9262f43da17d86330da2c593b7da451aabd60f
|
[
"Apache-2.0"
] | null | null | null |
scripts/python/helper/decoration.py
|
sulthonzh/zaruba
|
ec9262f43da17d86330da2c593b7da451aabd60f
|
[
"Apache-2.0"
] | null | null | null |
scripts/python/helper/decoration.py
|
sulthonzh/zaruba
|
ec9262f43da17d86330da2c593b7da451aabd60f
|
[
"Apache-2.0"
] | null | null | null |
import random
# ANSI escape sequences for terminal text decoration.

# Text attributes.
normal="\033[0m"
bold="\033[1m"
faint="\033[2m"
italic="\033[3m"
underline="\033[4m"
blinkSlow="\033[5m"
blinkRapid="\033[6m"
inverse="\033[7m"
conceal="\033[8m"
crossedOut="\033[9m"

# Foreground colors.
black="\033[30m"
red="\033[31m"
green="\033[32m"
yellow="\033[33m"
blue="\033[34m"
magenta="\033[35m"
cyan="\033[36m"
white="\033[37m"

# Background colors.
bgBlack="\033[40m"
bgRed="\033[41m"
bgGreen="\033[42m"
bgYellow="\033[43m"
bgBlue="\033[44m"
bgMagenta="\033[45m"
bgCyan="\033[46m"
bgWhite="\033[47m"

# Reset sequences for individual attributes.
noStyle="\033[0m"
noUnderline="\033[24m"
noInverse="\033[27m"
noColor="\033[39m"
def generate_icon() -> str:
    """Return a random food emoji for decorating console output."""
    icon_list = ['🥜', '🍄', '🌰', '🍞', '🥐', '🥖', '🥞', '🧀', '🍖', '🍗', '🥓', '🍔', '🍟', '🍕', '🌭', '🌮', '🌯', '🥙', '🍲', '🥗', '🍿']
    # random.choice is the idiomatic equivalent of randrange + indexing.
    return random.choice(icon_list)
| 20.333333
| 121
| 0.583859
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 415
| 0.484813
|
49a498a0dfc278640dff975e47a36448f00bf3bc
| 2,918
|
py
|
Python
|
data_structures/tree/avl_tree.py
|
hongta/practice-python
|
52d5278ea5402ea77054bfa5c4bfdbdf81c9c963
|
[
"MIT"
] | null | null | null |
data_structures/tree/avl_tree.py
|
hongta/practice-python
|
52d5278ea5402ea77054bfa5c4bfdbdf81c9c963
|
[
"MIT"
] | null | null | null |
data_structures/tree/avl_tree.py
|
hongta/practice-python
|
52d5278ea5402ea77054bfa5c4bfdbdf81c9c963
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from tree_node import AVLTreeNode
from binary_search_tree import BinarySearchTree
class AVLTree(BinarySearchTree):
    """Self-balancing binary search tree using AVL rotations.

    Heights are cached on the nodes; the balance factor is
    height(right) - height(left), so +2 means right-heavy and -2 left-heavy.
    """

    def __init__(self):
        super(AVLTree, self).__init__()

    def insert(self, k, payload=None):
        """Insert key ``k`` with ``payload``; an existing key has its payload replaced."""
        if not self._root:
            # tree is empty, construct the tree
            self._root = AVLTreeNode(k, payload)
        else:
            n = AVLTreeNode(k, payload)
            self._insert(self._root, n)

    def _insert(self, tree_node, new_node):
        # Recursive BST insert followed by an AVL fixup on the way back up.
        if new_node.key == tree_node.key:
            # duplicate key: replace payload only, no structural change
            tree_node.payload = new_node.payload
            return tree_node

        if new_node.key < tree_node.key:
            if not tree_node.left:
                tree_node.set_children(left=new_node)
            else:
                self._insert(tree_node.left, new_node)
        else:
            if not tree_node.right:
                tree_node.set_children(right=new_node)
            else:
                self._insert(tree_node.right, new_node)

        return self._avl_insert_fixup(tree_node)

    def _avl_insert_fixup(self, node):
        # 2. update height of the ancestor node
        self._update_height(node)

        # 3. check whether the node became unbalanced
        balance = self.get_balance(node)
        if balance == 2:
            # Right-heavy; a right child leaning left needs a double rotation.
            if self.get_balance(node.right) < 0:
                node.right = self._right_rotate(node.right)
            return self._left_rotate(node)
        if balance == -2:
            # BUG FIX: the original tested ``self.get_balance == -2`` —
            # comparing the bound method object itself to an int, which is
            # never true — so left-heavy subtrees were never rebalanced.
            if self.get_balance(node.left) > 0:
                node.left = self._left_rotate(node.left)
            return self._right_rotate(node)
        return node

    def _update_height(self, node):
        # A node's height is 1 + the taller child's cached height.
        node.height = max(self.height(node.left), self.height(node.right)) + 1

    def height(self, n):
        """Return the cached height of ``n``; ``None`` counts as height 0."""
        if not n:
            return 0
        return n.height

    def get_balance(self, node):
        """Return height(right) - height(left); 0 for an empty node."""
        if not node:
            return 0
        return self.height(node.right) - self.height(node.left)

    def _right_rotate(self, node):
        # Promote the left child; ``node`` becomes its right child.
        k1 = node.left
        self._replace_with(node, k1)
        node.set_children(left=k1.right)
        k1.set_children(right=node)
        self._update_height(node)
        self._update_height(k1)
        return k1

    def _left_rotate(self, node):
        # Promote the right child; ``node`` becomes its left child.
        k2 = node.right
        self._replace_with(node, k2)
        node.set_children(right=k2.left)
        k2.set_children(left=node)
        self._update_height(node)
        self._update_height(k2)
        return k2
if __name__ == '__main__':
    # Smoke test: insert ascending keys (forces rotations), then inspect nodes.
    # NOTE(review): Python 2 print-statement syntax below — this file will not
    # run under Python 3 as-is.
    t = AVLTree()
    t.insert(10)
    t.insert(15)
    t.insert(20)
    t.insert(25)
    t.insert(30)

    p = t.search(20)
    print p, p.left, p.right, p.height, p.parent
    p = t.search(15)
    print p, p.left, p.right, p.height, p.parent
    p = t.search(25)
    print p, p.left, p.right, p.height, p.parent
| 26.770642
| 78
| 0.59013
| 2,446
| 0.838245
| 0
| 0
| 0
| 0
| 0
| 0
| 172
| 0.058944
|
49a4e7b419d4d64776cdbda3fd3b82f70e450c6d
| 96
|
py
|
Python
|
ardget_app/apps.py
|
shumdeveloper/ardget
|
585a93ce24e747014f2cbde8daae600e26fbd835
|
[
"MIT"
] | null | null | null |
ardget_app/apps.py
|
shumdeveloper/ardget
|
585a93ce24e747014f2cbde8daae600e26fbd835
|
[
"MIT"
] | null | null | null |
ardget_app/apps.py
|
shumdeveloper/ardget
|
585a93ce24e747014f2cbde8daae600e26fbd835
|
[
"MIT"
] | null | null | null |
from django.apps import AppConfig
class TempArduinoConfig(AppConfig):
name = 'ardget_app'
| 16
| 35
| 0.770833
| 59
| 0.614583
| 0
| 0
| 0
| 0
| 0
| 0
| 12
| 0.125
|
49a74574e4d388966ade396ad88447197a6c63e8
| 1,944
|
py
|
Python
|
dynamic_rest/datastructures.py
|
reinert/dynamic-rest
|
aaf3973f69b53ed317b9c8468942523715814fa8
|
[
"MIT"
] | 690
|
2016-02-05T22:46:03.000Z
|
2022-03-28T18:59:49.000Z
|
dynamic_rest/datastructures.py
|
reinert/dynamic-rest
|
aaf3973f69b53ed317b9c8468942523715814fa8
|
[
"MIT"
] | 190
|
2015-03-06T16:57:21.000Z
|
2022-02-02T21:56:07.000Z
|
dynamic_rest/datastructures.py
|
reinert/dynamic-rest
|
aaf3973f69b53ed317b9c8468942523715814fa8
|
[
"MIT"
] | 117
|
2016-05-05T13:51:07.000Z
|
2022-02-28T18:25:56.000Z
|
"""This module contains custom data-structures."""
import six
class TreeMap(dict):
    """Tree structure implemented with nested dictionaries."""

    def get_paths(self):
        """Get all paths from the root to the leaves.

        For example, given a chain like `{'a':{'b':{'c':None}}}`,
        this method would return `[['a', 'b', 'c']]`.

        Returns:
            A list of lists of paths.
        """
        paths = []
        # dict.items() behaves identically on Python 2 and 3 here, so the
        # six.iteritems() shim the original used is unnecessary.
        for key, child in self.items():
            if isinstance(child, TreeMap) and child:
                # current child is an intermediate node
                for path in child.get_paths():
                    path.insert(0, key)
                    paths.append(path)
            else:
                # current child is an endpoint
                paths.append([key])
        return paths

    def insert(self, parts, leaf_value, update=False):
        """Add a list of nodes into the tree.

        The list will be converted into a TreeMap (chain) and then
        merged with the current TreeMap.

        For example, this method would insert `['a','b','c']` as
        `{'a':{'b':{'c':{}}}}`.

        Arguments:
            parts: List of nodes representing a chain.
            leaf_value: Value to insert into the leaf of the chain.
            update: Whether or not to update the leaf with the given value or
                to replace the value.

        Returns:
            self
        """
        tree = self
        if not parts:
            return tree
        cur = tree
        last = len(parts) - 1
        for i, part in enumerate(parts):
            if part not in cur:
                cur[part] = TreeMap() if i != last else leaf_value
            elif i == last:  # found leaf
                if update:
                    cur[part].update(leaf_value)
                else:
                    cur[part] = leaf_value
            cur = cur[part]
        return self
| 29.907692
| 77
| 0.513374
| 1,879
| 0.966564
| 0
| 0
| 0
| 0
| 0
| 0
| 973
| 0.500514
|
49a7ee42b8f9f516686c7f73c30cfb6480597ce8
| 2,605
|
py
|
Python
|
functions.py
|
heEXDe/password_generator
|
c546c09be927abc2a02971cab5f2d19817208cda
|
[
"MIT"
] | null | null | null |
functions.py
|
heEXDe/password_generator
|
c546c09be927abc2a02971cab5f2d19817208cda
|
[
"MIT"
] | null | null | null |
functions.py
|
heEXDe/password_generator
|
c546c09be927abc2a02971cab5f2d19817208cda
|
[
"MIT"
] | null | null | null |
# functions for actions
import random
import string
import GUI
def generate_password():
    """Generate a password from the GUI checkbox settings and display it.

    Reads the desired length and the digit/lowercase/uppercase flags from the
    GUI module, builds a random password, and writes it to ``GUI.lblPassword``.
    If no character class is selected, an error label is shown instead.
    """
    password = ''
    GUI.lblError.config(text='')
    pass_length = GUI.var.get()
    use_digits = GUI.varDigi.get() == 1
    use_lower = GUI.varChLower.get() == 1
    use_upper = GUI.varChUpper.get() == 1

    # Pick the letter pool implied by the two case checkboxes (may be empty).
    # This collapses the original eight near-identical branches into one path.
    if use_lower and use_upper:
        letters = string.ascii_letters
    elif use_lower:
        letters = string.ascii_lowercase
    elif use_upper:
        letters = string.ascii_uppercase
    else:
        letters = ''

    if not letters and not use_digits:
        GUI.lblError.config(text='error!')
    else:
        for _ in range(pass_length):
            if letters and use_digits:
                # 50/50 choice between a letter and a digit, as before.
                if random.choice(['ch', 'digi']) == 'ch':
                    password = password + random.choice(letters)
                else:
                    # BUG FIX: the original used randint(0, 10), which can
                    # yield 10 and append the two-character string "10",
                    # overshooting the requested password length.
                    password = password + str(random.randint(0, 9))
            elif letters:
                password = password + random.choice(letters)
            else:
                password = password + str(random.randint(0, 9))

    print(str(GUI.varDigi.get()) + ', ' + str(GUI.varChLower.get()) + ', ' + str(GUI.varChUpper.get()))
    GUI.lblPassword.config(text=password)
def copy_pass():
    # Copy the currently displayed password to the system clipboard.
    toclpboard = GUI.lblPassword.cget("text")
    GUI.root.clipboard_clear()
    GUI.root.clipboard_append(toclpboard)
| 42.016129
| 103
| 0.571209
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 122
| 0.046833
|
49a800c2275f46ea1981d8aa809ee37691f78025
| 1,330
|
py
|
Python
|
lottery/branch/retrain.py
|
chenw23/open_lth
|
2ce732fe48abd5a80c10a153c45d397b048e980c
|
[
"MIT"
] | 509
|
2020-05-07T16:45:46.000Z
|
2022-03-28T13:41:36.000Z
|
lottery/branch/retrain.py
|
chenw23/open_lth
|
2ce732fe48abd5a80c10a153c45d397b048e980c
|
[
"MIT"
] | 12
|
2020-06-10T10:07:09.000Z
|
2022-02-03T01:57:32.000Z
|
lottery/branch/retrain.py
|
chenw23/open_lth
|
2ce732fe48abd5a80c10a153c45d397b048e980c
|
[
"MIT"
] | 103
|
2020-05-07T21:40:06.000Z
|
2022-03-11T19:07:55.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import datasets.registry
from foundations import hparams
from foundations.step import Step
from lottery.branch import base
import models.registry
from pruning.mask import Mask
from pruning.pruned_model import PrunedModel
from training import train
class Branch(base.Branch):
    def branch_function(
        self,
        retrain_d: hparams.DatasetHparams,
        retrain_t: hparams.TrainingHparams,
        start_at_step_zero: bool = False
    ):
        """Retrain the pruned model with different dataset/training hyperparameters.

        Args:
            retrain_d: dataset hyperparameters for the retraining run.
            retrain_t: training hyperparameters for the retraining run.
            start_at_step_zero: if True, start the step counter at iteration 0
                instead of the lottery schedule's train start step.
        """
        # Get the mask and model.
        m = models.registry.load(self.level_root, self.lottery_desc.train_start_step, self.lottery_desc.model_hparams)
        m = PrunedModel(m, Mask.load(self.level_root))
        # Convert the starting iteration into a Step for the retraining dataset.
        start_step = Step.from_iteration(0 if start_at_step_zero else self.lottery_desc.train_start_step.iteration,
                                         datasets.registry.iterations_per_epoch(retrain_d))
        train.standard_train(m, self.branch_root, retrain_d, retrain_t, start_step=start_step, verbose=self.verbose)

    @staticmethod
    def description():
        return "Retrain the model with different hyperparameters."

    @staticmethod
    def name():
        return 'retrain'
| 35.945946
| 118
| 0.72406
| 902
| 0.678195
| 0
| 0
| 157
| 0.118045
| 0
| 0
| 257
| 0.193233
|
49a855768d0faa6b5929b201dd9c0e69c1e8d0cf
| 1,860
|
py
|
Python
|
Sumo_programs/probablyGoodCode/Lyall's_Test_File.py
|
senornosketchy/ENGG1000-R2R
|
5c6880e81560079d22c8dbbadd9c7fdd1e585aa4
|
[
"MIT"
] | null | null | null |
Sumo_programs/probablyGoodCode/Lyall's_Test_File.py
|
senornosketchy/ENGG1000-R2R
|
5c6880e81560079d22c8dbbadd9c7fdd1e585aa4
|
[
"MIT"
] | null | null | null |
Sumo_programs/probablyGoodCode/Lyall's_Test_File.py
|
senornosketchy/ENGG1000-R2R
|
5c6880e81560079d22c8dbbadd9c7fdd1e585aa4
|
[
"MIT"
] | null | null | null |
"""
Created on Thu Mar 22 15:07:43 2018
@author: Tanvee
First attempt at an program for the EV3 bot.
The main aim of this is to develop an algorithm to search clockwise for and identify
close objects, before rushing to meet them.
"""
print(0)
from time import sleep
import sys, os
# Import the ev3dev specific library
from ev3dev.ev3 import *
print(1)
# Connect motors
rightMotor = LargeMotor(OUTPUT_C)
assert rightMotor.connected
leftMotor = LargeMotor(OUTPUT_B)
assert leftMotor.connected
# Connect sensors
print(2)
tsRIGHT = TouchSensor(INPUT_3)
assert tsRIGHT.connected
tsLEFT = TouchSensor(INPUT_2)
assert tsLEFT.connected
us = UltrasonicSensor()
assert us.connected
cs = ColorSensor(INPUT_4)
assert cs.connected
print("All Connected")
# The gyro is reset when the mode is changed, so the first line is extra, just so we
# can change the mode the 'GYRO-ANGLE', which is what we want
# gs.mode = 'GYRO-RATE' # Changing the mode resets the gyro
# gs.mode = 'GYRO-ANG' # Set gyro mode to return compass angle
# We will need to check EV3 buttons state.
btn = Button()
# FUNCTION DEFINITIONS
def drive(left, right):
"""
Start both motors at the given speeds.
"""
leftMotor.run_direct(duty_cycle_sp=left)
rightMotor.run_direct(duty_cycle_sp=right)
def stop():
# Stop both motors
leftMotor.stop(stop_action='brake')
rightMotor.stop(stop_action='brake')
def main():
print(btn.buttons_pressed)
if btn.left:
stop()
if btn.right:
print("The button was pressed")
drive(100, -100)
sleep(3)
stop()
"""
The default action is to spin around in an attempt to detect any object
within a certain radius using the ultrasonic sensor.
If the ultrasonic detects anything within 500mm the robot's reacts by "charging" at the object
"""
while True:
main()
| 21.882353
| 98
| 0.716667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 999
| 0.537097
|
49a87d079120bfbcccec5530adc7e03acb1cb9a1
| 13,984
|
py
|
Python
|
tests/test_modelgen.py
|
PipGrylls/sqlalchemy-modelgen
|
988e7b39fa4f8b2ddac35792c21e147e8260df17
|
[
"MIT"
] | 18
|
2021-04-01T20:32:42.000Z
|
2021-06-01T05:24:27.000Z
|
tests/test_modelgen.py
|
PipGrylls/sqlalchemy-modelgen
|
988e7b39fa4f8b2ddac35792c21e147e8260df17
|
[
"MIT"
] | null | null | null |
tests/test_modelgen.py
|
PipGrylls/sqlalchemy-modelgen
|
988e7b39fa4f8b2ddac35792c21e147e8260df17
|
[
"MIT"
] | 1
|
2021-11-23T01:17:18.000Z
|
2021-11-23T01:17:18.000Z
|
from unittest import TestCase, mock
from modelgen import ModelGenerator, Base
from os import getcwd, path
class TestModelgen(TestCase):
    """Unit tests for modelgen.ModelGenerator.

    Every collaborator (validator, parser, template engine, filesystem
    helpers) is patched out with unittest.mock, so these tests assert only
    the call wiring, not real file generation.

    Note on argument order: @mock.patch decorators are applied bottom-up,
    so the decorator closest to the `def` maps to the first mock argument.
    """

    @classmethod
    def setUpClass(self):
        # NOTE(review): conventionally this parameter is named `cls`.
        # Shared schema fixture shaped like a parsed templates/*.yaml file.
        self.yaml = {'tables': {'userinfo':{'columns':
                    [{'name': 'firstname', 'type': 'varchar'},
                    {'name': 'lastname', 'type': 'varchar'},
                    {'name': 'dob', 'type': 'date'},
                    {'name': 'contact', 'type': 'numeric'},
                    {'name': 'address', 'type': 'varchar'}]}}}
        self.logger = Base().logger

    @mock.patch('modelgen.modelgenerator.Validate')
    @mock.patch('modelgen.ModelGenerator.__init__')
    @mock.patch('modelgen.modelgenerator.Helper.write_to_file')
    @mock.patch('modelgen.modelgenerator.Path')
    @mock.patch('modelgen.modelgenerator.Parser')
    @mock.patch('modelgen.modelgenerator.Template')
    def test_create_model_wo_alembic(self, mock_templt, mock_prsr, mock_pth,
                                     mock_wrtf, mock_init, mock_validate):
        '''
        Test create_model function without setting alembic
        support to True
        '''
        mock_init.return_value = None
        mock_validate.validate.return_value = True
        mock_wrtf.return_value = True
        mock_prsr.data.return_value = self.yaml
        model_obj = ModelGenerator()
        response = model_obj._create_model('test')
        self.assertEqual(True, response)
        mock_prsr.assert_called_with(filepath=path.join(getcwd(), 'templates/test.yaml'))
        mock_wrtf.assert_called_with(path=path.join(getcwd(), 'models/test.py'),
                                     data=mock_templt().render())

    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_alembic_meta')
    @mock.patch('modelgen.modelgenerator.Validate')
    @mock.patch('modelgen.ModelGenerator.__init__')
    @mock.patch('modelgen.modelgenerator.Helper.write_to_file')
    @mock.patch('modelgen.modelgenerator.Path')
    @mock.patch('modelgen.modelgenerator.Parser')
    @mock.patch('modelgen.modelgenerator.Template')
    def test_create_model_w_alembic(self, mock_templt, mock_prsr, mock_pth,
                                    mock_wrtf, mock_init, mock_validate, mock_cam):
        '''
        Test _create_model function with setting alembic
        support to True
        '''
        mock_init.return_value = None
        mock_validate.validate.return_value = True
        mock_wrtf.return_value = True
        mock_prsr.data.return_value = self.yaml
        mock_cam.return_value = True
        model_obj = ModelGenerator()
        response = model_obj._create_model(datasource='./test', alembic=True)
        self.assertEqual(True, response)
        mock_prsr.assert_called_with(filepath=path.join(getcwd(), 'templates/./test.yaml'))
        mock_wrtf.assert_called_with(path=path.join(getcwd(), 'models/./test.py'),
                                     data=mock_templt().render())

    @mock.patch('modelgen.modelgenerator.Validate')
    @mock.patch('modelgen.ModelGenerator.__init__')
    @mock.patch('modelgen.modelgenerator.Helper.write_to_file')
    @mock.patch('modelgen.modelgenerator.Path')
    @mock.patch('modelgen.modelgenerator.Parser')
    @mock.patch('modelgen.modelgenerator.Template')
    def test_create_alembic_meta(self, mock_templt, mock_prsr, mock_pth,
                                 mock_wrtf, mock_init, mock_validate):
        '''
        Test _create_alembic_meta function. Function creates
        alembic support by a folder called metadata and
        a file __init__.py in the folder. This file contains
        sqlalchemy metadata imported from all the sqlalchemy
        model files
        '''
        mock_init.return_value = None
        mock_validate.validate.return_value = True
        mock_wrtf.return_value = True
        mock_prsr.data.return_value = self.yaml
        model_obj = ModelGenerator()
        response = model_obj._create_alembic_meta()
        self.assertEqual(True, response)
        mock_wrtf.assert_called_with(path=path.join(getcwd(), 'metadata/__init__.py'),
                                     data=mock_templt().render())

    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.Path')
    @mock.patch('modelgen.modelgenerator.copyfile')
    def test_create_template_folder(self, mock_cpyfile, mock_pth, mock_ospth):
        '''
        Test _create_template_folder function. Function creates
        templates folder structure when modelgen is initialized
        '''
        # NOTE(review): `side_effects` is not a Mock attribute — the real
        # name is `side_effect` (singular), so this line configures nothing.
        mock_ospth.join.side_effects = ['./test', './test', './test', './test']
        mock_ospth.exists.return_value = False
        mock_pth.mkdir.return_value = True
        mock_cpyfile.return_value = True
        model_obj = ModelGenerator()
        response = model_obj._create_template_folder(init='./testfolder')
        self.assertEqual(response, True)
        mock_cpyfile.assert_called_with(mock_ospth.join(), mock_ospth.join())

    @mock.patch('modelgen.ModelGenerator._create_alembic_folder')
    @mock.patch('modelgen.modelgenerator.Path')
    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.copyfile')
    def test_create_template_folder_exists(self, mock_cpyfile, mock_ospth, mock_pth, mock_caf):
        '''
        Test _create_template_folder function when folder already exists
        Function throws FileExistsError.
        '''
        # NOTE(review): this makes mkdir *return* the exception class, not
        # raise it; `side_effect = FileExistsError` was probably intended.
        mock_pth.mkdir.return_value = FileExistsError
        mock_caf.return_value = True
        # NOTE(review): `side_effects` is a typo for `side_effect` (no effect).
        mock_ospth.join.side_effects = ['./test', './test', './test', './test']
        mock_ospth.exists.return_value = True
        mock_cpyfile.return_value = True
        model_obj = ModelGenerator()
        with self.assertRaises(FileExistsError) as err:
            model_obj._create_template_folder(init='./models')

    @mock.patch('modelgen.modelgenerator.copytree')
    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.Path')
    @mock.patch('modelgen.modelgenerator.copyfile')
    def test_create_alembic_folder(self, mock_cpyfile, mock_pth, mock_ospth,
                                   mock_cptr):
        '''
        Test _create_alembic_folder function. Tests the
        creation of folders alembic/versions, alembic/alembic.ini,
        alembic/env.py. Relative path is passed in this
        test
        '''
        mock_cptr.return_value = True
        mock_ospth.join.return_value = './testfolder'
        mock_ospth.isabs.return_value = False
        mock_ospth.exists.return_value = False
        mock_pth.mkdir.return_value = True
        mock_cpyfile.return_value = True
        model_obj = ModelGenerator()
        response = model_obj._create_alembic_folder(init='./testfolder')
        self.assertEqual(response, True)
        mock_cptr.assert_called_with(mock_ospth.join(), mock_ospth.join())

    @mock.patch('modelgen.modelgenerator.copytree')
    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.Path')
    @mock.patch('modelgen.modelgenerator.copyfile')
    def test_create_alembic_folder_absolute_path(self, mock_cpyfile, mock_pth, mock_ospth,
                                                 mock_cptr):
        '''
        Test _create_alembic_folder function. Tests the
        creation of folders alembic/versions, alembic/alembic.ini,
        alembic/env.py. Absolute path is passed in this
        test.
        '''
        mock_cptr.return_value = True
        mock_ospth.join.return_value = '/testfolder'
        mock_ospth.exists.return_value = False
        mock_pth.mkdir.return_value = True
        mock_cpyfile.return_value = True
        model_obj = ModelGenerator()
        response = model_obj._create_alembic_folder(init='/testfolder')
        self.assertEqual(response, True)
        mock_cptr.assert_called_with(mock_ospth.join(), mock_ospth.join())

    @mock.patch('modelgen.ModelGenerator._create_template_folder')
    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.copytree')
    @mock.patch('modelgen.modelgenerator.copyfile')
    def test_create_alembic_folder_exists(self, mock_cpyfile, mock_cptr, mock_ospth, mock_ctf):
        '''
        Test _create_alembic_folder function when folder
        already exists. The function raises FileExistsError
        '''
        mock_ctf.return_value = True
        mock_cptr.return_value = True
        # NOTE(review): `side_effects` is a typo for `side_effect` (no effect).
        mock_ospth.join.side_effects = ['./test', './test', './test', './test']
        mock_ospth.exists.return_value = True
        mock_cpyfile.return_value = True
        model_obj = ModelGenerator()
        with self.assertRaises(FileExistsError) as err:
            model_obj._create_alembic_folder(init='./docs')

    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_alembic_folder')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_template_folder')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_checkpoint_file')
    def test_modelgenerator_init(self, mock_cafldr, mock_ctfldr, mock_cchk):
        # ModelGenerator(init=...) must delegate to all three folder/file
        # creation helpers with the same init path.
        obj = ModelGenerator(init='./test')
        mock_cafldr.assert_called_with(init='./test')
        mock_cchk.assert_called_with(init='./test')
        mock_ctfldr.assert_called_with(init='./test')

    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
    def test_modelgenerator_init_create_model_elif_w_yaml_extn(self, mock_fcf,
                                                               mock_cm, mock_ospth):
        '''
        Test modelgen/modelgenerator.py file's __init__ method
        when schema yaml file with extension .yaml is passed
        '''
        mock_ospth.return_value = True
        mock_cm.return_value = True
        # NOTE(review): this rebinds the local name instead of configuring
        # the mock; `mock_fcf.return_value = True` was probably intended.
        mock_fcf = True
        obj = ModelGenerator(createmodel=True, file='./test.yaml')

    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
    def test_modelgenerator_init_create_model_elif_w_yml_extn(self, mock_fcf,
                                                              mock_cm, mock_ospth):
        '''
        Test modelgen/modelgenerator.py file's __init__ method
        when schema yaml file with extension .yml is passed
        '''
        mock_ospth.return_value = True
        mock_cm.return_value = True
        # NOTE(review): rebinds the local name; see note in the .yaml test.
        mock_fcf = True
        obj = ModelGenerator(createmodel=True, file='./test.yml')

    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
    def test_modelgenerator_init_create_model_elif_wo_yaml_extn(self, mock_fcf, mock_cm, mock_ospth):
        '''
        Test modelgen/modelgenerator.py file's __init__ method
        when schema file without .yaml or .yml is passed. The
        function will throw NameError
        '''
        mock_ospth.return_value = True
        mock_cm.return_value = True
        # NOTE(review): rebinds the local name; see note in the .yaml test.
        mock_fcf = True
        with self.assertRaises(NameError) as err:
            obj = ModelGenerator(createmodel=True, file='./test.txt')

    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
    def test_modelgenerator_createmodel_find_checkpoint_file_true(self, mock_fcf,
                                                                  mock_cm, mock_ospth):
        '''
        Test _find_checkpoint_file_ when the checkpoint file,
        .modelgen, exists.
        '''
        mock_ospth.return_value = True
        mock_cm.return_value = True
        # NOTE(review): rebinds the local name; see note in the .yaml test.
        mock_fcf = True
        obj = ModelGenerator(createmodel=True, file='./test.yaml')

    @mock.patch('modelgen.modelgenerator.path')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._create_model')
    @mock.patch('modelgen.modelgenerator.ModelGenerator._find_checkpoint_file')
    def test_modelgenerator_createmodel_find_checkpoint_file_false(self, mock_fcf,
                                                                   mock_cm, mock_ospth):
        '''
        Test _find_checkpoint_file_ when the checkpoint file,
        .modelgen, doesn't exists.
        '''
        mock_ospth.return_value = True
        mock_cm.return_value = True
        mock_fcf.return_value = False
        obj = ModelGenerator(createmodel=True, file='./test.yaml')
        mock_fcf.assert_called_with()

    @mock.patch('modelgen.modelgenerator.Helper.write_to_file')
    def test_create_checkpoint_file(self, mock_wrtf):
        '''
        Test _create_checkpoint_file. The checkpoint file
        is created when the modelgen is initialized for the
        first time
        '''
        mock_wrtf.return_value = True
        obj = ModelGenerator()
        obj._create_checkpoint_file(init='./dummy')
        mock_wrtf.assert_called_with(path='./dummy/.modelgen', data='')

    @mock.patch('modelgen.modelgenerator.path')
    def test_find_checkpoint_file_exists(self, mock_ospth):
        # Checkpoint lookup returns True when os.path.exists reports it.
        mock_ospth.exists.return_value = True
        obj = ModelGenerator()
        response = obj._find_checkpoint_file()
        self.assertEqual(response, True)
        mock_ospth.exists.assert_called_with(mock_ospth.join())

    @mock.patch('modelgen.modelgenerator.path')
    def test_find_checkpoint_file_not_found(self, mock_ospth):
        # Missing checkpoint file must raise FileNotFoundError.
        mock_ospth.exists.return_value = False
        obj = ModelGenerator()
        with self.assertRaises(FileNotFoundError) as err:
            obj._find_checkpoint_file()

    @classmethod
    def tearDownClass(self):
        # NOTE(review): conventionally this parameter is named `cls`.
        pass
| 44.820513
| 101
| 0.661971
| 13,877
| 0.992348
| 0
| 0
| 13,722
| 0.981264
| 0
| 0
| 5,066
| 0.362271
|
49a8f69931a09da4e91b5822491e86963189f463
| 223
|
py
|
Python
|
papermerge/apps/e_invoice/apps.py
|
francescocarzaniga/e_invoice_papermerge
|
e7a4a3fdab4263c02983b638f873db8d11e89041
|
[
"Apache-2.0"
] | 1
|
2021-02-15T06:38:32.000Z
|
2021-02-15T06:38:32.000Z
|
papermerge/apps/e_invoice/apps.py
|
francescocarzaniga/e_invoice_papermerge
|
e7a4a3fdab4263c02983b638f873db8d11e89041
|
[
"Apache-2.0"
] | null | null | null |
papermerge/apps/e_invoice/apps.py
|
francescocarzaniga/e_invoice_papermerge
|
e7a4a3fdab4263c02983b638f873db8d11e89041
|
[
"Apache-2.0"
] | 1
|
2021-02-15T06:38:35.000Z
|
2021-02-15T06:38:35.000Z
|
from django.apps import AppConfig
class EInvoiceConfig(AppConfig):
    """Django AppConfig for the papermerge e-invoice app."""
    name = 'papermerge.apps.e_invoice'
    label = 'e_invoice'

    # def ready(self):
    #    from papermerge.apps.data_retention import signals # noqa
| 22.3
| 67
| 0.713004
| 95
| 0.426009
| 0
| 0
| 0
| 0
| 0
| 0
| 126
| 0.565022
|
49a92b917ad9d386c28bdce310accefac0f211c6
| 2,075
|
py
|
Python
|
handler_loud/chat.py
|
ross/simone
|
cfee8eaa04a7ddd235f735fa6c07adac28b4c6a4
|
[
"MIT"
] | null | null | null |
handler_loud/chat.py
|
ross/simone
|
cfee8eaa04a7ddd235f735fa6c07adac28b4c6a4
|
[
"MIT"
] | 1
|
2021-11-04T13:47:28.000Z
|
2021-11-04T13:47:28.000Z
|
handler_loud/chat.py
|
ross/simone
|
cfee8eaa04a7ddd235f735fa6c07adac28b4c6a4
|
[
"MIT"
] | 1
|
2021-10-20T14:44:19.000Z
|
2021-10-20T14:44:19.000Z
|
from logging import getLogger
from random import randrange
import re
from simone.handlers import Registry, exclude_private
from .models import Shout
# Based loosely on https://github.com/desert-planet/hayt/blob/master/scripts/loud.coffee
class Loud(object):
    '''
    Learns and repeats LOUD MESSAGES!
    To add new LOUDs
        SAY SOMETHING F*@CK!N% LOUDLY
    To remove a LOUD
        .loud forget SOMETHING LOUD
    '''
    log = getLogger('Loud')
    # A "loud" is a message that is entirely upper-case/punctuation.
    regex = re.compile(r'^\s*(?P<loud>[A-Z"][A-Z0-9 .,\'"()\?!&%$#@+-]+)$')

    def config(self):
        # Handle the ".loud" command and listen to all channel messages.
        return {'commands': ('loud',), 'messages': True}

    def command(self, context, text, **kwargs):
        """Handle ".loud forget <TEXT>"; anything else is unrecognized."""
        prefix = 'forget '
        if text.startswith(prefix):
            target = text.replace(prefix, '', 1).upper()
            try:
                shout = Shout.objects.get(text=target)
                shout.delete()
                context.say(f"OK. I've removed `{target}` from the list.")
            except Shout.DoesNotExist:
                context.say(f"`{target}` doesn't appear in my list to begin with")
            return
        context.say(f'Unrecognized sub-command `{text}`')

    @exclude_private
    def message(self, context, text, **kwargs):
        """Learn any loud message seen, then shout a random known loud back."""
        found = self.regex.match(text)
        if not found:
            return
        loud = found.group('loud')
        self.log.debug('message: text=%s, match=%s', text, loud)
        # Remember this shout (no-op if it is already known).
        latest, _ = Shout.objects.get_or_create(text=loud)
        # Ids grow monotonically, so the newest shout has the max id: draw a
        # random int below it and take the first shout at or above that id.
        pick = randrange(0, latest.id)
        reply = Shout.objects.filter(id__gte=pick).order_by('id').first()
        self.log.debug('message: i=%d, shout=%s', pick, reply)
        if reply:
            # Join in with whatever shout we found.
            context.say(reply.text)
# Register a singleton Loud handler with the global handler registry.
Registry.register_handler(Loud())
| 33.467742
| 88
| 0.578313
| 1,797
| 0.866024
| 0
| 0
| 901
| 0.434217
| 0
| 0
| 822
| 0.396145
|
49a9a3178fb4042aad889e7fe746a420d38ecae5
| 1,013
|
py
|
Python
|
Algo and DSA/LeetCode-Solutions-master/Python/web-crawler.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 3,269
|
2018-10-12T01:29:40.000Z
|
2022-03-31T17:58:41.000Z
|
Algo and DSA/LeetCode-Solutions-master/Python/web-crawler.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 53
|
2018-12-16T22:54:20.000Z
|
2022-02-25T08:31:20.000Z
|
Algo and DSA/LeetCode-Solutions-master/Python/web-crawler.py
|
Sourav692/FAANG-Interview-Preparation
|
f523e5c94d582328b3edc449ea16ac6ab28cdc81
|
[
"Unlicense"
] | 1,236
|
2018-10-12T02:51:40.000Z
|
2022-03-30T13:30:37.000Z
|
# Time: O(|V| + |E|)
# Space: O(|V|)
# """
# This is HtmlParser's API interface.
# You should not implement it, or speculate about its implementation
# """
class HtmlParser(object):
    # Interface stub provided by the problem platform — do not implement.
    def getUrls(self, url):
        """
        :type url: str
        :rtype List[str]
        """
        pass
class Solution(object):
    def crawl(self, startUrl, htmlParser):
        """
        BFS from startUrl, visiting only links that share its hostname.

        :type startUrl: str
        :type htmlParser: HtmlParser
        :rtype: List[str]
        """
        scheme_len = len("http://")

        def host_of(url):
            # Everything up to the first '/' after the scheme (or the
            # whole url if there is no path component).
            slash = url.find('/', scheme_len)
            return url if slash == -1 else url[:slash]

        ordered = [startUrl]
        seen = {startUrl}
        # `ordered` doubles as the BFS queue: it grows while we iterate it.
        for current in ordered:
            home = host_of(current)
            for nxt in htmlParser.getUrls(current):
                if nxt not in seen and host_of(nxt) == home:
                    seen.add(nxt)
                    ordered.append(nxt)
        return ordered
| 25.325
| 69
| 0.515301
| 851
| 0.840079
| 0
| 0
| 0
| 0
| 0
| 0
| 329
| 0.324778
|
49aa6dbb7d625a529dc7cc00fc711016b4a758db
| 3,614
|
py
|
Python
|
scripts/collect.py
|
oveis/DeepVideoFaceSwap
|
e507f94d4f5d74c36e41c386c6fb14bb745a4885
|
[
"MIT"
] | 5
|
2019-05-17T11:54:04.000Z
|
2020-10-06T18:45:17.000Z
|
scripts/collect.py
|
oveis/DeepVideoFaceSwap
|
e507f94d4f5d74c36e41c386c6fb14bb745a4885
|
[
"MIT"
] | null | null | null |
scripts/collect.py
|
oveis/DeepVideoFaceSwap
|
e507f94d4f5d74c36e41c386c6fb14bb745a4885
|
[
"MIT"
] | 5
|
2019-06-05T00:20:24.000Z
|
2019-09-15T15:40:23.000Z
|
#!/usr/bin python3
""" The script to collect training data """
import logging
import os
import cv2 as cv
import numpy as np
from google_images_download import google_images_download as gid
from lib.utils import get_folder
from os.path import exists, isfile, join
logger = logging.getLogger(__name__) # pylint: disable=invalid-name
# Haar cascade classifiers for face detection. Paths are relative to the
# working directory — presumably the repository root; TODO confirm.
FRONT_FACE_CASCADE = cv.CascadeClassifier('scripts/haarcascades/haarcascade_frontalface_default.xml')
PROFILE_FACE_CASCADE = cv.CascadeClassifier('scripts/haarcascades/haarcascade_profileface.xml')
# TODO: Need a function to put images in S3 bucket.
# TODO: Retrieve face images from a given video file.
class Collect():
    """ Data collect process.

    Downloads images from Google Images for the configured keywords into
    ``output_dir/images/<keywords>`` and, when ``extract_face`` is enabled,
    crops detected faces into ``output_dir/faces/<keywords>``.
    """

    def __init__(self, arguments):
        """Store parsed CLI ``arguments`` and derive collection settings."""
        # Fix: the original format string had an unbalanced '(' -> "(args: %s".
        logger.debug("Initializing %s: (args: %s)", self.__class__.__name__, arguments)
        self.args = arguments
        self.output_dir = get_folder(self.args.output_dir)
        self.limit = self.args.limit  # max number of images to download
        self.keywords = self.args.keywords  # search terms; also the subfolder name
        self.driver_path = self.args.driver_path  # chromedriver path, needed for limit >= 100
        self.extract_face = False  # face extraction is opt-in
        self.face_img_shape = (64, 64)  # width/height of the saved face crops
        logger.debug("Initialized %s", self.__class__.__name__)

    def process(self):
        """Run the pipeline: download images, then optionally extract faces."""
        images_dir = join(self.output_dir, 'images')
        # Images are downloaded in 'images_dir/<keywords>'.
        self._download_images_from_google(images_dir)
        # Extract faces from images.
        if self.extract_face:
            faces_dir = join(self.output_dir, 'faces')
            self._detect_and_save_faces(join(images_dir, self.keywords), join(faces_dir, self.keywords))

    # Examples: https://google-images-download.readthedocs.io/en/latest/examples.html
    # Argument: https://google-images-download.readthedocs.io/en/latest/arguments.html
    def _download_images_from_google(self, output_dir):
        """Download up to ``self.limit`` images for ``self.keywords``."""
        self._check_dir_path(output_dir)
        params = {
            'keywords': self.keywords,
            "limit": self.limit,
            'output_directory': output_dir
        }
        if self.limit >= 100:
            # google_images_download needs selenium/chromedriver above 100 results.
            params['chromedriver'] = self.driver_path
        downloader = gid.googleimagesdownload()
        downloader.download(params)

    def _save_faces(self, img, faces, output_dir, file_id):
        """Crop each (x, y, w, h) region in ``faces`` out of ``img``, resize
        to ``self.face_img_shape`` and save as ``<file_id>_<i>.jpeg``."""
        self._check_dir_path(output_dir)
        # Idiom fix: enumerate instead of range(len(...)); the stray print
        # of the output path is now a debug log line.
        for i, (x, y, w, h) in enumerate(faces):
            face_img = img[y:y+h, x:x+w]
            output_file_path = join(output_dir, '{}_{}.jpeg'.format(file_id, i))
            logger.debug('Saving face to %s', output_file_path)
            face_img = cv.resize(face_img, self.face_img_shape)
            cv.imwrite(output_file_path, face_img)

    def _detect_and_save_faces(self, images_dir, faces_dir):
        """Detect frontal and profile faces in every file under ``images_dir``."""
        self._check_dir_path(images_dir)
        self._check_dir_path(faces_dir)
        file_names = [f for f in os.listdir(images_dir) if isfile(join(images_dir, f))]
        for file_name in file_names:
            file_id = file_name.split('.')[0]
            img = cv.imread(join(images_dir, file_name))
            # Haar cascades operate on grayscale input.
            gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
            frontal_faces = FRONT_FACE_CASCADE.detectMultiScale(gray, 1.3, 5)
            self._save_faces(img, frontal_faces, join(faces_dir, 'frontal'), file_id)
            profile_faces = PROFILE_FACE_CASCADE.detectMultiScale(gray, 1.3, 5)
            self._save_faces(img, profile_faces, join(faces_dir, 'profile'), file_id)

    def _check_dir_path(self, dir_path):
        """Create ``dir_path`` (and parents) if it does not exist yet."""
        if not exists(dir_path):
            os.makedirs(dir_path)
| 36.505051
| 104
| 0.649972
| 2,973
| 0.822634
| 0
| 0
| 0
| 0
| 0
| 0
| 715
| 0.197842
|
49aacdd586494ba24976083e9c7c711f99d594ea
| 1,132
|
py
|
Python
|
data_split.py
|
TalSchuster/FewRel
|
af68f52b13977ca29808c38a54995363f76cdcad
|
[
"MIT"
] | null | null | null |
data_split.py
|
TalSchuster/FewRel
|
af68f52b13977ca29808c38a54995363f76cdcad
|
[
"MIT"
] | null | null | null |
data_split.py
|
TalSchuster/FewRel
|
af68f52b13977ca29808c38a54995363f76cdcad
|
[
"MIT"
] | null | null | null |
import os
import random
from shutil import copyfile
import json

random.seed(123)  # deterministic shuffle -> reproducible folds

ROOT_PATH = './data/'
k = 5
target_path = './data/wiki_5_splits/'

'''
Splits the training set to 5 folds.
In each split, the held out set is used for test.
'''

path = os.path.join(ROOT_PATH, 'train_wiki' + '.json')
data = json.load(open(path, 'r'))

relations = list(data.keys())
num_relations = len(relations)
# Fix: ceiling division instead of round() — round() could leave trailing
# relations out of every validation fold when k does not divide evenly.
rels_per_split = -(-num_relations // k)
random.shuffle(relations)

for i in range(k):
    # Set for O(1) membership tests in the loop below.
    split_val_rels = set(relations[i*rels_per_split: (i+1)*rels_per_split])
    split_train = {}
    split_val = {}
    for rel, examples in data.items():
        if rel in split_val_rels:
            split_val[rel] = examples
        else:
            split_train[rel] = examples
    # Fix: the original print swapped the labels (reported the val count as
    # "train" and the train count as "test").
    print(f"split {i}: train: {len(split_train)}, val: {len(split_val)}")
    os.makedirs(os.path.join(target_path, str(i)), exist_ok=True)
    with open(os.path.join(target_path, str(i), 'train.json'), 'w') as f:
        json.dump(split_train, f)
    with open(os.path.join(target_path, str(i), 'val.json'), 'w') as f:
        json.dump(split_val, f)
| 25.155556
| 88
| 0.655477
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 252
| 0.222615
|
49aaf3536a9b3013f2535a7951571b5299a8099f
| 604
|
py
|
Python
|
heisen/core/__init__.py
|
HeisenCore/heisen
|
0cd4d27822960553a8e83a72c7dfeefa76e65c06
|
[
"MIT"
] | 5
|
2016-08-30T07:51:08.000Z
|
2021-09-13T11:30:05.000Z
|
heisen/core/__init__.py
|
HeisenCore/heisen
|
0cd4d27822960553a8e83a72c7dfeefa76e65c06
|
[
"MIT"
] | 15
|
2016-09-15T19:21:24.000Z
|
2016-10-22T16:22:15.000Z
|
heisen/core/__init__.py
|
HeisenCore/heisen
|
0cd4d27822960553a8e83a72c7dfeefa76e65c06
|
[
"MIT"
] | null | null | null |
from heisen.config import settings
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
    """Build the shared ConnectionPool for Heisen RPC calls.

    Returns a ConnectionPool whose 'self' entry holds one
    (host, port, username, password) tuple per local instance, merged with
    any extra servers declared in settings.RPC_SERVERS.
    """
    if settings.CREDENTIALS:
        # Fix: local variable was misspelled 'passowrd' in the original.
        username, password = settings.CREDENTIALS[0]
    else:
        username = password = None

    servers = {'self': []}
    for instance_number in range(settings.INSTANCE_COUNT):
        # Local instances listen on consecutive ports starting at RPC_PORT.
        servers['self'].append((
            'localhost', settings.RPC_PORT + instance_number, username, password
        ))
    servers.update(getattr(settings, 'RPC_SERVERS', {}))
    return ConnectionPool(servers, 'heisen', settings.APP_NAME)


# Module-level singleton used throughout the codebase for RPC calls.
rpc_call = get_rpc_connection()
| 27.454545
| 80
| 0.692053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 44
| 0.072848
|
49abd960ef01b21e1a602cfce947ec5f7f32f14e
| 3,182
|
py
|
Python
|
pychron/processing/analysis_graph.py
|
aelamspychron/pychron
|
ad87c22b0817c739c7823a24585053041ee339d5
|
[
"Apache-2.0"
] | null | null | null |
pychron/processing/analysis_graph.py
|
aelamspychron/pychron
|
ad87c22b0817c739c7823a24585053041ee339d5
|
[
"Apache-2.0"
] | 20
|
2020-09-09T20:58:39.000Z
|
2021-10-05T17:48:37.000Z
|
pychron/processing/analysis_graph.py
|
aelamspychron/pychron
|
ad87c22b0817c739c7823a24585053041ee339d5
|
[
"Apache-2.0"
] | null | null | null |
# ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
from traits.api import Event
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.graph.graph import Graph
from pychron.graph.stacked_graph import StackedGraph
from pychron.graph.stacked_regression_graph import StackedRegressionGraph
class AnalysisGraph(Graph):
    # Fired to ask the owning editor to rescale; payload is 'valid', 'x' or 'y'.
    rescale_event = Event
    # Fired to request figure-level actions; payload format varies by subclass.
    figure_event = Event

    def get_rescale_actions(self):
        """Context-menu rescale entries as (label, method name, kwargs) tuples."""
        return [('Valid Analyses', 'rescale_to_valid', {})]

    def rescale_to_valid(self):
        # Request a rescale that considers only non-omitted analyses.
        self.rescale_event = 'valid'

    def rescale_x_axis(self):
        self.rescale_event = 'x'

    def rescale_y_axis(self):
        self.rescale_event = 'y'
class AnalysisStackedGraph(AnalysisGraph, StackedGraph):
    """Stacked-panel graph that carries the analysis rescale/figure events."""
    pass
class AnalysisStackedRegressionGraph(AnalysisGraph, StackedRegressionGraph):
    """Stacked regression graph that carries the analysis rescale/figure events."""
    pass
class SpectrumGraph(AnalysisStackedGraph):
    def get_child_context_menu_actions(self):
        """Extra context-menu actions available on spectrum plots."""
        entries = (
            ('Ideogram...', 'make_ideogram'),
            ('Inverse Isochron...', 'make_inverse_isochron'),
            ('Tag Non Plateau...', 'tag_non_plateau'),
        )
        return [self.action_factory(label, method) for label, method in entries]

    def tag_non_plateau(self):
        self.figure_event = ('tag', 'tag_non_plateau')

    def make_ideogram(self):
        self.figure_event = ('alternate_figure', 'Ideogram')

    def make_inverse_isochron(self):
        self.figure_event = ('alternate_figure', 'InverseIsochron')
class IdeogramGraph(AnalysisStackedGraph):
    def get_child_context_menu_actions(self):
        """Extra context-menu actions available on ideogram plots."""
        return [self.action_factory('Correlation...', 'make_correlation'),
                self.action_factory('Identify Peaks', 'identify_peaks')]

    def make_correlation(self):
        # NOTE(review): passes self.selected_plotid here, while
        # ReferencesGraph.make_correlation passes self.selected_plot —
        # confirm which one the correlation handler actually expects.
        self.figure_event = ('correlation', (self.selected_plotid, self.selected_plot.y_axis.title))

    def identify_peaks(self):
        self.figure_event = ('identify_peaks', None)
class ReferencesGraph(AnalysisStackedRegressionGraph):
    def get_child_context_menu_actions(self):
        """Extra context-menu actions available on references plots."""
        return [self.action_factory('Correlation...', 'make_correlation')]

    def make_correlation(self):
        # NOTE(review): passes self.selected_plot here, while
        # IdeogramGraph.make_correlation passes self.selected_plotid —
        # confirm which one the correlation handler actually expects.
        self.figure_event = ('correlation', (self.selected_plot, self.selected_plot.y_axis.title))
# ============= EOF =============================================
| 33.851064
| 100
| 0.653363
| 1,928
| 0.605908
| 0
| 0
| 0
| 0
| 0
| 0
| 1,400
| 0.439975
|
49ac5028ee971f3e584f2c491889fc4e4b16901b
| 3,023
|
py
|
Python
|
stub/nginx-status-stub.py
|
geld-tech/nginx-monitor-dashboard
|
3fcd3bd184a0348095c4f4ec91a46ab98ee0ca80
|
[
"Apache-2.0"
] | 1
|
2018-07-30T14:01:36.000Z
|
2018-07-30T14:01:36.000Z
|
stub/nginx-status-stub.py
|
geld-tech/nginx-monitor-dashboard
|
3fcd3bd184a0348095c4f4ec91a46ab98ee0ca80
|
[
"Apache-2.0"
] | null | null | null |
stub/nginx-status-stub.py
|
geld-tech/nginx-monitor-dashboard
|
3fcd3bd184a0348095c4f4ec91a46ab98ee0ca80
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
"""
NGINX Status Stub
Returns sample resources usage
"""
import logging
import logging.handlers
import random
from optparse import OptionParser
from flask import Flask
app = Flask(__name__)
app.debug = True  # stub server: always run Flask in debug mode

# Initialisation
logging.basicConfig(format='[%(asctime)-15s] [%(threadName)s] %(levelname)s %(message)s', level=logging.INFO)
logger = logging.getLogger('root')
@app.route("/")
@app.route("/nginx_status", strict_slashes=False)
def nginx_status():
response = '''Active connections: {active}
server accepts handled requests
1650 1650 9255
Reading: {reading} Writing: {writing} Waiting: {waiting}'''.format(active = random.randint(1, 3),
reading = random.randint(0, 3),
writing = random.randint(1, 3),
waiting = random.randint(1, 5))
return response, 200
@app.route("/v")
@app.route("/version", strict_slashes=False)
def version():
response = 'nginx version: nginx/1.10.3 (Ubuntu)'
return response, 200
@app.route("/version_full", strict_slashes=False)
@app.route("/version/full", strict_slashes=False)
def full_version():
response = '''nginx version: nginx/1.10.3 (Ubuntu)
built with OpenSSL 1.0.2g 1 Mar 2016
TLS SNI support enabled
configure arguments: --with-cc-opt='-g -O2 -fPIE -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2' --with-ld-opt='-Wl,-Bsymbolic-functions -fPIE -pie -Wl,-z,relro -Wl,-z,now' --prefix=/usr/share/nginx --conf-path=/etc/nginx/nginx.conf --http-log-path=/var/log/nginx/access.log --error-log-path=/var/log/nginx/error.log --lock-path=/var/lock/nginx.lock --pid-path=/run/nginx.pid --http-client-body-temp-path=/var/lib/nginx/body --http-fastcgi-temp-path=/var/lib/nginx/fastcgi --http-proxy-temp-path=/var/lib/nginx/proxy --http-scgi-temp-path=/var/lib/nginx/scgi --http-uwsgi-temp-path=/var/lib/nginx/uwsgi --with-debug --with-pcre-jit --with-ipv6 --with-http_ssl_module --with-http_stub_status_module --with-http_realip_module --with-http_auth_request_module --with-http_addition_module --with-http_dav_module --with-http_geoip_module --with-http_gunzip_module --with-http_gzip_static_module --with-http_image_filter_module --with-http_v2_module --with-http_sub_module --with-http_xslt_module --with-stream --with-stream_ssl_module --with-mail --with-mail_ssl_module --with-threads'''
return response, 200
if __name__ == "__main__":
# Parse options
opts_parser = OptionParser()
opts_parser.add_option('--port', type="int", dest='port', help='IP Port to listen to.', default=8000)
opts_parser.add_option('--debug', action='store_true', dest='debug', help='Print verbose output.', default=False)
options, args = opts_parser.parse_args()
if options.debug:
logger.setLevel(logging.DEBUG)
logger.debug('Enabled DEBUG logging level.')
logger.info('Options parsed')
app.run(host='0.0.0.0', port=options.port)
| 50.383333
| 1,124
| 0.700629
| 0
| 0
| 0
| 0
| 2,068
| 0.684089
| 0
| 0
| 1,805
| 0.597089
|
49ad0529acc7b30e818083fbddf61cedb7ec9149
| 1,616
|
py
|
Python
|
test_question4.py
|
fmakawa/Practice
|
7f6eaa1dde4e46088ca5dcee76de1bb56a363238
|
[
"MIT"
] | null | null | null |
test_question4.py
|
fmakawa/Practice
|
7f6eaa1dde4e46088ca5dcee76de1bb56a363238
|
[
"MIT"
] | null | null | null |
test_question4.py
|
fmakawa/Practice
|
7f6eaa1dde4e46088ca5dcee76de1bb56a363238
|
[
"MIT"
] | null | null | null |
"""
Question 4
Level 1
Question:
Write a program which accepts a sequence of comma-separated numbers from console and generate a list and a tuple which contains every number.
Suppose the following input is supplied to the program:
34,67,55,33,12,98
Then, the output should be:
['34', '67', '55', '33', '12', '98']
('34', '67', '55', '33', '12', '98')
Hints:
In case of input data being supplied to the question, it should be assumed to be a console input.
tuple() method can convert list to tuple
"""
import unittest
from unittest.mock import patch
from question4 import listicle, tuplicle, listpicle
class TestDict(unittest.TestCase):
@patch('builtins.input', lambda *args: '34,67,55,33,12,98')
def test_list(self):
d=listicle()
self.assertEqual(d, ['34', '67', '55', '33', '12', '98'], "Supposed to equal ['34', '67', '55', '33', '12', '98']")
@patch('builtins.input', lambda *args: '34,67,55,33,12,98')
def test_tuple(self):
d = tuplicle()
self.assertEqual(d, ('34', '67', '55', '33', '12', '98'),"Supposed to equal ('34', '67', '55', '33', '12', '98')")
@patch('builtins.input', lambda *args: '34,67,55,33,12,98')
def test_listpicle(self):
d = listpicle()
print(d)
self.assertEqual(d[0], ['34', '67', '55', '33', '12', '98'],"Supposed to equal ['34', '67', '55', '33', '12', '98']")
self.assertEqual(d[1], ('34', '67', '55', '33', '12', '98'),"Supposed to equal ('34', '67', '55', '33', '12', '98')")
suite = unittest.TestLoader().loadTestsFromTestCase(TestDict)
unittest.TextTestRunner(verbosity=2).run(suite)
| 36.727273
| 141
| 0.61448
| 897
| 0.555074
| 0
| 0
| 842
| 0.52104
| 0
| 0
| 927
| 0.573639
|
49ad08a13c544d4263d6239603d117433df3bf65
| 53
|
py
|
Python
|
src/poliastro/_math/integrate.py
|
DhruvJ22/poliastro
|
ac5fafc6d054b2c545e111e5a6aa32259998074a
|
[
"MIT"
] | 8
|
2015-05-09T17:21:57.000Z
|
2020-01-28T06:59:18.000Z
|
src/poliastro/_math/integrate.py
|
DhruvJ22/poliastro
|
ac5fafc6d054b2c545e111e5a6aa32259998074a
|
[
"MIT"
] | 4
|
2015-12-29T13:08:01.000Z
|
2019-12-27T12:58:04.000Z
|
src/poliastro/_math/integrate.py
|
DhruvJ22/poliastro
|
ac5fafc6d054b2c545e111e5a6aa32259998074a
|
[
"MIT"
] | 1
|
2016-10-05T08:34:44.000Z
|
2016-10-05T08:34:44.000Z
|
# Thin indirection module: re-export SciPy's adaptive quadrature routine so the
# rest of the package imports it from a single internal location.
from scipy.integrate import quad
# Public API of this module.
__all__ = ["quad"]
| 13.25
| 32
| 0.735849
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6
| 0.113208
|
49ad2866726183e18afb70540beb33954b2be143
| 543
|
py
|
Python
|
app/tasks/uwu/uwu.py
|
tahosa/discord-util-bot
|
2f261c5ae06da8a62e72502b53341720437860f5
|
[
"MIT"
] | null | null | null |
app/tasks/uwu/uwu.py
|
tahosa/discord-util-bot
|
2f261c5ae06da8a62e72502b53341720437860f5
|
[
"MIT"
] | null | null | null |
app/tasks/uwu/uwu.py
|
tahosa/discord-util-bot
|
2f261c5ae06da8a62e72502b53341720437860f5
|
[
"MIT"
] | 1
|
2022-02-09T04:16:54.000Z
|
2022-02-09T04:16:54.000Z
|
import logging
import discord
import discord.ext.commands as commands
_LOG = logging.getLogger('discord-util').getChild("uwu")
class Uwu(commands.Cog):
    """Cog that answers a couple of trigger phrases with uwu-speak."""
    @commands.Cog.listener()
    async def on_message(self, message: discord.Message):
        # Lower-case once; the original compared the lowered content repeatedly.
        text = message.content.lower()
        if text.startswith(('hello bot', 'hewwo bot')):
            await message.channel.send('Hewwo uwu')
            return
        if text.startswith('good bot'):
            await message.add_reaction("\N{FLUSHED FACE}")
| 31.941176
| 110
| 0.685083
| 412
| 0.758748
| 0
| 0
| 383
| 0.705341
| 354
| 0.651934
| 80
| 0.14733
|
49add70868769fd8f813dafc8912a925207ca004
| 4,011
|
py
|
Python
|
rocket.py
|
FrCln/SpaceGarbage
|
0e121143888b108eac2b86b1dd9fcbf20dcef36e
|
[
"MIT"
] | null | null | null |
rocket.py
|
FrCln/SpaceGarbage
|
0e121143888b108eac2b86b1dd9fcbf20dcef36e
|
[
"MIT"
] | null | null | null |
rocket.py
|
FrCln/SpaceGarbage
|
0e121143888b108eac2b86b1dd9fcbf20dcef36e
|
[
"MIT"
] | null | null | null |
import math
import os
from curses_tools import draw_frame, get_frame_size
def _limit(value, min_value, max_value):
"""Limit value by min_value and max_value."""
if value < min_value:
return min_value
if value > max_value:
return max_value
return value
def _apply_acceleration(speed, speed_limit, forward=True):
"""Change speed — accelerate or brake — according to force direction."""
speed_limit = abs(speed_limit)
speed_fraction = speed / speed_limit
# если корабль стоит на месте, дергаем резко
# если корабль уже летит быстро, прибавляем медленно
delta = math.cos(speed_fraction) * 0.75
if forward:
result_speed = speed + delta
else:
result_speed = speed - delta
result_speed = _limit(result_speed, -speed_limit, speed_limit)
# если скорость близка к нулю, то останавливаем корабль
if abs(result_speed) < 0.1:
result_speed = 0
return result_speed
class Rocket:
    """Player-controlled rocket: animated frames and inertial movement on a curses canvas."""
    def __init__(self, canvas, init_x, init_y, delay):
        """Load the two animation frames and place the rocket at (init_x, init_y).

        `delay` repeats each frame that many times in the frame list, which
        slows the animation relative to the game tick.
        """
        self.canvas = canvas
        self.x = init_x
        self.y = init_y
        frames = []
        for n in 1, 2:
            with open(os.path.join('rocket', f'rocket_frame_{n}.txt')) as f:
                frames.append(f.read())
        self.frames = []
        for frame in frames:
            for i in range(delay):
                self.frames.append(frame)
        self.current_frame = 0
        self.height, self.width = get_frame_size(self.frames[0])
        self.row_speed = 0
        self.column_speed = 0
    def update_speed(self, rows_direction, columns_direction, row_speed_limit=2, column_speed_limit=2, fading=0.9):
        """Update speed smoothly to make control handy for player. Return new speed value (row_speed, column_speed)
        rows_direction — is a force direction by rows axis. Possible values:
           -1 — if force pulls up
           0  — if force has no effect
           1  — if force pulls down
        columns_direction — is a force direction by colums axis. Possible values:
           -1 — if force pulls left
           0  — if force has no effect
           1  — if force pulls right
        Raises ValueError on out-of-range directions or fading.
        """
        if rows_direction not in (-1, 0, 1):
            raise ValueError(f'Wrong rows_direction value {rows_direction}. Expects -1, 0 or 1.')
        if columns_direction not in (-1, 0, 1):
            raise ValueError(f'Wrong columns_direction value {columns_direction}. Expects -1, 0 or 1.')
        if fading < 0 or fading > 1:
            raise ValueError(f'Wrong fading value {fading}. Expects float between 0 and 1.')
        # Damp the speed so the ship gradually comes to a stop on its own.
        self.row_speed *= fading
        self.column_speed *= fading
        row_speed_limit, column_speed_limit = abs(row_speed_limit), abs(column_speed_limit)
        if rows_direction != 0:
            self.row_speed = _apply_acceleration(self.row_speed, row_speed_limit, rows_direction > 0)
        if columns_direction != 0:
            self.column_speed = _apply_acceleration(self.column_speed, column_speed_limit, columns_direction > 0)
    def update(self):
        """Erase the old frame, move (clamped to the canvas), advance the animation, redraw."""
        h, w = self.canvas.getmaxyx()
        # Erase the previously drawn frame before moving.
        draw_frame(
            self.canvas,
            self.y,
            self.x,
            self.frames[int(self.current_frame)],
            negative=True
        )
        self.x += self.column_speed
        # Undo the move if it would push the rocket off the canvas edge.
        if not 0 < self.x < w - self.width:
            self.x -= self.column_speed
        self.y += self.row_speed
        if not 0 < self.y < h - self.height:
            self.y -= self.row_speed
        # Half-step advance: int() truncation shows each list entry for two update() calls.
        self.current_frame = (self.current_frame + 0.5) % len(self.frames)
        draw_frame(
            self.canvas,
            self.y,
            self.x,
            self.frames[int(self.current_frame)]
        )
    def destroy(self):
        """Erase the rocket from the canvas (negative draw of the current frame)."""
        draw_frame(
            self.canvas,
            self.y,
            self.x,
            self.frames[int(self.current_frame)],
            negative=True
        )
| 31.335938
| 115
| 0.607828
| 3,106
| 0.738996
| 0
| 0
| 0
| 0
| 0
| 0
| 1,250
| 0.297407
|
49ae3d28975be04fc1299eea9d4febbbbbb376de
| 7,963
|
py
|
Python
|
src/roll.py
|
SimonPerche/PersonalitiesWars
|
495803a5be5e9fde572c3f39086d8a3510c75f58
|
[
"MIT"
] | null | null | null |
src/roll.py
|
SimonPerche/PersonalitiesWars
|
495803a5be5e9fde572c3f39086d8a3510c75f58
|
[
"MIT"
] | null | null | null |
src/roll.py
|
SimonPerche/PersonalitiesWars
|
495803a5be5e9fde572c3f39086d8a3510c75f58
|
[
"MIT"
] | 1
|
2022-03-08T22:07:50.000Z
|
2022-03-08T22:07:50.000Z
|
import secrets
import asyncio
from datetime import datetime, timedelta
import discord
from discord.ext import commands
from database import DatabasePersonality, DatabaseDeck
class Roll(commands.Cog):
    """Cog implementing the roll/claim loop: roll a random personality, let one user claim it."""
    def __init__(self, bot):
        """Initialize the cog with the bot."""
        self.bot = bot
    #### Commands ####
    @commands.command(description='Roll a random idom and get the possibility to claim it.')
    async def roll(self, ctx):
        """Roll a random personality, show its embed and offer a Claim button.

        Enforces the per-hour roll quota and the per-user claim cooldown;
        already-owned personalities are shown without a button.
        """
        minutes = min_until_next_roll(ctx.guild.id, ctx.author.id)
        if minutes != 0:
            await ctx.send(f'You cannot roll right now. '
                           f'Next rolls reset **<t:{int((datetime.now().replace(minute=0) + timedelta(hours=1)).timestamp())}:R>**.')
            return
        perso = None
        id_perso = None
        msg_embed = ''
        # Retry until a random id resolves to an existing personality.
        while not perso:
            id_perso = DatabasePersonality.get().get_random_perso_id()
            perso = DatabasePersonality.get().get_perso_information(id_perso)
        # Update roll information in database
        DatabaseDeck.get().update_last_roll(ctx.guild.id, ctx.author.id)
        user_nb_rolls = DatabaseDeck.get().get_nb_rolls(ctx.guild.id, ctx.author.id)
        DatabaseDeck.get().set_nb_rolls(ctx.guild.id, ctx.author.id, user_nb_rolls + 1)
        max_rolls = DatabaseDeck.get().get_rolls_per_hour(ctx.guild.id)
        # Warn the user when exactly 2 rolls remain this hour.
        if max_rolls - user_nb_rolls - 1 == 2:
            msg_embed += f'{ctx.author.name if ctx.author.nick is None else ctx.author.nick}, 2 uses left.\n'
        # Get badges information
        badges_with_perso = DatabaseDeck.get().get_badges_with(ctx.guild.id, id_perso)
        if badges_with_perso:
            msg_embed += f'**Required for {",".join([badge["name"] for badge in badges_with_perso])}' \
                         f' badge{"" if len(badges_with_perso) == 1 else "s"}!**\n'
        current_image = DatabaseDeck.get().get_perso_current_image(ctx.guild.id, id_perso)
        embed = discord.Embed(title=perso['name'], description=perso['group'], colour=secrets.randbelow(0xffffff))
        if current_image:
            embed.set_image(url=current_image)
        id_owner = DatabaseDeck.get().perso_belongs_to(ctx.guild.id, id_perso)
        if id_owner:
            owner = ctx.guild.get_member(id_owner)
            # Could be None if the user left the server
            if owner:
                text = f'Belongs to {owner.name if not owner.nick else owner.nick}'
                if owner.avatar:
                    embed.set_footer(icon_url=owner.avatar.url, text=text)
                else:
                    embed.set_footer(text=text)
        # Mention users if they wish for this personality
        id_members = DatabaseDeck.get().get_wished_by(ctx.guild.id, id_perso)
        wish_msg = ''
        for id_member in id_members:
            member = ctx.guild.get_member(id_member)
            # Could be None if the user left the server
            if member:
                wish_msg += f'{member.mention} '
        if wish_msg:
            msg_embed += f'Wished by {wish_msg}'
        class ClaimButton(discord.ui.View):
            # One-shot view: records the first allowed claimer, then disables itself.
            def __init__(self, timeout: int):
                super().__init__(timeout=timeout)
                self.is_claimed = False
                self.user_claim = None
            @discord.ui.button(label="Claim", emoji='💕', style=discord.ButtonStyle.green)
            async def claim(self, button: discord.ui.Button, interaction: discord.Interaction):
                self.user_claim = interaction.user
                self.is_claimed = True
                self.disable()
            async def interaction_check(self, interaction: discord.Interaction) -> bool:
                # Reject the click when the clicking user is still on claim cooldown.
                time_until_claim = min_until_next_claim(interaction.guild.id, interaction.user.id)
                if time_until_claim != 0:
                    cant_claiming_username = interaction.user.name if interaction.user.nick is None else interaction.user.nick
                    await interaction.response.send_message(f'{cant_claiming_username}, you can\'t claim right now. '
                                                            f'Ready **<t:{int((datetime.now() + timedelta(minutes=time_until_claim)).timestamp())}:R>**.')
                    return False
                return True
            def disable(self):
                for child in self.children:
                    child.disabled = True
                self.stop()
        claim_timeout = DatabaseDeck.get().get_server_configuration(ctx.guild.id)["time_to_claim"]
        claim_button_view = ClaimButton(timeout=claim_timeout)
        # Cannot claim if perso already claim
        if id_owner:
            await ctx.send(msg_embed, embed=embed)
            return
        msg = await ctx.send(msg_embed, embed=embed, view=claim_button_view)
        await claim_button_view.wait()
        # Timeout
        if not claim_button_view.is_claimed:
            claim_button_view.disable()
            await msg.edit(view=claim_button_view)
        else:
            user = claim_button_view.user_claim
            username = user.name if user.nick is None else user.nick
            DatabaseDeck.get().add_to_deck(ctx.guild.id, perso['id'], user.id)
            await ctx.send(f'{username} claims {perso["name"]}!')
            if user.avatar:
                embed.set_footer(icon_url=user.avatar.url, text=f'Belongs to {username}')
            else:
                embed.set_footer(text=f'Belongs to {username}')
            await msg.edit(embed=embed, view=claim_button_view)
            if badges_with_perso:
                # Report badge progression (and completion) for the claimer.
                ids_deck = DatabaseDeck.get().get_user_deck(ctx.guild.id, user.id)
                msg_badges_progression = ''
                for badge in badges_with_perso:
                    perso_in_badge = DatabaseDeck.get().get_perso_in_badge(badge['id'])
                    count = sum([id_perso in ids_deck for id_perso in perso_in_badge])
                    nb_perso = len(perso_in_badge)
                    if perso['id'] in perso_in_badge and count == nb_perso:
                        await ctx.send(f'**{user.mention}, you have just unlocked {badge["name"]} badge!**')
                    msg_badges_progression += f'{badge["name"]} {count}/{nb_perso}\n'
                badge_embed = discord.Embed(title=f'Badges progression with {perso["name"]}',
                                            description=msg_badges_progression)
                await ctx.send(embed=badge_embed)
#### Utilities functions ####
def min_until_next_claim(id_server, id_user):
    """Return minutes until next claim (0 if the user can claim now)."""
    deck = DatabaseDeck.get()
    last_claim = deck.get_last_claim(id_server, id_user)
    if not last_claim:
        return 0
    claim_interval = deck.get_server_configuration(id_server)['claim_interval']
    claimed_at = datetime.strptime(last_claim, '%Y-%m-%d %H:%M:%S')
    elapsed_minutes = int((datetime.now() - claimed_at).total_seconds() // 60)
    # Still inside the cooldown window?
    if elapsed_minutes < claim_interval:
        return claim_interval - elapsed_minutes
    return 0
def min_until_next_roll(id_server, id_user):
    """Return minutes until next roll (0 if the user can roll now)."""
    deck = DatabaseDeck.get()
    last_roll = deck.get_last_roll(id_server, id_user)
    if not last_roll:
        return 0
    last_roll_dt = datetime.strptime(last_roll, '%Y-%m-%d %H:%M:%S')
    now = datetime.now()
    # A new hour (or a new day) resets the per-hour counter.
    same_hour = now.date() == last_roll_dt.date() and now.hour == last_roll_dt.hour
    if not same_hour:
        deck.set_nb_rolls(id_server, id_user, 0)
        return 0
    max_rolls = deck.get_rolls_per_hour(id_server)
    used_rolls = deck.get_nb_rolls(id_server, id_user)
    if used_rolls < max_rolls:
        return 0
    # Quota exhausted: wait until the top of the hour.
    return 60 - now.minute
| 41.259067
| 154
| 0.615346
| 6,320
| 0.793372
| 0
| 0
| 6,169
| 0.774416
| 6,076
| 0.762742
| 1,489
| 0.186919
|
49ae4cab0439ba556dfe9b168c615e0466cf0551
| 2,195
|
py
|
Python
|
test.py
|
mltnhm/sr-turtle
|
d839eeb50e4ba70cfc2a4070c9f6fda2f0b19ca2
|
[
"MIT"
] | 1
|
2020-04-16T18:06:13.000Z
|
2020-04-16T18:06:13.000Z
|
test.py
|
mltnhm/sr-turtle
|
d839eeb50e4ba70cfc2a4070c9f6fda2f0b19ca2
|
[
"MIT"
] | 3
|
2019-05-11T20:39:31.000Z
|
2019-11-13T10:51:59.000Z
|
test.py
|
mltnhm/sr-turtle
|
d839eeb50e4ba70cfc2a4070c9f6fda2f0b19ca2
|
[
"MIT"
] | 1
|
2019-11-12T08:02:52.000Z
|
2019-11-12T08:02:52.000Z
|
from __future__ import print_function
import time
from sr.robot import *
# State labels for the behaviour loop at the bottom of this script.
SEARCHING = "SEARCHING"
DRIVING = "DRIVING"
# Robot interface: exposes motors, vision (see), grab and release.
R = Robot()
def drive(speed, seconds):
    """Run both wheels forward at `speed` for `seconds`, then stop."""
    board = R.motors[0]
    board.m0.power = speed
    board.m1.power = speed
    time.sleep(seconds)
    board.m0.power = 0
    board.m1.power = 0
def turn(speed, seconds):
    """Spin in place (wheels in opposite directions) at `speed` for `seconds`, then stop."""
    board = R.motors[0]
    board.m0.power = speed
    board.m1.power = -speed
    time.sleep(seconds)
    board.m0.power = 0
    board.m1.power = 0
state = SEARCHING
def get_gold_tokens():
    """Return the currently visible gold tokens, closest first."""
    visible = [marker for marker in R.see()
               if marker.info.marker_type is MARKER_TOKEN_GOLD]
    # Sort with the closest token first.
    return sorted(visible, key=lambda m: m.dist)
# Behaviour loop: alternate between scanning for gold tokens (SEARCHING) and
# steering towards the closest one (DRIVING) until a token is grabbed.
while True:
    if state == SEARCHING:
        print("Searching for gold tokens...")
        tokens = get_gold_tokens()
        print(tokens)
        if len(tokens) > 0:
            # tokens is sorted closest-first, so tokens[0] is the nearest.
            m = tokens[0]
            # TODO: Pick the closest token, not just any token.
            print("Token sighted. {0} is {1}m away, bearing {2} degrees." \
                .format(m.info.offset, m.dist, m.rot_y))
            state = DRIVING
        else:
            # Nothing visible: rotate a bit and look again.
            print("Can't see anything.")
            turn(25, 0.3)
            time.sleep(0.2)
    elif state == DRIVING:
        print("Aligning...")
        tokens = get_gold_tokens()
        if len(tokens) == 0:
            # Lost sight of every token: go back to scanning.
            state = SEARCHING
        else:
            m = tokens[0]
            if m.dist < 0.4:
                # Close enough to attempt the grab.
                print("Found it!")
                if R.grab():
                    print("Gotcha!")
                    turn(50, 0.5)
                    drive(50, 1)
                    R.release()
                    drive(-50, 0.5)
                else:
                    print("Aww, I'm not close enough.")
                exit()
            elif -15 <= m.rot_y <= 15:
                # Roughly aligned: drive straight ahead.
                print("Ah, that'll do.")
                drive(50, 0.5)
            elif m.rot_y < -15:
                print("Left a bit...")
                turn(-12.5, 0.5)
            elif m.rot_y > 15:
                print("Right a bit...")
                turn(12.5, 0.5)
| 25.229885
| 75
| 0.491116
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 326
| 0.148519
|
49aebc3c829e124d35af1e1fc14ed2a19ad3ba06
| 9,218
|
py
|
Python
|
ATIVIDAS UF/exemplos.py
|
alverad-katsuro/Python
|
6ba3cc604fd9cde3ee012fcf17bbf6cd944e8c38
|
[
"MIT"
] | null | null | null |
ATIVIDAS UF/exemplos.py
|
alverad-katsuro/Python
|
6ba3cc604fd9cde3ee012fcf17bbf6cd944e8c38
|
[
"MIT"
] | null | null | null |
ATIVIDAS UF/exemplos.py
|
alverad-katsuro/Python
|
6ba3cc604fd9cde3ee012fcf17bbf6cd944e8c38
|
[
"MIT"
] | null | null | null |
from math import log
def ef_cache():
    """Ask for cache hits and total accesses on the console; return the hit ratio.

    NOTE(review): eval() on console input executes arbitrary expressions; kept
    as-is for parity with the rest of these exercises, but unsafe on untrusted input.
    """
    hits = eval(input("Digite a quantidade de acertos: "))
    accesses = eval(input("Digite a quantidade de acessos: "))
    return hits / accesses
def bit_dados():
    """Ask for cache capacity and line width; return (capacity, width, capacity * width)."""
    capacity = eval(input("Digite a capacidade da cache: "))
    line_width = eval(input("Digite a quantidade de bits ou byte: "))
    return capacity, line_width, capacity * line_width
def bit_tag(qt_blocos, qt_linhas):
    """Print the per-line tag width (bits) and return the total tag bits over all lines."""
    blocks_per_line = qt_blocos / qt_linhas
    tag_width = log(blocks_per_line, 2)
    print(f"A quantidade de bits na tag é {tag_width}")
    return qt_linhas * tag_width
def dese_memodire(capacidade, largura_linhas, linhas):
    """Print the direct-mapped address layout and return (tag, line, word, total) bit widths."""
    total_bits = int(log(capacidade, 2))
    word_bits = int(log(largura_linhas, 2))
    line_bits = int(log(linhas, 2))
    tag_bits = total_bits - line_bits - word_bits
    print("{:-^33}".format(str(total_bits) + " bits"))
    print("{}{: ^30}{}".format(tag_bits, line_bits, word_bits))
    print("{:-^33}".format(''))
    return tag_bits, line_bits, word_bits, total_bits
def estru_memoasso():
    """Ask for MP capacity and block width; return (block-field bits, word-field bits)."""
    capacity = eval(input("Digite a capacidade da MP: "))
    block_width = eval(input("Digite a largura da memória: "))
    return int(log(capacity / block_width, 2)), int(log(block_width, 2))
def conv_pot(pot):
    """Convert an exponent of 2 (bit count = 2**pot bits) into (remaining exponent, unit).

    E.g. pot=13 -> (3, 'Kbit'), i.e. 2**13 bits == 2**3 Kbit.

    Bug fix: the original had no branch for pot >= 31, so `pot0`/`letra` were
    never bound and the final return raised UnboundLocalError.  Behavior for
    pot < 31 is unchanged; 31..40 now map to the Gbit tier (40 itself to Tbit),
    and anything above is expressed in Tbit.
    """
    if pot < 11:
        if pot == 10:
            pot0 = pot - 10
            letra = "Kbit"
        else:
            pot0 = pot
            letra = "bit"
    elif 10 < pot < 21:
        if pot == 20:
            pot0 = pot - 20
            letra = "Mbit"
        else:
            pot0 = pot - 10
            letra = "Kbit"
    elif 20 < pot < 31:
        if pot == 30:
            pot0 = pot - 30
            letra = "Gbit"
        else:
            pot0 = pot - 20
            letra = "Mbit"
    elif 30 < pot < 41:
        if pot == 40:
            pot0 = pot - 40
            letra = "Tbit"
        else:
            pot0 = pot - 30
            letra = "Gbit"
    else:
        # pot >= 41: largest supported unit.
        pot0 = pot - 40
        letra = "Tbit"
    return pot0, letra
def exemplo5_1():
    """Worked example 5.1: cache/MP efficiency from a hit ratio (uses ef_cache)."""
    print("Exemplo 5.1")
    print("Um determinado sistema de computação possui uma memória cache, MP e processador.")
    print("Em operações normais, obtêm-se 96 acertos para cada 100 acessos do processador às memórias.", end=" ")
    print("Qual deve ser a eficiência do sistema cache/MP")
    e = ef_cache()
    print(f"A eficiencia é {e * 100}%")
def exemplo5_2():
    """Worked example 5.2: total bit count of a direct-mapped cache (data + tag bits)."""
    print("Exemplo 5.2")
    print("Cálculo da quantidade de bits necessários para uma determianda memória cache")
    print("Considere um sistema de computação com uma memória cache de 32KB de capacidade,", end=" ")
    print("constituida de linhas de linhas com 8 bytes de largura.", end=" ")
    print("A MP possui uma capacidade de 16MB.")
    cap_larg = bit_dados()
    blocos = eval(input("Digite a capacidade da MP: ")) / cap_larg[1]
    linhas = cap_larg[0] / cap_larg[1]
    tag_bit = bit_tag(blocos, linhas)
    pot = log(cap_larg[2] + tag_bit, 2)
    pot_letra = conv_pot(pot)
    print(f"A quantidade de bits necessários é {round(2 ** pot_letra[0], 0)} {pot_letra[1]}")
def exemplo5_3():
    """Worked example 5.3: direct-mapped address layout (uses dese_memodire)."""
    print("Exemplo 5.3")
    print("Calcule o formato de endereço para memórias cache com mapeamento direto.")
    print("Uma MP com 64MB de capacidade associada a uma memória cache de 2K linhas, cada uma com largura de 16 bytes.", end=" ")
    print("Determine o formato do endereço para ser interpretado pelo sistema de controle da cache.")
    capacidade = eval(input("Digite a capacidade da MP: "))
    largura_linhas = eval(input("Digite a largura da cache: "))
    linhas = eval(input("Digite a quantidade de linhas da cache: "))
    dese_memodire(capacidade, largura_linhas, linhas)
def exemplo5_4():
    """Worked example 5.4: split a hex address into tag/line/word fields (direct mapping)."""
    print("Exemplo 5.4")
    print("Seja uma MP constituida de blocos com largura de 32 bytes, associada a uma cache com 128KB.", end=' ')
    print("Em dado instante o processador realiza um acesso, colocando o seguinte endereço 3FC92B6")
    hexa = input("Digite o hexa")
    # 28-bit zero-padded binary rendering of the hex address.
    binario = f'{int(hexa, 16):028b}'
    capacidade = 2 ** (len(hexa) * 4)
    largura_linhas = eval(input("Digite a largura da cache: "))
    linhas = eval(input("Digite a capacidade do cache: ")) / largura_linhas
    x = dese_memodire(capacidade, largura_linhas, linhas)
    print("{:-^50}".format(str(x[3]) + " bits"))
    print("{}{: ^30}{}".format((binario[0:x[0]]), (binario[x[0]:x[0] + x[1]]), (binario[x[0] + x[1]:])))
    print("{:-^50}".format(''))
def exemplo5_5():
    """Worked example 5.5: total bit count of a fully associative cache."""
    print("Exemplo 5.5")
    print("Cálculo da quantidade de bits necessária para uma determinada memória cache.")
    print("Considere um sistema de computação com uma memória cache de 32KB de capacidade,", end=" ")
    print("constituida de linhas com 8 bytes de largura.", end=" ")
    print("A MP possui uma capacidade de 16MB")
    cap_larg = bit_dados()
    linhas = cap_larg[0] / cap_larg[1]
    blocos = eval(input("Digite a capacidade da MP")) / cap_larg[1]
    bit_bloco_linha = linhas * log(blocos, 2)
    pot = log(cap_larg[2] + bit_bloco_linha, 2)
    pot_letra = conv_pot(pot)
    print(f"A quantidade de bits necessários é {round(2 ** pot_letra[0], 0)} {pot_letra[1]}")
def exemplo5_6():
    """Worked example 5.6: address layout for fully associative mapping."""
    print("Exemplo 5.6")
    print("Cálculo do formato de endereço para memórias cache com mapa associativo completo.")
    print("Considere uma MP com 64MB de capacidade associdada a uma memória cache que possui 2K linhas,", end='')
    print(" cada uma com largura de 16 bytes. ", end="")
    print("Determine o formato do endereço para ser interpretado pelo sistema de controle da cache.")
    t_blocos_pot_lar = estru_memoasso()
    print("{:-^50}".format(str(t_blocos_pot_lar[0] + t_blocos_pot_lar[1]) + " bits"))
    print("{}{: ^40}{}".format((t_blocos_pot_lar[0]), "", (t_blocos_pot_lar[1])))
    print("{:-^50}".format(''))
def exemplo5_7():
    """Worked example 5.7: block field of a hex address under fully associative mapping."""
    print("Exemplo 5.7")
    print("Seja uma MP constituída de blocos com largura de 32 bytes, associada a uma cache com 64KB.")
    print("Em dado instante o processador realiza um acesso, colocando o seguinte endereço 3FC92B6.")
    print("Qual deverá ser o valor binário do campo bloco que será localizado pelo sistema de controle de cache.")
    hexa = input("Digite o hexa")
    binario = f'{int(hexa, 16):028b}'
    capacidade = len(hexa) * 4
    largura = int(log(eval(input("Digite a largura: ")), 2))
    print(binario)
    print("{:-^50}".format(str(len(binario)) + " bits"))
    print("{}{: ^20}{}".format((binario[:capacidade - largura]), "", (binario[capacidade - largura:])))
    print("{:-^50}".format(''))
def exemplo5_8():
    """Worked example 5.8: total bit count for a 4-way set-associative cache."""
    print("Exemplo 5.8")
    print("Cálculo da quantidade de bits necessários para uma determinada memória cache,", end='')
    print("que funciona com mapeamento por conjunto de quatro.")
    print("Considere um sistema de computação com uma memória cache de 32KB de capacidade,", end='')
    print(" constituída de linhas com 8 bytes de largura e conjunto de 4. A MP possui uma capacidade de 16MB")
    cap_larg = bit_dados()
    linhas = cap_larg[0] / cap_larg[1]
    blocos = eval(input("Digite a capacidade da MP")) / cap_larg[1]
    qt_conju = eval(input("Digite a quantidade de conjuntos da memória: "))
    quant_bitconju = linhas / qt_conju
    tamanho_tag = blocos / quant_bitconju * qt_conju
    pot = log(cap_larg[2] + tamanho_tag, 2)
    pot_letra = conv_pot(pot)
    print(f"A quantidade de bits necessários é {round(2 ** pot_letra[0], 0)} {pot_letra[1]}")
def exemplo5_9():
    """Worked example 5.9: address layout for set-associative mapping."""
    print("Exemplo 5.9")
    print("Cálculo de formato de endereço para memória cache com mapeamento associativo por conjunto.")
    print("Considere uma MP com 64MB de capacidade associada a uma memória cache que funciona com ", end='')
    print("mapeamento associativo por conjunto de 4 e que possui 32KB, com linhas de largura de 16 bytes. ")
    print("Determine o formato do endereço para ser imterpretado pelo sistema de controle da cache.")
    cap_larg = bit_dados()
    linhas = cap_larg[0] / cap_larg[1]  # cache capacity / line width
    blocos = eval(input("Digite a capacidade da MP")) / cap_larg[1]  # blocks in MP
    qt_conju = eval(input("Digite a quantidade de conjuntos da memória: "))
    quant_bitconju = int(linhas / qt_conju)
    tamanho_tag = int(blocos / quant_bitconju)
    print("{:-^50}".format(str(int(log(blocos, 2) + qt_conju)) + " bits"))
    print("{}{: ^40}{}".format((log(tamanho_tag, 2)), (log(quant_bitconju, 2)), (int(log(cap_larg[1], 2)))))
    print("{:-^50}".format(''))
def exemplo5_10():
    """Worked example 5.10: set field of a hex address under 4-way set-associative mapping."""
    print("Exemplo 5.10")
    print("Seja uma MP constituida de blocos com largura de 32 bytes, associada a uma cache com 64KB.", end=" ")
    print("A cache usa mapeamento por conjunto de 4.", end=" ")
    print("Em dado instante o processador realiza um acesso, ao seguinte endereço: 3FC92B6", end=" ")
    print("Determine o conjunto binario a ser localizado pelo sistema de controle da cache.")
    cap_larg = bit_dados()
    linhas = cap_larg[0] / cap_larg[1]
    hexa = input("Digite o hexa")
    binario = f'{int(hexa, 16):028b}'
    capacidade = len(hexa) * 4
    qt_conjun = eval(input("Digite a quantidade de conjuntos: "))
    bit_conju = int(log(linhas / qt_conjun, 2))
    largura = int(log(cap_larg[1], 2))
    tg = capacidade-largura-bit_conju
    print("{:-^50}".format(str(len(binario)) + " bits"))
    print("{}{: ^30}{}".format(binario[:tg], binario[tg:(capacidade-largura)], binario[capacidade-largura:capacidade]))
    print("{:-^50}".format(''))
| 43.895238
| 129
| 0.653179
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 4,329
| 0.466588
|
49af0bc491e51d1946b18c865a7ad51bc62f12c7
| 15,786
|
py
|
Python
|
supvisors/tests/test_mainloop.py
|
julien6387/supvisors
|
4e32bce566dec2cf9e9a213a3698178030eb869b
|
[
"Apache-2.0"
] | 66
|
2017-01-05T11:28:34.000Z
|
2022-03-04T08:42:01.000Z
|
supvisors/tests/test_mainloop.py
|
julien6387/supvisors
|
4e32bce566dec2cf9e9a213a3698178030eb869b
|
[
"Apache-2.0"
] | 36
|
2016-12-30T10:46:58.000Z
|
2022-01-09T22:56:10.000Z
|
supvisors/tests/test_mainloop.py
|
julien6387/supvisors
|
4e32bce566dec2cf9e9a213a3698178030eb869b
|
[
"Apache-2.0"
] | 12
|
2017-03-04T04:53:51.000Z
|
2022-01-28T13:03:22.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ======================================================================
# Copyright 2017 Julien LE CLEACH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ======================================================================
import pytest
from supvisors.mainloop import *
from supvisors.ttypes import AddressStates
from supvisors.utils import DeferredRequestHeaders
from threading import Thread
from unittest.mock import call, patch, Mock, DEFAULT
from .base import DummyRpcInterface
@pytest.fixture
def mocked_rpc():
    """ Fixture patching supvisors.mainloop.getRPCInterface and yielding the mock. """
    rpc_patch = patch('supvisors.mainloop.getRPCInterface')
    mocked_rpc = rpc_patch.start()
    yield mocked_rpc
    # stop the patch after the test to avoid leaking into other tests
    rpc_patch.stop()
@pytest.fixture
def main_loop(supvisors):
    """ Fixture creating the SupvisorsMainLoop instance under test. """
    return SupvisorsMainLoop(supvisors)
def test_creation(supvisors, mocked_rpc, main_loop):
    """ Test the values set at construction. """
    assert isinstance(main_loop, Thread)
    assert main_loop.supvisors is supvisors
    assert not main_loop.stop_event.is_set()
    assert main_loop.env == {'SUPERVISOR_SERVER_URL': 'http://127.0.0.1:65000',
                             'SUPERVISOR_USERNAME': '',
                             'SUPERVISOR_PASSWORD': ''}
    # the constructor is expected to probe the local supervisord once
    assert mocked_rpc.call_args_list == [call('localhost', main_loop.env)]
def test_stopping(mocked_rpc, main_loop):
    """ Test the stopping predicate (reflects the stop_event flag). """
    assert not main_loop.stopping()
    main_loop.stop_event.set()
    assert main_loop.stopping()
def test_stop(mocker, mocked_rpc, main_loop):
    """ Test the stopping of the main loop thread. """
    mocked_join = mocker.patch.object(main_loop, 'join')
    # try to stop main loop before it is started
    main_loop.stop()
    assert not main_loop.stop_event.is_set()
    assert not mocked_join.called
    # stop main loop when alive
    mocker.patch.object(main_loop, 'is_alive', return_value=True)
    main_loop.stop()
    assert main_loop.stop_event.is_set()
    assert mocked_join.call_count == 1
def test_run(mocker, main_loop):
    """ Test the running of the main loop thread. """
    mocked_evt = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.check_events')
    mocked_req = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.check_requests')
    mocked_poll = mocker.patch('supvisors.supvisorszmq.SupvisorsZmq.poll')
    # patch to perform one loop iteration only
    mocker.patch.object(main_loop, 'stopping', side_effect=[False, False, True])
    main_loop.run()
    # test that poll was called once
    assert mocked_poll.call_args_list == [call()]
    # test that check_events was called once
    assert mocked_evt.call_count == 1
    # test that check_requests was called once
    assert mocked_req.call_count == 1
def test_check_events(mocker, main_loop):
    """ Test the processing of the events received. """
    mocked_send = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.send_remote_comm_event')
    # prepare context
    mocked_sockets = Mock(**{'check_subscriber.return_value': None})
    # test with no message pending on the subscriber
    main_loop.check_events(mocked_sockets, 'poll result')
    assert mocked_sockets.check_subscriber.call_args_list == [call('poll result')]
    assert not mocked_send.called
    # reset mocks
    mocked_sockets.check_subscriber.reset_mock()
    # test with a message pending: it must be JSON-serialized and forwarded
    mocked_sockets.check_subscriber.return_value = 'a message'
    main_loop.check_events(mocked_sockets, 'poll result')
    assert mocked_sockets.check_subscriber.call_args_list == [call('poll result')]
    assert mocked_send.call_args_list == [call('event', '"a message"')]
def test_check_requests(mocker, main_loop):
    """ Test the processing of the requests received. """
    mocked_send = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.send_request')
    # prepare context
    mocked_sockets = Mock(**{'check_puller.return_value': None})
    # test with no request pending on the puller
    main_loop.check_requests(mocked_sockets, 'poll result')
    assert mocked_sockets.check_puller.call_args_list == [call('poll result')]
    assert not mocked_sockets.disconnect_subscriber.called
    assert not mocked_send.called
    # reset mocks
    mocked_sockets.check_puller.reset_mock()
    # test with an ISOLATE_NODES request: only disconnects the subscriber
    mocked_sockets.check_puller.return_value = DeferredRequestHeaders.ISOLATE_NODES, 'a message'
    main_loop.check_requests(mocked_sockets, 'poll result')
    assert mocked_sockets.check_puller.call_args_list == [call('poll result')]
    assert mocked_sockets.disconnect_subscriber.call_args_list == [call('a message')]
    assert not mocked_send.called
    # reset mocks
    mocked_sockets.check_puller.reset_mock()
    mocked_sockets.disconnect_subscriber.reset_mock()
    # test with any other request header: forwarded through send_request
    mocked_sockets.check_puller.return_value = 'event', 'a message'
    main_loop.check_requests(mocked_sockets, 'poll result')
    assert mocked_sockets.check_puller.call_args_list == [call('poll result')]
    assert not mocked_sockets.disconnect_subscriber.called
    assert mocked_send.call_args_list == [call('event', 'a message')]
def test_check_node(mocker, mocked_rpc, main_loop):
    """ Test the protocol to get the processes handled by a remote Supervisor. """
    mocker.patch('supvisors.mainloop.stderr')
    mocked_evt = mocker.patch('supvisors.mainloop.SupvisorsMainLoop.send_remote_comm_event')
    # test rpc error: no event is sent to local Supervisor
    mocked_rpc.side_effect = ValueError
    main_loop.check_node('10.0.0.1')
    assert mocked_rpc.call_count == 2
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    assert mocked_evt.call_count == 0
    # test with a mocked rpc interface
    dummy_info = [{'name': 'proc', 'group': 'appli', 'state': 10, 'start': 5,
                   'now': 10, 'pid': 1234, 'spawnerr': ''}]
    rpc_intf = DummyRpcInterface()
    mocked_all = rpc_intf.supervisor.getAllProcessInfo = Mock()
    mocked_local = rpc_intf.supvisors.get_all_local_process_info = Mock(return_value=dummy_info)
    mocked_addr = rpc_intf.supvisors.get_address_info = Mock()
    rpc_intf.supvisors.get_master_address = Mock(return_value='10.0.0.5')
    rpc_intf.supvisors.get_supvisors_state = Mock(return_value={'statename': 'RUNNING'})
    mocked_rpc.return_value = rpc_intf
    mocked_rpc.side_effect = None
    mocked_rpc.reset_mock()
    # test with address in isolation: an unauthorized 'auth' event, no process query
    for state in [AddressStates.ISOLATING, AddressStates.ISOLATED]:
        mocked_addr.return_value = {'statecode': state}
        main_loop.check_node('10.0.0.1')
        assert mocked_rpc.call_args_list == [call('10.0.0.1', main_loop.env)]
        expected = 'node_name:10.0.0.1 authorized:False master_node_name:10.0.0.5 supvisors_state:RUNNING'
        assert mocked_evt.call_args_list == [call('auth', expected)]
        assert not mocked_all.called
        # reset counters
        mocked_evt.reset_mock()
        mocked_rpc.reset_mock()
    # test with address not in isolation: local process info is fetched and 2 events sent
    for state in [AddressStates.UNKNOWN, AddressStates.CHECKING, AddressStates.RUNNING, AddressStates.SILENT]:
        mocked_addr.return_value = {'statecode': state}
        main_loop.check_node('10.0.0.1')
        assert mocked_rpc.call_count == 1
        assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
        assert mocked_evt.call_count == 2
        assert mocked_local.call_count == 1
        # reset counters
        mocked_evt.reset_mock()
        mocked_local.reset_mock()
        mocked_rpc.reset_mock()
def test_start_process(mocker, mocked_rpc, main_loop):
""" Test the protocol to start a process handled by a remote Supervisor. """
mocker.patch('supvisors.mainloop.stderr')
# test rpc error
mocked_rpc.side_effect = KeyError
main_loop.start_process('10.0.0.1', 'dummy_process', 'extra args')
assert mocked_rpc.call_count == 2
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
# test with a mocked rpc interface
rpc_intf = DummyRpcInterface()
mocked_rpc.side_effect = None
mocked_rpc.return_value = rpc_intf
mocked_supvisors = mocker.patch.object(rpc_intf.supvisors, 'start_args')
main_loop.start_process('10.0.0.1', 'dummy_process', 'extra args')
assert mocked_rpc.call_count == 3
assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
assert mocked_supvisors.call_count == 1
assert mocked_supvisors.call_args == call('dummy_process', 'extra args', False)
def test_stop_process(mocker, mocked_rpc, main_loop):
    """ Check the deferred stop_process request forwarded to a remote Supervisor. """
    mocker.patch('supvisors.mainloop.stderr')
    # failure path: the connection to the remote Supervisor is reset
    mocked_rpc.side_effect = ConnectionResetError
    main_loop.stop_process('10.0.0.1', 'dummy_process')
    assert mocked_rpc.call_count == 2
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    # nominal path: stub the rpc interface and verify the forwarded call
    proxy = DummyRpcInterface()
    mocked_rpc.side_effect = None
    mocked_rpc.return_value = proxy
    stop_mock = mocker.patch.object(proxy.supervisor, 'stopProcess')
    main_loop.stop_process('10.0.0.1', 'dummy_process')
    assert mocked_rpc.call_count == 3
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    assert stop_mock.call_args_list == [call('dummy_process', False)]
def test_restart(mocker, mocked_rpc, main_loop):
    """ Check the deferred restart request forwarded to a remote Supervisor. """
    mocker.patch('supvisors.mainloop.stderr')
    # failure path: proxy creation raises OSError
    mocked_rpc.side_effect = OSError
    main_loop.restart('10.0.0.1')
    assert mocked_rpc.call_count == 2
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    # nominal path: stub the rpc interface and verify restart is invoked once
    proxy = DummyRpcInterface()
    mocked_rpc.side_effect = None
    mocked_rpc.return_value = proxy
    restart_mock = mocker.patch.object(proxy.supervisor, 'restart')
    main_loop.restart('10.0.0.1')
    assert mocked_rpc.call_count == 3
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    assert restart_mock.call_args_list == [call()]
def test_shutdown(mocker, mocked_rpc, main_loop):
    """ Check the deferred shutdown request forwarded to a remote Supervisor. """
    mocker.patch('supvisors.mainloop.stderr')
    # failure path: the remote call raises an RPCError
    mocked_rpc.side_effect = RPCError(12)
    main_loop.shutdown('10.0.0.1')
    assert mocked_rpc.call_count == 2
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    # nominal path: stub the rpc interface and verify shutdown is invoked once
    proxy = DummyRpcInterface()
    mocked_rpc.side_effect = None
    mocked_rpc.return_value = proxy
    shutdown_mock = mocker.patch.object(proxy.supervisor, 'shutdown')
    main_loop.shutdown('10.0.0.1')
    assert mocked_rpc.call_count == 3
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    assert shutdown_mock.call_args_list == [call()]
def test_restart_all(mocker, mocked_rpc, main_loop):
    """ Check the deferred request to restart Supvisors on a remote node. """
    mocker.patch('supvisors.mainloop.stderr')
    # failure path: proxy creation raises OSError
    mocked_rpc.side_effect = OSError
    main_loop.restart_all('10.0.0.1')
    assert mocked_rpc.call_count == 2
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    # nominal path: stub the rpc interface and verify the supvisors restart
    proxy = DummyRpcInterface()
    mocked_rpc.side_effect = None
    mocked_rpc.return_value = proxy
    restart_all_mock = mocker.patch.object(proxy.supvisors, 'restart')
    main_loop.restart_all('10.0.0.1')
    assert mocked_rpc.call_count == 3
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    assert restart_all_mock.call_args_list == [call()]
def test_shutdown_all(mocker, mocked_rpc, main_loop):
    """ Check the deferred request to shut down Supvisors on a remote node. """
    mocker.patch('supvisors.mainloop.stderr')
    # failure path: the remote call raises an RPCError
    mocked_rpc.side_effect = RPCError(12)
    main_loop.shutdown_all('10.0.0.1')
    assert mocked_rpc.call_count == 2
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    # nominal path: stub the rpc interface and verify the supvisors shutdown
    proxy = DummyRpcInterface()
    mocked_rpc.side_effect = None
    mocked_rpc.return_value = proxy
    shutdown_all_mock = mocker.patch.object(proxy.supvisors, 'shutdown')
    main_loop.shutdown_all('10.0.0.1')
    assert mocked_rpc.call_count == 3
    assert mocked_rpc.call_args == call('10.0.0.1', main_loop.env)
    assert shutdown_all_mock.call_args_list == [call()]
def test_comm_event(mocker, mocked_rpc, main_loop):
    """ Check sending a remote communication event to the local Supervisor. """
    mocker.patch('supvisors.mainloop.stderr')
    # failure path: the local proxy raises an RPCError, which must be swallowed
    mocker.patch.object(main_loop.proxy.supervisor, 'sendRemoteCommEvent', side_effect=RPCError(100))
    main_loop.send_remote_comm_event('event type', 'event data')
    # nominal path: the event is forwarded once, unchanged
    send_mock = mocker.patch.object(main_loop.proxy.supervisor, 'sendRemoteCommEvent')
    main_loop.send_remote_comm_event('event type', 'event data')
    assert send_mock.call_args_list == [call('event type', 'event data')]
def check_call(main_loop, mocked_loop, method_name, request, args):
    """ Send one deferred request and verify that only the matching handler ran. """
    main_loop.send_request(request.value, args)
    for name, handler_mock in mocked_loop.items():
        if name != method_name:
            # every other handler must stay untouched
            assert not handler_mock.called
        else:
            # exactly one call, with the request arguments unpacked
            assert handler_mock.call_args_list == [call(*args)]
            handler_mock.reset_mock()
def test_send_request(mocker, main_loop):
    """ Check that each deferred request header dispatches to its own handler. """
    # patch every handler of the main loop at once
    mocked_loop = mocker.patch.multiple(main_loop, check_node=DEFAULT,
                                        start_process=DEFAULT, stop_process=DEFAULT,
                                        restart=DEFAULT, shutdown=DEFAULT,
                                        restart_all=DEFAULT, shutdown_all=DEFAULT)
    # (handler name, request header, request arguments) for every supported request
    cases = [
        ('check_node', DeferredRequestHeaders.CHECK_NODE, ('10.0.0.2',)),
        ('start_process', DeferredRequestHeaders.START_PROCESS,
         ('10.0.0.2', 'dummy_process', 'extra args')),
        ('stop_process', DeferredRequestHeaders.STOP_PROCESS,
         ('10.0.0.2', 'dummy_process')),
        ('restart', DeferredRequestHeaders.RESTART, ('10.0.0.2',)),
        ('shutdown', DeferredRequestHeaders.SHUTDOWN, ('10.0.0.2',)),
        ('restart_all', DeferredRequestHeaders.RESTART_ALL, ('10.0.0.2',)),
        ('shutdown_all', DeferredRequestHeaders.SHUTDOWN_ALL, ('10.0.0.2',)),
    ]
    for handler_name, header, request_args in cases:
        check_call(main_loop, mocked_loop, handler_name, header, request_args)
| 43.607735
| 110
| 0.705752
| 0
| 0
| 200
| 0.012669
| 297
| 0.018814
| 0
| 0
| 4,885
| 0.309451
|
49afc71691a68c9b40e3421c08e29b8368b54b60
| 2,815
|
py
|
Python
|
wolf_control/scripts/mission.py
|
ncsurobotics/SW8S-ROS
|
9f7f5811fe1a1a8d5d0de0b791ce757fcaeb5759
|
[
"MIT"
] | null | null | null |
wolf_control/scripts/mission.py
|
ncsurobotics/SW8S-ROS
|
9f7f5811fe1a1a8d5d0de0b791ce757fcaeb5759
|
[
"MIT"
] | null | null | null |
wolf_control/scripts/mission.py
|
ncsurobotics/SW8S-ROS
|
9f7f5811fe1a1a8d5d0de0b791ce757fcaeb5759
|
[
"MIT"
] | 1
|
2022-03-30T19:12:52.000Z
|
2022-03-30T19:12:52.000Z
|
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist, TransformStamped
from std_msgs.msg import String
from enum import Enum
import tf2_ros
import math
class mission_states(Enum):
    """Mission phases; the current phase's name is published on wolf_control/mission_state."""
    STOP = -1                  # terminal: hold depth only
    SUBMERGE = 0               # dive to the target depth
    MOVE_TO_GATE = 1           # steer towards the 'gate' tf frame
    MOVE_THROUGH_GATE = 2      # replay the last gate goal to coast through
def checkTolerance(current, wanted, tolerance=0.1):
    """Return True when *current* is strictly within +/- *tolerance* of *wanted*.

    The tolerance parameter defaults to the previously hard-coded 0.1 so
    existing two-argument callers behave exactly as before.
    """
    return abs(current - wanted) < tolerance
def mission():
    """Run the mission state machine at 10 Hz until ROS shuts down.

    Sequence: SUBMERGE to a fixed depth, MOVE_TO_GATE by steering towards the
    'gate' tf frame, MOVE_THROUGH_GATE by replaying the last gate goal for a
    fixed time, then STOP (hold depth).  The current state name is published
    on wolf_control/mission_state every cycle.
    """
    rospy.init_node('mission_controller', anonymous=True)
    state = mission_states.SUBMERGE
    goal_pub = rospy.Publisher('wolf_control/goal', Twist, queue_size=10)
    state_pub = rospy.Publisher('wolf_control/mission_state', String, queue_size=10)
    tf_buffer = tf2_ros.Buffer()
    # the listener must stay referenced so tf_buffer keeps receiving frames
    listener = tf2_ros.TransformListener(tf_buffer)
    rate = rospy.Rate(10)  # 10hz
    submerge_depth = -1.5  # target odom z; negative is below the surface
    timer = 0              # loop-iteration counter; 10 ticks ~= 1 second
    saved_goal = None      # goal frozen when entering MOVE_THROUGH_GATE
    while not rospy.is_shutdown():
        try:
            odom: TransformStamped = tf_buffer.lookup_transform("odom", "base_link", rospy.Time(0))
            if state == mission_states.STOP:
                # terminal state: hold depth only
                goal = Twist()
                goal.linear.z = submerge_depth
                goal_pub.publish(goal)
            if state == mission_states.SUBMERGE:
                # dive while keeping the current heading
                goal = Twist()
                goal.linear.z = submerge_depth
                goal.angular.z = odom.transform.rotation.z
                goal_pub.publish(goal)
                if checkTolerance(odom.transform.translation.z, submerge_depth):
                    state = mission_states.MOVE_TO_GATE
                    timer = 0
                    saved_goal = None
            elif state == mission_states.MOVE_TO_GATE:
                # steer towards the gate frame; 0.1 scales the offset into a goal step
                gate_vector: TransformStamped = tf_buffer.lookup_transform("odom", "gate", rospy.Time(0))
                goal = Twist()
                goal.linear.x = gate_vector.transform.translation.x * 0.1
                goal.linear.y = gate_vector.transform.translation.y * 0.1
                goal.linear.z = submerge_depth
                goal_pub.publish(goal)
                if timer > 80:
                    # after ~8 s, freeze the last goal and coast through the gate
                    saved_goal = goal
                    state = mission_states.MOVE_THROUGH_GATE
                    timer = 0
            elif state == mission_states.MOVE_THROUGH_GATE:
                # replay the frozen goal for ~17 s, then stop
                goal_pub.publish(saved_goal)
                if timer > 170:
                    timer = 0
                    saved_goal = None
                    state = mission_states.STOP
            timer += 1
            state_pub.publish(state.name)
        except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException):
            # transforms may not be available yet; log and retry next cycle
            rospy.logerr("mission_code: error finding frame")
        rate.sleep()
if __name__ == '__main__':
    try:
        mission()
    except rospy.ROSInterruptException:
        # normal shutdown path: ROS interrupts rate.sleep() on Ctrl-C/shutdown
        pass
| 38.040541
| 105
| 0.596448
| 105
| 0.0373
| 0
| 0
| 0
| 0
| 0
| 0
| 168
| 0.05968
|
49b0052d2675e4f9dc69452f3b5d084691e4a664
| 19,202
|
py
|
Python
|
tests/tests/test_api_management.py
|
MaciejTe/useradm
|
4962000db94bc7d9e80b81c4389f6f769d0d062a
|
[
"Apache-2.0"
] | 8
|
2017-02-27T08:58:08.000Z
|
2020-05-25T14:37:24.000Z
|
tests/tests/test_api_management.py
|
MaciejTe/useradm
|
4962000db94bc7d9e80b81c4389f6f769d0d062a
|
[
"Apache-2.0"
] | 263
|
2016-11-17T15:02:26.000Z
|
2022-03-31T10:04:09.000Z
|
tests/tests/test_api_management.py
|
MaciejTe/useradm
|
4962000db94bc7d9e80b81c4389f6f769d0d062a
|
[
"Apache-2.0"
] | 25
|
2016-11-16T15:45:38.000Z
|
2020-12-19T09:56:16.000Z
|
#!/usr/bin/python
# Copyright 2021 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from common import (
init_users,
init_users_f,
init_users_mt,
init_users_mt_f,
cli,
api_client_mgmt,
mongo,
make_auth,
)
import bravado
import pytest
import tenantadm
class TestManagementApiPostUsersBase:
    """Shared POST /users scenarios, reused by open-source and enterprise suites."""

    def _do_test_ok(self, api_client_mgmt, init_users, new_user, tenant_id=None):
        """Create a user (tenant-scoped when tenant_id is given) and verify it is listed once."""
        auth = None
        if tenant_id is not None:
            auth = make_auth("foo", tenant_id)
        _, r = api_client_mgmt.create_user(new_user, auth)
        assert r.status_code == 201
        users = api_client_mgmt.get_users(auth)
        assert len(users) == len(init_users) + 1
        found_user = [u for u in users if u.email == new_user["email"]]
        assert len(found_user) == 1

    def _do_test_fail_unprocessable_entity(
        self, api_client_mgmt, init_users, new_user, tenant_id=None
    ):
        """Creating the user must be rejected with 422."""
        auth = None
        if tenant_id is not None:
            auth = make_auth("foo", tenant_id)
        # bugfix: the previous try/except silently PASSED when no exception was
        # raised; pytest.raises fails the test if the API accepts the user.
        with pytest.raises(bravado.exception.HTTPError) as excinfo:
            api_client_mgmt.create_user(new_user, auth)
        assert excinfo.value.response.status_code == 422
class TestManagementApiPostUsers(TestManagementApiPostUsersBase):
    """POST /users scenarios for the open-source (single tenant) setup."""

    def _assert_create_fails(self, api_client_mgmt, new_user, expected_status):
        """Creating new_user must fail with the given HTTP status.

        bugfix: the previous try/except pattern silently PASSED when the API
        accepted the bad payload; pytest.raises fails the test in that case.
        """
        with pytest.raises(bravado.exception.HTTPError) as excinfo:
            api_client_mgmt.create_user(new_user)
        assert excinfo.value.response.status_code == expected_status

    def test_ok(self, api_client_mgmt, init_users):
        new_user = {"email": "foo@bar.com", "password": "asdf1234zxcv"}
        self._do_test_ok(api_client_mgmt, init_users, new_user)

    def test_fail_malformed_body(self, api_client_mgmt):
        self._assert_create_fails(api_client_mgmt, {"foo": "bar"}, 400)

    def test_fail_no_password(self, api_client_mgmt):
        self._assert_create_fails(api_client_mgmt, {"email": "foobar"}, 400)

    def test_fail_no_email(self, api_client_mgmt):
        self._assert_create_fails(api_client_mgmt, {"password": "asdf1234zxcv"}, 400)

    def test_fail_not_an_email(self, api_client_mgmt):
        self._assert_create_fails(
            api_client_mgmt, {"email": "foobar", "password": "asdf1234zxcv"}, 400
        )

    def test_fail_pwd_too_short(self, api_client_mgmt):
        self._assert_create_fails(
            api_client_mgmt, {"email": "foo@bar.com", "password": "asdf"}, 422
        )

    def test_fail_duplicate_email(self, api_client_mgmt, init_users):
        # "foo@bar.com" is part of the seeded users
        new_user = {"email": "foo@bar.com", "password": "asdf"}
        self._do_test_fail_unprocessable_entity(api_client_mgmt, init_users, new_user)
class TestManagementApiPostUsersEnterprise(TestManagementApiPostUsersBase):
    """POST /users scenarios in multi-tenant (enterprise) mode, run per tenant."""

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
        new_user = {"email": "foo@bar.com", "password": "asdf1234zxcv"}
        # fake tenantadm accepts the propagated user creation
        with tenantadm.run_fake_create_user(new_user):
            self._do_test_ok(
                api_client_mgmt, init_users_mt[tenant_id], new_user, tenant_id
            )

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_fail_duplicate_email(self, tenant_id, api_client_mgmt, init_users_mt):
        new_user = {"email": "foo@bar.com", "password": "asdf1234zxcv"}
        # fake tenantadm rejects the user with 422 (duplicate)
        with tenantadm.run_fake_create_user(new_user, 422):
            self._do_test_fail_unprocessable_entity(
                api_client_mgmt, init_users_mt[tenant_id], new_user, tenant_id
            )
class TestManagementApiGetUserBase:
    """Shared GET /users/{id} scenarios."""

    def _do_test_ok(self, api_client_mgmt, init_users, tenant_id=None):
        """Every seeded user must be retrievable by id with unchanged fields."""
        auth = None
        if tenant_id is not None:
            auth = make_auth("foo", tenant_id)
        for u in init_users:
            found = api_client_mgmt.get_user(u.id, auth)
            assert found.id == u.id
            assert found.email == u.email
            assert found.created_ts == u.created_ts
            assert found.updated_ts == u.updated_ts

    def _do_test_fail_not_found(self, api_client_mgmt, init_users, tenant_id=None):
        """Looking up an unknown id must yield 404."""
        auth = None
        if tenant_id is not None:
            auth = make_auth("foo", tenant_id)
        # bugfix: the previous try/except silently PASSED when the lookup
        # unexpectedly succeeded; pytest.raises fails the test in that case.
        with pytest.raises(bravado.exception.HTTPError) as excinfo:
            api_client_mgmt.get_user("madeupid", auth)
        assert excinfo.value.response.status_code == 404
class TestManagementApiGetUser(TestManagementApiGetUserBase):
    """GET /users/{id} scenarios for the open-source (single tenant) setup."""

    def test_ok(self, api_client_mgmt, init_users):
        self._do_test_ok(api_client_mgmt, init_users)

    def test_fail_not_found(self, api_client_mgmt, init_users):
        self._do_test_fail_not_found(api_client_mgmt, init_users)
class TestManagementApiGetUserEnterprise(TestManagementApiGetUserBase):
    """GET /users/{id} scenarios in multi-tenant (enterprise) mode, run per tenant."""

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
        self._do_test_ok(api_client_mgmt, init_users_mt[tenant_id], tenant_id)

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_fail_not_found(self, tenant_id, api_client_mgmt, init_users_mt):
        self._do_test_fail_not_found(
            api_client_mgmt, init_users_mt[tenant_id], tenant_id
        )
class TestManagementApiGetUsersBase:
    """Shared GET /users scenarios."""

    def _do_test_ok(self, api_client_mgmt, init_users, tenant_id=None):
        """Listing returns exactly as many users as were seeded."""
        auth = make_auth("foo", tenant_id) if tenant_id is not None else None
        listed = api_client_mgmt.get_users(auth)
        assert len(listed) == len(init_users)

    def _do_test_no_users(self, api_client_mgmt, tenant_id=None):
        """Listing is empty when nothing was seeded."""
        auth = make_auth("foo", tenant_id) if tenant_id is not None else None
        listed = api_client_mgmt.get_users(auth)
        assert len(listed) == 0
class TestManagementApiGetUsersOk(TestManagementApiGetUsersBase):
    """GET /users with seeded users (open-source setup)."""

    def test_ok(self, api_client_mgmt, init_users):
        self._do_test_ok(api_client_mgmt, init_users)
class TestManagementApiGetUsersNoUsers(TestManagementApiGetUsersBase):
    """GET /users against an empty database (open-source setup)."""

    def test_no_users(self, api_client_mgmt):
        self._do_test_no_users(api_client_mgmt)
class TestManagementApiGetUsersEnterprise(TestManagementApiGetUsersBase):
    """GET /users scenarios in multi-tenant (enterprise) mode."""

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
        self._do_test_ok(api_client_mgmt, init_users_mt[tenant_id], tenant_id)

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_no_users(self, tenant_id, api_client_mgmt, init_users_mt):
        # NOTE(review): the parametrized tenant_id is ignored and a fixed
        # unknown tenant is queried instead -- presumably intentional (any
        # unknown tenant should list no users), but worth confirming.
        self._do_test_no_users(api_client_mgmt, "non_existing_tenant_id")
class TestManagementApiDeleteUserBase:
    """Shared DELETE /users/{id} scenarios."""

    def _do_test_ok(self, api_client_mgmt, init_users, tenant_id=None):
        """Deleting an existing user removes exactly that user."""
        auth = make_auth("foo", tenant_id) if tenant_id is not None else None
        victim_id = init_users[0]["id"]
        rsp = api_client_mgmt.delete_user(victim_id, auth)
        assert rsp.status_code == 204
        remaining = api_client_mgmt.get_users(auth)
        assert len(remaining) == len(init_users) - 1
        assert not any(u.id == victim_id for u in remaining)

    def _do_test_not_found(self, api_client_mgmt, tenant_id=None):
        """Deleting an unknown id is idempotent: still 204."""
        auth = make_auth("foo", tenant_id) if tenant_id is not None else None
        rsp = api_client_mgmt.delete_user("nonexistent_id", auth)
        assert rsp.status_code == 204
class TestManagementApiDeleteUser(TestManagementApiDeleteUserBase):
    """DELETE /users/{id} scenarios for the open-source (single tenant) setup."""

    def test_ok(self, api_client_mgmt, init_users):
        self._do_test_ok(api_client_mgmt, init_users)

    def test_not_found(self, api_client_mgmt, init_users):
        self._do_test_not_found(api_client_mgmt)
class TestManagementApiDeleteUserEnterprise(TestManagementApiDeleteUserBase):
    """DELETE /users/{id} scenarios in multi-tenant (enterprise) mode."""

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok(self, tenant_id, api_client_mgmt, init_users_mt):
        # fake tenantadm acknowledges the propagated deletion
        with tenantadm.run_fake_delete_user(
            tenant_id, init_users_mt[tenant_id][0]["id"]
        ):
            self._do_test_ok(api_client_mgmt, init_users_mt[tenant_id], tenant_id)

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_not_found(self, tenant_id, api_client_mgmt):
        with tenantadm.run_fake_delete_user():
            self._do_test_not_found(api_client_mgmt, tenant_id)
class TestManagementApiPutUserBase:
    """Shared PUT /users/{id} scenarios (email and/or password updates)."""

    def _login_auth(self, api_client_mgmt, email):
        """Log in as an existing seeded user and return the bearer auth header."""
        _, r = api_client_mgmt.login(email, "correcthorsebatterystaple")
        assert r.status_code == 200
        return {"Authorization": "Bearer " + r.text}

    def _do_test_ok_email(
        self, api_client_mgmt, init_users, user, update, tenant_id=None
    ):
        """Updating the email succeeds; user count stays constant, new email listed once."""
        auth = self._login_auth(api_client_mgmt, user.email)
        _, r = api_client_mgmt.update_user(user.id, update, auth)
        assert r.status_code == 204
        users = api_client_mgmt.get_users(auth)
        assert len(users) == len(init_users)
        found = [u for u in users if u.email == update["email"]]
        assert len(found) == 1

    def _do_test_ok_email_or_pass(
        self, api_client_mgmt, init_users, user, update, tenant_id=None
    ):
        """Updating email and/or password succeeds, and login still works afterwards."""
        auth = self._login_auth(api_client_mgmt, user.email)
        _, r = api_client_mgmt.update_user(user.id, update, auth)
        assert r.status_code == 204
        users = api_client_mgmt.get_users(auth)
        assert len(users) == len(init_users)
        # find the user via the (possibly updated) email
        email = user.email
        new_email = update.get("email", None)
        if new_email is not None and new_email != user.email:
            email = new_email
        found = [u for u in users if u.email == email]
        assert len(found) == 1
        # login must still work with the updated credentials
        _, r = api_client_mgmt.login(email, update["password"])
        assert r.status_code == 200

    def _do_test_fail_not_found(
        self, api_client_mgmt, init_users, update, tenant_id=None
    ):
        """Updating an unknown user id must yield 404."""
        auth = self._login_auth(api_client_mgmt, init_users[0].email)
        # bugfix: the previous try/except silently PASSED when the update
        # unexpectedly succeeded; pytest.raises fails the test in that case.
        with pytest.raises(bravado.exception.HTTPError) as excinfo:
            api_client_mgmt.update_user("madeupid", update, auth)
        assert excinfo.value.response.status_code == 404

    def _do_test_fail_bad_update(self, api_client_mgmt, init_users, tenant_id=None):
        """A payload with unknown fields must be rejected with 400."""
        with pytest.raises(bravado.exception.HTTPError) as excinfo:
            api_client_mgmt.update_user(init_users[0].id, {"foo": "bar"})
        assert excinfo.value.response.status_code == 400

    def _do_test_fail_unprocessable_entity(
        self, api_client_mgmt, init_users, user, update, tenant_id=None
    ):
        """The update must be rejected with 422 (e.g. duplicate email, bad password)."""
        auth = self._login_auth(api_client_mgmt, user.email)
        with pytest.raises(bravado.exception.HTTPError) as excinfo:
            api_client_mgmt.update_user(user.id, update, auth)
        assert excinfo.value.response.status_code == 422
class TestManagementApiPutUser(TestManagementApiPutUserBase):
    """PUT /users/{id} scenarios for the open-source (single tenant) setup."""

    def test_ok_email(self, api_client_mgmt, init_users_f):
        payload = {"email": "unique1@foo.com"}
        self._do_test_ok_email(
            api_client_mgmt, init_users_f, init_users_f[0], payload
        )

    def test_ok_pass(self, api_client_mgmt, init_users_f):
        payload = {
            "current_password": "correcthorsebatterystaple",
            "password": "secretpassword123",
        }
        self._do_test_ok_email_or_pass(
            api_client_mgmt, init_users_f, init_users_f[0], payload
        )

    def test_ok_email_and_pass(self, api_client_mgmt, init_users_f):
        payload = {
            "email": "definitelyunique@foo.com",
            "current_password": "correcthorsebatterystaple",
            "password": "secretpassword123",
        }
        self._do_test_ok_email_or_pass(
            api_client_mgmt, init_users_f, init_users_f[0], payload
        )

    def test_fail_password_mismatch(self, api_client_mgmt, init_users_f):
        # wrong current password must be rejected
        payload = {"current_password": "dummy", "password": "secretpassword123"}
        self._do_test_fail_unprocessable_entity(
            api_client_mgmt, init_users_f, init_users_f[0], payload
        )

    def test_fail_not_found(self, api_client_mgmt, init_users_f):
        payload = {"email": "foo@bar.com", "password": "secretpassword123"}
        self._do_test_fail_not_found(api_client_mgmt, init_users_f, payload)

    def test_fail_bad_update(self, api_client_mgmt, init_users_f):
        self._do_test_fail_bad_update(api_client_mgmt, init_users_f)

    def test_fail_duplicate_email(self, api_client_mgmt, init_users_f):
        # reuse another seeded user's email
        payload = {"email": init_users_f[1].email, "password": "secretpassword123"}
        self._do_test_fail_unprocessable_entity(
            api_client_mgmt, init_users_f, init_users_f[0], payload
        )
class TestManagementApiPutUserEnterprise(TestManagementApiPutUserBase):
    """PUT /users/{id} scenarios in multi-tenant (enterprise) mode.

    Each test deliberately picks a different seeded user index so the
    scenarios do not interfere with one another within a tenant.
    """

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok_email(self, api_client_mgmt, init_users_mt_f, tenant_id):
        user = init_users_mt_f[tenant_id][0]
        update = {"email": "unique1@foo.com"}
        # fake tenantadm accepts the propagated email update
        with tenantadm.run_fake_update_user(tenant_id, user.id, update):
            self._do_test_ok_email(
                api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
            )

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok_pass(self, api_client_mgmt, init_users_mt_f, tenant_id):
        user = init_users_mt_f[tenant_id][1]
        # password-only updates are not propagated; only tenant lookup is faked
        with tenantadm.run_fake_get_tenants(tenant_id):
            update = {
                "password": "secretpassword123",
                "current_password": "correcthorsebatterystaple",
            }
            self._do_test_ok_email_or_pass(
                api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
            )

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok_email_and_pass(self, api_client_mgmt, init_users_mt_f, tenant_id):
        user = init_users_mt_f[tenant_id][2]
        update = {
            "email": "definitelyunique@foo.com",
            "current_password": "correcthorsebatterystaple",
            "password": "secretpassword123",
        }
        with tenantadm.run_fake_update_user(tenant_id, user.id, update):
            self._do_test_ok_email_or_pass(
                api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
            )

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_fail_not_found(self, api_client_mgmt, init_users_mt_f, tenant_id):
        user = init_users_mt_f[tenant_id][3]
        update = {
            "email": "foo@bar.com",
            "current_password": "correcthorsebatterystaple",
            "password": "secretpassword123",
        }
        # fake tenantadm responds 404 for the unknown user
        with tenantadm.run_fake_update_user(tenant_id, user.id, update, 404):
            self._do_test_fail_not_found(
                api_client_mgmt, init_users_mt_f[tenant_id], update, tenant_id
            )

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_fail_bad_update(self, api_client_mgmt, init_users_mt_f, tenant_id):
        self._do_test_fail_bad_update(api_client_mgmt, init_users_mt_f[tenant_id])

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_fail_duplicate_email(self, api_client_mgmt, init_users_mt_f, tenant_id):
        user = init_users_mt_f[tenant_id][0]
        update = {
            "email": init_users_mt_f[tenant_id][1].email,
            "password": "secretpassword123",
        }
        # fake tenantadm rejects the duplicate email with 422
        with tenantadm.run_fake_update_user(tenant_id, user.id, update, 422):
            self._do_test_fail_unprocessable_entity(
                api_client_mgmt, init_users_mt_f[tenant_id], user, update, tenant_id
            )
class TestManagementApiSettingsBase:
    """Shared scenarios for the settings endpoints (POST/GET /settings)."""

    def _do_test_ok(self, api_client_mgmt, tenant_id=None):
        """Settings round-trip: non-empty, then empty (overwrites the previous value)."""
        auth = None
        if tenant_id is not None:
            auth = make_auth("foo", tenant_id)
        # nonempty
        self._set_and_verify(
            {"foo": "foo-val", "bar": "bar-val"}, api_client_mgmt, auth
        )
        # empty
        self._set_and_verify({}, api_client_mgmt, auth)

    def _do_test_no_settings(self, api_client_mgmt, tenant_id=None):
        """Settings default to an empty object when never posted."""
        auth = None
        if tenant_id is not None:
            auth = make_auth("foo", tenant_id)
        found = api_client_mgmt.get_settings(auth)
        assert found.json() == {}

    def _set_and_verify(self, settings, api_client_mgmt, auth):
        """POST the settings, then read them back and compare verbatim."""
        r = api_client_mgmt.post_settings(settings, auth)
        assert r.status_code == 201
        found = api_client_mgmt.get_settings(auth)
        assert found.json() == settings

    def _do_test_fail_bad_request(self, api_client_mgmt, tenant_id=None):
        """A non-object settings body must be rejected with 400."""
        auth = None
        if tenant_id is not None:
            auth = make_auth("foo", tenant_id)
        # bugfix: the previous try/except silently PASSED when the malformed
        # body was accepted; pytest.raises fails the test in that case.
        with pytest.raises(bravado.exception.HTTPError) as excinfo:
            api_client_mgmt.post_settings("asdf", auth)
        assert excinfo.value.response.status_code == 400
class TestManagementApiSettings(TestManagementApiSettingsBase):
    """Settings endpoint scenarios for the open-source (single tenant) setup."""

    def test_ok(self, api_client_mgmt):
        self._do_test_ok(api_client_mgmt)

    def test_no_settings(self, api_client_mgmt):
        self._do_test_no_settings(api_client_mgmt)

    def test_bad_request(self, api_client_mgmt):
        self._do_test_fail_bad_request(api_client_mgmt)
class TestManagementApiSettingsEnterprise(TestManagementApiSettingsBase):
    """Settings endpoint scenarios in multi-tenant (enterprise) mode."""

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_ok(self, api_client_mgmt, init_users_mt_f, tenant_id):
        self._do_test_ok(api_client_mgmt, tenant_id)

    @pytest.mark.parametrize("tenant_id", ["tenant1id", "tenant2id"])
    def test_bad_request(self, api_client_mgmt, tenant_id):
        self._do_test_fail_bad_request(api_client_mgmt, tenant_id)
| 38.327345
| 86
| 0.672638
| 18,321
| 0.954119
| 0
| 0
| 5,559
| 0.289501
| 0
| 0
| 2,653
| 0.138163
|
49b03158777693b6348d205c910ad771b55e53ea
| 1,167
|
py
|
Python
|
scripts/convert_to_bed.py
|
Lila14/multimds
|
e54642e0ae47592321352f931f534881ca57d888
|
[
"MIT"
] | 1
|
2019-10-29T12:33:57.000Z
|
2019-10-29T12:33:57.000Z
|
scripts/convert_to_bed.py
|
Lila14/multimds
|
e54642e0ae47592321352f931f534881ca57d888
|
[
"MIT"
] | null | null | null |
scripts/convert_to_bed.py
|
Lila14/multimds
|
e54642e0ae47592321352f931f534881ca57d888
|
[
"MIT"
] | null | null | null |
"""Convert 32 kb Hi-C contact matrices to BED-style interaction files.

Reads the genome-wide bin definitions once, then converts each matrix file
whose output BED does not yet exist.  Non-zero matrix entries are written as
``chrom1 start1 end1  chrom2 start2 end2  count`` (tab-separated).
"""
import os

# Map bin id -> "chrom\tstart\tend" from the bin definition file.
chrom_bins = {}
with open("GSE88952_Sc_Su.32000.bed") as in_file:
    for line in in_file:
        fields = line.strip().split()
        chrom_bins[fields[3]] = "{}\t{}\t{}".format(fields[0], fields[1], fields[2])


def _matrix_to_bed(matrix_path, bed_path):
    """Write the non-zero entries of one contact matrix as a BED interaction file.

    Skips the conversion entirely when bed_path already exists, matching the
    original script's behaviour.
    """
    if os.path.isfile(bed_path):
        return
    with open(bed_path, "w") as out_file:
        with open(matrix_path) as in_file:
            for line in in_file:
                fields = line.strip().split()
                chrom_string1 = chrom_bins[fields[0]]
                chrom_string2 = chrom_bins[fields[1]]
                # only keep observed contacts
                if float(fields[3]) != 0:
                    out_file.write("\t".join((chrom_string1, chrom_string2, fields[3])))
                    out_file.write("\n")
    # note: explicit close() calls from the original were redundant inside `with`


_matrix_to_bed("ctrl_32kb_matrix.txt", "ctrl_32kb.bed")
_matrix_to_bed("galactose_32kb_matrix.txt", "galactose_32kb.bed")
| 29.175
| 71
| 0.652956
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 179
| 0.153385
|
49b1fd488e00bd5cbf7211a994c7ac528083422a
| 21,956
|
py
|
Python
|
xinshuo_visualization/prob_stat_vis.py
|
xinshuoweng/Xinshuo_PyToolbox
|
ce4cf0398f24c5a611af9d94dc0bf2a9104a3716
|
[
"MIT"
] | 31
|
2020-03-05T12:27:21.000Z
|
2022-03-07T04:00:18.000Z
|
xinshuo_visualization/prob_stat_vis.py
|
xinshuoweng/Xinshuo_PyToolbox
|
ce4cf0398f24c5a611af9d94dc0bf2a9104a3716
|
[
"MIT"
] | null | null | null |
xinshuo_visualization/prob_stat_vis.py
|
xinshuoweng/Xinshuo_PyToolbox
|
ce4cf0398f24c5a611af9d94dc0bf2a9104a3716
|
[
"MIT"
] | 12
|
2020-07-06T05:06:58.000Z
|
2021-11-18T14:43:20.000Z
|
# Author: Xinshuo Weng
# email: xinshuo.weng@gmail.com
import matplotlib.pyplot as plt, numpy as np
# import seaborn as sns
# from pandas import DataFrame
# from sklearn.neighbors import NearestNeighbors
from terminaltables import AsciiTable
from collections import Counter
from .private import save_vis_close_helper, get_fig_ax_helper
from xinshuo_miscellaneous import isdict, islogical, is_path_exists, isscalar, islist, is_path_exists_or_creatable, CHECK_EQ_LIST_UNORDERED, isnparray, isinteger, isstring, scalarlist2strlist, islistoflist, iscolorimage_dimension, isgrayimage_dimension, istuple
from xinshuo_math import calculate_truncated_mse
# Palette and dash patterns cycled through when several methods are plotted
# on the same figure (color advances first, then line style).
color_set = ['r', 'b', 'g', 'c', 'm', 'y', 'k', 'w', 'lime', 'cyan', 'aqua']
linestyle_set = ['-', '--', '-.', ':', None, ' ', 'solid', 'dashed']
# Figure resolution used to convert pixel sizes into matplotlib figsize inches.
dpi = 80
def visualize_ced(normed_mean_error_dict, error_threshold, normalized=True, truncated_list=None, display2terminal=True, display_list=None, title='2D PCK curve', debug=True, vis=False, pck_savepath=None, table_savepath=None, closefig=True):
    '''
    visualize the cumulative error distribution curve (also called NME or PCK curve)
    all percentage-based parameters are in [0, 100]

    parameters:
        normed_mean_error_dict:  dict mapping method name -> (N, ) numpy array of per-image errors
        error_threshold:         upper bound of the x axis (a percentage when normalized)
        normalized:              if True, errors are percentages of a normalization factor
        truncated_list:          optional list of thresholds for truncated-MSE metrics
        display_list:            optional explicit plotting order of the method names
        pck_savepath:            optional path to save the curve figure
        table_savepath:          optional path to save the ascii metrics table

    returns:
        metrics_dict:  method name -> {'AUC': ..., 'MSE': ..., optional truncated metrics}
        metrics_table: list of table rows; the first row is the title row
    '''
    if debug:
        assert isdict(normed_mean_error_dict), 'the input normalized mean error dictionary is not correct'
        assert islogical(normalized), 'the normalization flag should be logical'
        if normalized: assert error_threshold > 0 and error_threshold < 100, 'threshold percentage is not well set'
        # bugfix: the original referenced an undefined `save` flag here;
        # validate each output path only when it is actually provided
        if pck_savepath is not None:
            assert is_path_exists_or_creatable(pck_savepath), 'please provide a valid path to save the pck results'
        if table_savepath is not None:
            assert is_path_exists_or_creatable(table_savepath), 'please provide a valid path to save the table results'
        assert isstring(title), 'title is not correct'
        if truncated_list is not None:
            # bugfix: `islistofscalar` was never imported; check explicitly
            assert islist(truncated_list) and all(isscalar(tmp) for tmp in truncated_list), 'the input truncated list is not correct'
        if display_list is not None:
            assert islist(display_list) and len(display_list) == len(normed_mean_error_dict), 'the input display list is not correct'
            assert CHECK_EQ_LIST_UNORDERED(display_list, list(normed_mean_error_dict.keys()), debug=debug), 'the input display list does not match the error dictionary key list'
    if display_list is None:
        # py3 fix: materialize keys() so indexing/ordering is well defined
        display_list = list(normed_mean_error_dict.keys())

    # set display parameters
    width, height = 1000, 800
    legend_fontsize = 10
    scale_distance = 48.8       # normalized-error -> micrometer factor shown in the legend
    line_index, color_index = 0, 0

    figsize = width / float(dpi), height / float(dpi)
    fig = plt.figure(figsize=figsize)

    # set figure handle
    num_bins = 1000
    if normalized:
        maximum_x = 1
        scale = num_bins / 100
    else:
        maximum_x = error_threshold + 1
        scale = num_bins / maximum_x
    x_axis = np.linspace(0, maximum_x, num_bins)    # error axis, percentage of normalization factor
    y_axis = np.zeros(num_bins)
    interval_y = 10
    interval_x = 1
    plt.xlim(0, error_threshold)
    plt.ylim(0, 100)
    plt.yticks(np.arange(0, 100 + interval_y, interval_y))
    plt.xticks(np.arange(0, error_threshold + interval_x, interval_x))
    plt.grid()
    plt.title(title, fontsize=20)
    if normalized: plt.xlabel('Normalized error euclidean distance (%)', fontsize=16)
    else: plt.xlabel('Absolute error euclidean distance', fontsize=16)

    # calculate metrics for each method
    num_methods = len(normed_mean_error_dict)
    # py3 fix: dict views are not indexable; take any one error array for the count
    num_images = len(next(iter(normed_mean_error_dict.values())))
    metrics_dict = dict()
    metrics_table = list()
    table_title = ['Method Name / Metrics', 'AUC', 'MSE']
    append2title = False
    assert num_images > 0, 'number of error array should be larger than 0'
    threshold_bins = int(error_threshold * scale)   # py3 fix: slice indices must be integers
    for ordered_index in range(num_methods):
        method_name = display_list[ordered_index]
        normed_mean_error = normed_mean_error_dict[method_name]
        if debug:
            assert isnparray(normed_mean_error) and normed_mean_error.ndim == 1, 'shape of error distance is not good'
            assert len(normed_mean_error) == num_images, 'number of testing images should be equal for all methods'
            assert len(linestyle_set) * len(color_set) >= len(normed_mean_error_dict)
        color_tmp = color_set[color_index]
        line_tmp = linestyle_set[line_index]

        # cumulative distribution: fraction of images whose error is below each bin
        for i in range(num_bins):
            y_axis[i] = float((normed_mean_error < x_axis[i]).sum()) / num_images

        # calculate area under the curve and mean square error
        entry = dict()
        entry['AUC'] = np.sum(y_axis[:threshold_bins]) / threshold_bins     # bigger, better
        entry['MSE'] = np.mean(normed_mean_error)                           # smaller, better
        metrics_table_tmp = [str(method_name), '%.2f' % (entry['AUC']), '%.1f' % (entry['MSE'])]
        if truncated_list is not None:
            tmse_dict = calculate_truncated_mse(normed_mean_error.tolist(), truncated_list, debug=debug)
            for threshold in truncated_list:
                entry['AUC/%s'%threshold] = np.sum(y_axis[:threshold_bins]) / threshold_bins    # bigger, better
                entry['MSE/%s'%threshold] = tmse_dict[threshold]['T-MSE']
                entry['percentage/%s'%threshold] = tmse_dict[threshold]['percentage']
                if not append2title:
                    table_title.append('AUC/%s'%threshold)
                    table_title.append('MSE/%s'%threshold)
                    table_title.append('pct/%s'%threshold)
                metrics_table_tmp.append('%.2f' % (entry['AUC/%s'%threshold]))
                metrics_table_tmp.append('%.1f' % (entry['MSE/%s'%threshold]))
                metrics_table_tmp.append('%.1f' % (100 * entry['percentage/%s'%threshold]) + '%')
        metrics_table.append(metrics_table_tmp)
        append2title = True
        metrics_dict[method_name] = entry

        # draw this method's curve with its metrics in the legend
        label = '%s, AUC: %.2f, MSE: %.1f (%.0f um)' % (method_name, entry['AUC'], entry['MSE'], entry['MSE'] * scale_distance)
        if normalized: plt.plot(x_axis*100, y_axis*100, color=color_tmp, linestyle=line_tmp, label=label, lw=3)
        else: plt.plot(x_axis, y_axis*100, color=color_tmp, linestyle=line_tmp, label=label, lw=3)
        plt.legend(loc=4, fontsize=legend_fontsize)

        # advance color first; wrap to the next line style after the palette is used up
        color_index += 1
        if color_index // len(color_set) == 1:      # py3 fix: integer division for the wrap check
            line_index += 1
            color_index = color_index % len(color_set)

    plt.ylabel('{} Test Images (%)'.format(num_images), fontsize=16)
    save_vis_close_helper(fig=fig, ax=None, vis=vis, transparent=False, save_path=pck_savepath, debug=debug, closefig=closefig)

    # print table to terminal and optionally save it to file
    metrics_table = [table_title] + metrics_table
    table = AsciiTable(metrics_table)
    if display2terminal:
        print('\nprint detailed metrics')
        print(table.table)
    if table_savepath is not None:
        with open(table_savepath, 'w') as table_file:
            table_file.write(table.table)
        if display2terminal: print('\nsave detailed metrics to %s' % table_savepath)
    return metrics_dict, metrics_table
def visualize_nearest_neighbor(featuremap_dict, num_neighbor=5, top_number=5, vis=True, save_csv=False, csv_save_path=None, save_vis=False, save_img=False, save_thumb_name='nearest_neighbor.png', img_src_folder=None, ext_filter='.jpg', nn_save_folder=None, debug=True):
    '''
    visualize nearest neighbors for featuremaps extracted from images

    parameter:
        featuremap_dict:    a dictionary mapping image path -> featuremap; the featuremap is a
                            numpy array of any shape (flattened internally, no need to flatten)
        num_neighbor:       number of neighbors to retrieve; the first nearest is the query itself
        top_number:         number of top entries to visualize; entries are ranked by the summed
                            distance to their retrieved neighbors (lowest first)
        csv_save_path:      path to save a .csv file containing the distance matrix and the
                            sorted nearest-neighbor ids for all elements
        nn_save_folder:     folder to save the nearest-neighbor images of the top entries

    return:
        all_sorted_nearest_id:  2d array; each row is a feature id followed by its nearest
                                neighbors, rows sorted by summed neighbor distance
        selected_nearest_id:    only the top_number rows of all_sorted_nearest_id
    '''
    print('processing feature map to nearest neightbor.......')
    if debug:
        assert isdict(featuremap_dict), 'featuremap should be dictionary'
        assert all(isnparray(featuremap_tmp) for featuremap_tmp in featuremap_dict.values()), 'value of dictionary should be numpy array'
        assert isinteger(num_neighbor) and num_neighbor > 1, 'number of neighborhodd is an integer larger than 1'
        if save_csv and csv_save_path is not None:
            assert is_path_exists_or_creatable(csv_save_path), 'path to save .csv file is not correct'
        if save_vis or save_img:
            if nn_save_folder is not None:  # save image directly
                assert isstring(ext_filter), 'extension filter is not correct'
                assert is_path_exists(img_src_folder), 'source folder for image is not correct'
                assert all(isstring(path_tmp) for path_tmp in featuremap_dict.keys())  # key should be the path for the image
                assert is_path_exists_or_creatable(nn_save_folder), 'folder to save top visualized images is not correct'
                assert isstring(save_thumb_name), 'name of thumbnail is not correct'
    if ext_filter.find('.') == -1:
        ext_filter = '.%s' % ext_filter

    # flatten the feature maps; dict preserves insertion order, keeping keys/values aligned
    nn_feature_dict = {key: featuremap_tmp.flatten() for key, featuremap_tmp in featuremap_dict.items()}
    num_features = len(nn_feature_dict)

    # nearest neighbor search
    # fix: materialize the dict views into lists -- under Python 3, np.array(dict.values())
    # builds a useless 0-d object array and dict.keys() cannot be indexed
    id_list = list(nn_feature_dict.keys())
    featuremap = np.array(list(nn_feature_dict.values()))
    nearbrs = NearestNeighbors(n_neighbors=num_neighbor, algorithm='ball_tree').fit(featuremap)
    distances, indices = nearbrs.kneighbors(featuremap)
    if debug:
        assert featuremap.shape[0] == num_features, 'shape of feature map is not correct'
        assert indices.shape == (num_features, num_neighbor), 'shape of indices is not correct'
        assert distances.shape == (num_features, num_neighbor), 'shape of distances is not correct'

    # convert the nearest indices for all featuremaps to their corresponding keys
    max_length = len(max(id_list, key=len))  # maximum length of string in the keys
    # fix: use a unicode array instead of np.chararray -- chararray stores bytes under
    # Python 3, which breaks the '%s' path joins and the identity assertion below
    nearest_id = np.empty(indices.shape, dtype='U%d' % (max_length + 1))
    for x in range(nearest_id.shape[0]):
        for y in range(nearest_id.shape[1]):
            nearest_id[x, y] = id_list[indices[x, y]]
    if debug:
        assert list(nearest_id[:, 0]) == id_list, 'nearest neighbor has problem'

    # sort the features by the summed distance to all of their retrieved neighbors
    print('sorting the feature based on distance')
    featuremap_distance = np.sum(distances, axis=1)
    if debug:
        assert featuremap_distance.shape == (num_features, ), 'distance is not correct'
    sorted_indices = np.argsort(featuremap_distance)
    all_sorted_nearest_id = nearest_id[sorted_indices, :]

    # save distance matrix and sorted neighbor ids to the csv file
    if save_csv and csv_save_path is not None:
        print('Saving nearest neighbor result as .csv to path: %s' % csv_save_path)
        with open(csv_save_path, 'w+') as file:
            np.savetxt(file, distances, delimiter=',', fmt='%f')
            np.savetxt(file, all_sorted_nearest_id, delimiter=',', fmt='%s')

    # choose the best entries to visualize
    selected_sorted_indices = sorted_indices[0:top_number]
    if debug:
        # fix: use <= rather than < -- equal summed distances (ties) are legitimate
        for i in range(num_features - 1):
            assert featuremap_distance[sorted_indices[i]] <= featuremap_distance[sorted_indices[i + 1]], 'feature map is not well sorted based on distance'
    selected_nearest_id = nearest_id[selected_sorted_indices, :]

    # assemble a thumbnail grid: one row per top entry, one column per neighbor
    if save_vis:
        fig, axarray = plt.subplots(top_number, num_neighbor)
        for index in range(top_number):
            for nearest_index in range(num_neighbor):
                img_path = os.path.join(img_src_folder, '%s%s' % (selected_nearest_id[index, nearest_index], ext_filter))
                if debug:
                    print('loading image from %s' % img_path)
                img = imread(img_path)
                if isgrayimage_dimension(img):
                    axarray[index, nearest_index].imshow(img, cmap='gray')
                elif iscolorimage_dimension(img):
                    axarray[index, nearest_index].imshow(img)
                else:
                    assert False, 'unknown error'
                axarray[index, nearest_index].axis('off')
        save_thumb = os.path.join(nn_save_folder, save_thumb_name)
        fig.savefig(save_thumb)
        if vis:
            plt.show()
        plt.close(fig)

    # copy the neighbor images of each top entry into a per-query subfolder
    if save_img and nn_save_folder is not None:
        for top_index in range(top_number):
            file_list = selected_nearest_id[top_index]
            save_subfolder = os.path.join(nn_save_folder, file_list[0])
            mkdir_if_missing(save_subfolder)
            for file_tmp in file_list:
                file_src = os.path.join(img_src_folder, '%s%s' % (file_tmp, ext_filter))
                save_path = os.path.join(save_subfolder, '%s%s' % (file_tmp, ext_filter))
                if debug:
                    print('saving %s to %s' % (file_src, save_path))
                shutil.copyfile(file_src, save_path)
    return all_sorted_nearest_id, selected_nearest_id
def visualize_distribution(data, bin_size=None, vis=False, save_path=None, debug=True, closefig=True):
    '''
    visualize the histogram of a data, which can be a dictionary, list, numpy array,
    tuple or a list of lists

    parameters:
        data:       tuple / dict / list / numpy array / list of lists of scalars; for a dict,
                    only the values are plotted
        bin_size:   width of a histogram bin; derived from the data range (1000 bins) when None
    '''
    if debug:
        assert istuple(data) or isdict(data) or islist(data) or isnparray(data), 'input data is not correct'

    # convert input to a plain list (or list of lists)
    if istuple(data):
        data = list(data)
    elif isdict(data):
        data = list(data.values())  # fix: materialize the view so later min()/list checks work on Python 3
    elif isnparray(data):
        data = data.tolist()

    num_bins = 1000.0
    fig, ax = get_fig_ax_helper(fig=None, ax=None)

    # calculate bin size from the overall data range when not given
    if bin_size is None:
        if islistoflist(data):
            max_value = np.max(np.max(data))
            min_value = np.min(np.min(data))
        else:
            max_value = np.max(data)
            min_value = np.min(data)
        bin_size = (max_value - min_value) / num_bins
    else:
        try:
            bin_size = float(bin_size)
        except TypeError:
            # fix: re-raise instead of silently continuing with an unusable bin_size
            print('size of bin should be an float value')
            raise

    # plot: overlay one distribution per nested list, or a single histogram otherwise
    if islistoflist(data):
        max_value = np.max(np.max(data))
        min_value = np.min(np.min(data))
        bins = np.arange(min_value - bin_size, max_value + bin_size, bin_size)  # fixed bin size
        plt.xlim([min_value - bin_size, max_value + bin_size])
        for data_list_tmp in data:
            if debug:
                assert islist(data_list_tmp), 'the nested list is not correct!'
            sns.distplot(data_list_tmp, bins=bins, kde=False)
    else:
        bins = np.arange(min(data) - 10 * bin_size, max(data) + 10 * bin_size, bin_size)  # fixed bin size
        plt.xlim([min(data) - bin_size, max(data) + bin_size])
        plt.hist(data, bins=bins, alpha=0.5)
    plt.title('distribution of data')
    plt.xlabel('data (bin size = %f)' % bin_size)
    plt.ylabel('count')
    return save_vis_close_helper(fig=fig, ax=ax, vis=vis, save_path=save_path, debug=debug, closefig=closefig)
def visualize_bar(data, bin_size=2.0, title='Bar Graph of Key-Value Pair', xlabel='index', ylabel='count', vis=True, save_path=None, debug=True, closefig=True):
    '''
    visualize the bar graph of a data, which can be a dictionary or a list
    different from visualize_bar_graph, this function does not depend on pandas and
    DataFrame; it is simpler but with less functionality
    also the keys here may be continuous scalar values

    parameters:
        data:       dict mapping scalar key -> count, or a list of counts (indexed from 0)
        bin_size:   width of each bar
    '''
    if debug:
        assert isstring(title) and isstring(xlabel) and isstring(ylabel), 'title/xlabel/ylabel is not correct'
        assert isdict(data) or islist(data), 'input data is not correct'
        assert isscalar(bin_size), 'the bin size is not a floating number'

    if isdict(data):
        # fix: materialize the dict views -- Python 3 views are not indexable and
        # np.array(dict_keys) would produce a 0-d object array
        index_list = list(data.keys())
        if debug:
            assert islistofscalar(index_list), 'the input dictionary does not contain a scalar key'
        frequencies = list(data.values())
    else:
        index_list = range(len(data))
        frequencies = data

    index_str_list = scalarlist2strlist(index_list, debug=debug)
    index_list = np.array(index_list)
    fig, ax = get_fig_ax_helper(fig=None, ax=None)
    # ax.set_xticks(index_list)
    # ax.set_xticklabels(index_str_list)
    plt.bar(index_list, frequencies, bin_size, color='r', alpha=0.5)
    plt.title(title, fontsize=20)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    return save_vis_close_helper(fig=fig, ax=ax, vis=vis, save_path=save_path, debug=debug, transparent=False, closefig=closefig)
def visualize_bar_graph(data, title='Bar Graph of Key-Value Pair', xlabel='pixel error', ylabel='keypoint index', label=False, label_list=None, vis=True, save_path=None, debug=True, closefig=True):
    '''
    visualize the bar graph of a data, which can be a dictionary or list of dictionaries
    inside each dictionary, the keys (string) should be the same, which become the y labels;
    the values should be scalar

    parameters:
        data:       dict of {str: scalar}, or a list of such dicts sharing the same keys
        label:      when plotting multiple sets, use label_list entries as legend labels
    '''
    if debug:
        assert isstring(title) and isstring(xlabel) and isstring(ylabel), 'title/xlabel/ylabel is not correct'
        assert isdict(data) or islistofdict(data), 'input data is not correct'
        if isdict(data):
            assert all(isstring(key_tmp) for key_tmp in data.keys()), 'the keys are not all strings'
            assert all(isscalar(value_tmp) for value_tmp in data.values()), 'the values are not all scalars'
        else:
            assert len(data) <= len(color_set), 'number of data set is larger than number of color to use'
            keys = sorted(data[0].keys())
            for dict_tmp in data:
                if not (sorted(dict_tmp.keys()) == keys):
                    print(dict_tmp.keys())
                    print(keys)
                    assert False, 'the keys are not equal across different input set'
                assert all(isstring(key_tmp) for key_tmp in dict_tmp.keys()), 'the keys are not all strings'
                assert all(isscalar(value_tmp) for value_tmp in dict_tmp.values()), 'the values are not all scalars'

    # convert dictionary to DataFrame, with rows sorted by key
    data_new = dict()
    if isdict(data):
        key_list = list(data.keys())  # fix: materialize dict views so they can be indexed under Python 3
        sorted_index = sorted(range(len(key_list)), key=lambda k: key_list[k])
        data_new['names'] = (np.asarray(key_list)[sorted_index]).tolist()
        data_new['values'] = (np.asarray(list(data.values()))[sorted_index]).tolist()
    else:
        key_list = list(data[0].keys())
        sorted_index = sorted(range(len(key_list)), key=lambda k: key_list[k])
        data_new['names'] = (np.asarray(key_list)[sorted_index]).tolist()
        num_sets = len(data)
        for set_index in range(num_sets):
            data_new['value_%03d' % set_index] = (np.asarray(list(data[set_index].values()))[sorted_index]).tolist()
    dataframe = DataFrame(data_new)

    # plot
    width = 2000
    height = 2000
    alpha = 0.5
    figsize = width / float(dpi), height / float(dpi)
    fig = plt.figure(figsize=figsize)
    sns.set(style='whitegrid')
    if isdict(data):
        g = sns.barplot(x='values', y='names', data=dataframe, label='data', color='b')
        plt.legend(ncol=1, loc='lower right', frameon=True, fontsize=5)
    else:
        num_sets = len(data)
        for set_index in range(num_sets):
            # pastel palette for the first set, muted for the rest
            if set_index == 0:
                sns.set_color_codes('pastel')
            else:
                sns.set_color_codes('muted')
            if label:
                sns.barplot(x='value_%03d' % set_index, y='names', data=dataframe, label=label_list[set_index], color=color_set[set_index], alpha=alpha)
            else:
                # fix: 'solor_set' was a typo for 'color_set' and raised NameError on this path
                sns.barplot(x='value_%03d' % set_index, y='names', data=dataframe, color=color_set[set_index], alpha=alpha)
        plt.legend(ncol=len(data), loc='lower right', frameon=True, fontsize=5)
    sns.despine(left=True, bottom=True)
    plt.title(title, fontsize=20)
    plt.xlim([0, 50])
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    # shrink the y tick label font as the number of rows grows
    num_yticks = len(data_new['names'])
    adaptive_fontsize = -0.0555556 * num_yticks + 15.111
    plt.yticks(fontsize=adaptive_fontsize)
    return save_vis_close_helper(fig=fig, vis=vis, save_path=save_path, debug=debug, closefig=closefig)
| 49.674208
| 269
| 0.660867
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 6,134
| 0.279377
|
49b2849f5a27a9f4b798aac2f6c1149060ada338
| 96
|
py
|
Python
|
first_project/pizza_store/apps.py
|
itamaro/django-zero-to-cloud
|
0b0a4f75bf6a27855b00a88aebf93471a38e0c3c
|
[
"Apache-2.0"
] | null | null | null |
first_project/pizza_store/apps.py
|
itamaro/django-zero-to-cloud
|
0b0a4f75bf6a27855b00a88aebf93471a38e0c3c
|
[
"Apache-2.0"
] | null | null | null |
first_project/pizza_store/apps.py
|
itamaro/django-zero-to-cloud
|
0b0a4f75bf6a27855b00a88aebf93471a38e0c3c
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class PizzaStoreConfig(AppConfig):
    """Django application configuration for the pizza_store app."""
    name = 'pizza_store'
| 16
| 34
| 0.770833
| 59
| 0.614583
| 0
| 0
| 0
| 0
| 0
| 0
| 13
| 0.135417
|
49b384eb266010cb19d2dcb98f62539f08a56ecd
| 2,530
|
py
|
Python
|
bin/sweep_rhoref.py
|
lukaselflein/sarah_folderstructure
|
a725271db3d8b5b28b24918b3daf0942fa04dcd8
|
[
"MIT"
] | null | null | null |
bin/sweep_rhoref.py
|
lukaselflein/sarah_folderstructure
|
a725271db3d8b5b28b24918b3daf0942fa04dcd8
|
[
"MIT"
] | 28
|
2019-03-29T13:34:57.000Z
|
2019-07-04T09:27:07.000Z
|
bin/sweep_rhoref.py
|
lukaselflein/sarah_folderstructure
|
a725271db3d8b5b28b24918b3daf0942fa04dcd8
|
[
"MIT"
] | null | null | null |
"""Vary the rhoref parameter to find a sane value.
Copyright 2019 Simulation Lab
University of Freiburg
Author: Lukas Elflein <elfleinl@cs.uni-freiburg.de>
"""
from __future__ import print_function
import os
import shutil
#import multiprocessing
#import sys
import random
from loop_cost_functions import calc_cost_function
from smamp.tools import cd
from smamp.tools import check_existence
def testprint(*args, **kwargs):
    """Return a debug string describing the received positional and keyword arguments."""
    return 'args: %s, kwargs: %s' % (args, kwargs)
def get_tasks(path_to_subdir):
    """Build the worklist for the lnrho/sigma parameter sweep.

    Creates the ``lnrho_sweep`` output directory (relative to the current
    working directory) if needed, then returns one task tuple
    ``(path_to_subdir, lnrho, sigma, output_name)`` for every parameter
    combination whose output file does not exist yet.
    """
    sweep_dir = 'lnrho_sweep'
    # A previous run may have left the directory in place; only create it once.
    # (The original dead if/pass/else branch with commented-out rmtree is removed.)
    if not os.path.exists(sweep_dir):
        print('making dir')
        os.mkdir(sweep_dir)
        print('dir made.')
    tasks = []
    skipped = 0
    for sigma in [0.2, 0.4, 0.6, 0.8, 1.0, 1.2, 1.4]:
        for lnrho in [-7, -6, -5.5, -5, -4.75, -4.5, -4.25, -4, -3.5]:
            output_name = os.path.join(sweep_dir, 'cost_{}_{}.h5'.format(lnrho, sigma))
            if os.path.exists(output_name):
                # Result already computed in an earlier run; skip it.
                skipped += 1
            else:
                tasks.append((path_to_subdir, lnrho, sigma, output_name))
    print('{} files found and skipped.'.format(skipped))
    return tasks
def calculate_tasks(tasks):
    """Run every pending cost-function evaluation in the worklist, in random order."""
    print('{} items in worklist.'.format(len(tasks)))
    random.shuffle(tasks)
    for job in tasks:
        output_name = job[-1]
        # The result may have appeared since the worklist was built; skip it then.
        if os.path.exists(output_name):
            continue
        workdir, lnrho, sigma = job[0], job[1], job[2]
        print('starting {} {} in {}'.format(lnrho, sigma, workdir))
        with cd(workdir):
            calc_cost_function(*job)
def main():
    """Crawl the directory tree, collect all sweep tasks and execute them."""
    print('This is {}.'.format(__file__))
    print('Current working dir: {}'.format(os.getcwd()))
    tasks = []
    # Crawl the directory structure for folders that are ready to compute in.
    for subdir, dirs, files in sorted(os.walk('.')):
        # Exclude template and output folders from the search.
        if 'template' in subdir or 'exclude' in subdir or 'lnrho_sweep' in subdir:
            continue
        # Only 'horton_cost_function' folders hold sweep inputs.
        if 'horton_cost_function' not in subdir:
            continue
        print('Moving to {}'.format(subdir))
        with cd(subdir):
            tasks += get_tasks(subdir)
    calculate_tasks(tasks)
    print('Done.')


if __name__ == '__main__':
    main()
| 28.75
| 84
| 0.617787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 886
| 0.350198
|
49b5f0ea075bbb7b79a2d40b2e4b0bdffec0743f
| 12,388
|
py
|
Python
|
weasyl/test/web/test_site_updates.py
|
sl1-1/weasyl
|
d4f6bf3e33b85a2289a451d95d5b90ff24f5d539
|
[
"Apache-2.0"
] | 1
|
2019-02-15T04:21:48.000Z
|
2019-02-15T04:21:48.000Z
|
weasyl/test/web/test_site_updates.py
|
sl1-1/weasyl
|
d4f6bf3e33b85a2289a451d95d5b90ff24f5d539
|
[
"Apache-2.0"
] | 254
|
2017-12-23T19:36:43.000Z
|
2020-04-14T21:46:13.000Z
|
weasyl/test/web/test_site_updates.py
|
sl1-1/weasyl
|
d4f6bf3e33b85a2289a451d95d5b90ff24f5d539
|
[
"Apache-2.0"
] | 1
|
2017-12-23T18:42:16.000Z
|
2017-12-23T18:42:16.000Z
|
from __future__ import absolute_import, unicode_literals
import pytest
from libweasyl import staff
from libweasyl.legacy import UNIXTIME_OFFSET
from weasyl import errorcode
from weasyl import siteupdate
from weasyl.define import sessionmaker
from weasyl.test import db_utils
# Minimal valid form payload for creating or editing a site update.
_FORM = {
    u'title': u'Title',
    u'content': u'Content',
}
@pytest.fixture(name='site_updates')
@pytest.mark.usefixtures('db')
def _site_updates():
    """Create a user and three site updates; return (userid, [update objects]).

    The ORM objects are expunged from the session so tests can read their
    attributes after subsequent requests.

    NOTE(review): pytest marks such as usefixtures have no effect when applied
    to fixtures; the 'db' dependency presumably comes from the requesting
    tests -- confirm.
    """
    user = db_utils.create_user(username='test_username')
    updates = [
        siteupdate.create(user, u'foo', u'content one'),
        siteupdate.create(user, u'bar', u'content two'),
        siteupdate.create(user, u'baz', u'content three'),
    ]
    for update in updates:
        sessionmaker().expunge(update)
    return (user, updates)
@pytest.mark.usefixtures('db')
def test_select_last_empty(app):
    """select_last() returns None when no site updates exist."""
    assert siteupdate.select_last() is None
@pytest.mark.usefixtures('db')
def test_select_last(app, site_updates):
    """select_last() returns the most recently created update as a dict."""
    user, updates = site_updates
    most_recent = updates[-1]
    selected = siteupdate.select_last()
    # Pop the media dict first and only assert that a display_url is present,
    # rather than comparing its exact value.
    assert 'display_url' in selected.pop('user_media')['avatar'][0]
    assert selected == {
        'updateid': most_recent.updateid,
        'userid': user,
        'username': 'test_username',
        'title': most_recent.title,
        'content': most_recent.content,
        'unixtime': most_recent.unixtime.timestamp + UNIXTIME_OFFSET,
        'comment_count': 0,
    }
@pytest.mark.usefixtures('db', 'cache')
def test_index_empty(app):
    """The front page renders without an update box when no updates exist."""
    resp = app.get('/')
    assert resp.html.find(id='home-content') is not None
    assert resp.html.find(id='hc-update') is None
@pytest.mark.usefixtures('db', 'cache')
def test_index(app, site_updates):
    """The front page shows the newest site update with the author's avatar."""
    _, updates = site_updates
    latest = updates[-1]

    response = app.get('/')

    update_element = response.html.find(id='hc-update')
    assert update_element is not None
    assert update_element.h3.string == latest.title
    assert update_element.figure.img['alt'] == u'avatar of test_username'
@pytest.mark.usefixtures('db')
def test_list_empty(app):
    """The site-updates listing shows a placeholder when there are none."""
    resp = app.get('/site-updates/')
    assert resp.html.find(None, 'content').p.string == u'No site updates to show.'
@pytest.mark.usefixtures('db')
def test_list(app, monkeypatch, site_updates):
    """The listing shows all updates; edit actions appear only for admins."""
    _, updates = site_updates

    # Anonymous visitor: three posts, one date group header, no admin actions.
    response = app.get('/site-updates/')
    assert len(response.html.findAll(None, 'text-post-item')) == 3
    assert response.html.find(None, 'text-post-actions') is None
    assert len(response.html.findAll(None, 'text-post-group-header')) == 1

    # Admin visitor: same posts, plus an edit link on the newest update.
    admin = db_utils.create_user()
    admin_cookie = db_utils.create_session(admin)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin]))
    response = app.get('/site-updates/', headers={'Cookie': admin_cookie})
    assert len(response.html.findAll(None, 'text-post-item')) == 3
    assert response.html.find(None, 'text-post-actions').a['href'] == '/site-updates/%d/edit' % (updates[-1].updateid,)
@pytest.mark.usefixtures('db', 'no_csrf')
def test_create(app, monkeypatch):
    """An admin can create a site update and is redirected to its page."""
    admin = db_utils.create_user()
    session_cookie = db_utils.create_session(admin)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin]))

    response = app.post('/admincontrol/siteupdate', _FORM, headers={'Cookie': session_cookie}).follow()
    assert response.html.find(None, 'content').h3.string == _FORM['title']
@pytest.mark.usefixtures('db', 'no_csrf')
def test_create_strip(app, monkeypatch):
    """Surrounding whitespace in the title is stripped on creation."""
    user = db_utils.create_user()
    cookie = db_utils.create_session(user)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([user]))
    resp = app.post(
        '/admincontrol/siteupdate',
        dict(_FORM, title=' test title \t '),
        headers={'Cookie': cookie},
    ).follow()
    assert resp.html.find(None, 'content').h3.string == u'test title'
@pytest.mark.usefixtures('db')
def test_create_csrf(app, monkeypatch):
    """Creating an update without a CSRF token is rejected with 403."""
    admin = db_utils.create_user()
    session_cookie = db_utils.create_session(admin)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin]))

    response = app.post('/admincontrol/siteupdate', _FORM, headers={'Cookie': session_cookie}, status=403)
    assert response.html.find(id='error_content').p.string == errorcode.token
@pytest.mark.usefixtures('db')
def test_create_restricted(app, monkeypatch):
    """Only admins may reach the site-update creation form and endpoint.

    Walks up the privilege ladder: anonymous, signed-in user, technical/mod
    staff, then admin; only the admin gets past the error page.
    """
    # Anonymous: both GET and POST are refused as unsigned.
    resp = app.get('/admincontrol/siteupdate')
    assert resp.html.find(id='error_content').contents[0].strip() == errorcode.unsigned
    resp = app.post('/admincontrol/siteupdate', _FORM)
    assert resp.html.find(id='error_content').contents[0].strip() == errorcode.unsigned
    # Signed-in non-staff user: permission error.
    user = db_utils.create_user()
    cookie = db_utils.create_session(user)
    resp = app.get('/admincontrol/siteupdate', headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    resp = app.post('/admincontrol/siteupdate', _FORM, headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    # Technical staff / moderator: still a permission error.
    monkeypatch.setattr(staff, 'TECHNICAL', frozenset([user]))
    monkeypatch.setattr(staff, 'MODS', frozenset([user]))
    resp = app.get('/admincontrol/siteupdate', headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    resp = app.post('/admincontrol/siteupdate', _FORM, headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    # Admin: the form renders without an error.
    monkeypatch.setattr(staff, 'ADMINS', frozenset([user]))
    resp = app.get('/admincontrol/siteupdate', headers={'Cookie': cookie})
    assert resp.html.find(id='error_content') is None
@pytest.mark.usefixtures('db', 'no_csrf')
def test_create_validation(app, monkeypatch):
    """An empty title or empty content is rejected with 422 and a specific message."""
    admin = db_utils.create_user()
    session_cookie = db_utils.create_session(admin)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin]))

    response = app.post('/admincontrol/siteupdate', {'title': u'', 'content': u'Content'}, headers={'Cookie': session_cookie}, status=422)
    assert response.html.find(id='error_content').p.string == errorcode.error_messages['titleInvalid']

    response = app.post('/admincontrol/siteupdate', {'title': u'Title', 'content': u''}, headers={'Cookie': session_cookie}, status=422)
    assert response.html.find(id='error_content').p.string == errorcode.error_messages['contentInvalid']
@pytest.mark.usefixtures('db', 'no_csrf')
def test_create_notifications(app, monkeypatch):
    """Creating a site update produces a notification for other users."""
    admin_user = db_utils.create_user()
    normal_user = db_utils.create_user()
    admin_cookie = db_utils.create_session(admin_user)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin_user]))
    resp = app.post('/admincontrol/siteupdate', _FORM, headers={'Cookie': admin_cookie}).follow()
    assert resp.html.find(None, 'content').h3.string == _FORM['title']
    # The other user sees one notification linking to the new update.
    normal_cookie = db_utils.create_session(normal_user)
    resp = app.get('/messages/notifications', headers={'Cookie': normal_cookie})
    assert list(resp.html.find(id='header-messages').find(title='Notifications').stripped_strings)[1] == '1'
    assert resp.html.find(id='site_updates').find(None, 'item').a.string == _FORM['title']
@pytest.mark.usefixtures('db', 'no_csrf')
def test_edit(app, monkeypatch, site_updates):
    """An admin can edit an existing site update."""
    _, updates = site_updates
    admin = db_utils.create_user()
    session_cookie = db_utils.create_session(admin)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin]))

    response = app.post('/site-updates/%d' % (updates[-1].updateid,), _FORM, headers={'Cookie': session_cookie}).follow()
    assert response.html.find(None, 'content').h3.string == _FORM['title']
@pytest.mark.usefixtures('db', 'no_csrf')
def test_edit_strip(app, monkeypatch, site_updates):
    """Surrounding whitespace in the title is stripped when editing."""
    _, updates = site_updates
    user = db_utils.create_user()
    cookie = db_utils.create_session(user)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([user]))
    resp = app.post(
        '/site-updates/%d' % (updates[-1].updateid,),
        dict(_FORM, title=' test title \t '),
        headers={'Cookie': cookie},
    ).follow()
    assert resp.html.find(None, 'content').h3.string == u'test title'
@pytest.mark.usefixtures('db', 'no_csrf')
def test_edit_nonexistent(app, monkeypatch, site_updates):
    """Editing an update id that does not exist returns 404."""
    _, updates = site_updates
    admin = db_utils.create_user()
    session_cookie = db_utils.create_session(admin)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin]))

    missing_id = updates[-1].updateid + 1
    app.post('/site-updates/%d' % (missing_id,), _FORM, headers={'Cookie': session_cookie}, status=404)
@pytest.mark.usefixtures('db')
def test_edit_csrf(app, monkeypatch, site_updates):
    """Editing without a CSRF token is rejected with 403."""
    _, updates = site_updates
    user = db_utils.create_user()
    cookie = db_utils.create_session(user)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([user]))
    resp = app.post('/site-updates/%d' % (updates[-1].updateid,), _FORM, headers={'Cookie': cookie}, status=403)
    assert resp.html.find(id='error_content').p.string == errorcode.token
@pytest.mark.usefixtures('db')
def test_edit_restricted(app, monkeypatch, site_updates):
    """Only admins may reach the edit form and endpoint for a site update.

    Walks up the privilege ladder: anonymous, signed-in user, technical/mod
    staff, then admin; only the admin gets past the error page.
    """
    _, updates = site_updates
    # Anonymous: both GET and POST are refused as unsigned.
    resp = app.get('/site-updates/%d/edit' % (updates[-1].updateid,))
    assert resp.html.find(id='error_content').contents[0].strip() == errorcode.unsigned
    resp = app.post('/site-updates/%d' % (updates[-1].updateid,), _FORM)
    assert resp.html.find(id='error_content').contents[0].strip() == errorcode.unsigned
    # Signed-in non-staff user: permission error.
    user = db_utils.create_user()
    cookie = db_utils.create_session(user)
    resp = app.get('/site-updates/%d/edit' % (updates[-1].updateid,), headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    resp = app.post('/site-updates/%d' % (updates[-1].updateid,), _FORM, headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    # Technical staff / moderator: still a permission error.
    monkeypatch.setattr(staff, 'TECHNICAL', frozenset([user]))
    monkeypatch.setattr(staff, 'MODS', frozenset([user]))
    resp = app.get('/site-updates/%d/edit' % (updates[-1].updateid,), headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    resp = app.post('/site-updates/%d' % (updates[-1].updateid,), _FORM, headers={'Cookie': cookie})
    assert resp.html.find(id='error_content').p.string == errorcode.permission
    # Admin: the edit form renders without an error.
    monkeypatch.setattr(staff, 'ADMINS', frozenset([user]))
    resp = app.get('/site-updates/%d/edit' % (updates[-1].updateid,), headers={'Cookie': cookie})
    assert resp.html.find(id='error_content') is None
@pytest.mark.usefixtures('db', 'no_csrf')
def test_edit_validation(app, monkeypatch, site_updates):
    """An empty title or empty content on edit is rejected with 422 and a message."""
    _, updates = site_updates
    admin = db_utils.create_user()
    session_cookie = db_utils.create_session(admin)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin]))

    response = app.post('/site-updates/%d' % (updates[-1].updateid,), {'title': u'', 'content': u'Content'}, headers={'Cookie': session_cookie}, status=422)
    assert response.html.find(id='error_content').p.string == errorcode.error_messages['titleInvalid']

    response = app.post('/site-updates/%d' % (updates[-1].updateid,), {'title': u'Title', 'content': u''}, headers={'Cookie': session_cookie}, status=422)
    assert response.html.find(id='error_content').p.string == errorcode.error_messages['contentInvalid']
@pytest.mark.usefixtures('db', 'no_csrf')
def test_edit_notifications(app, monkeypatch):
    """Editing an update retitles the existing notification without adding one."""
    admin_user = db_utils.create_user()
    normal_user = db_utils.create_user()
    admin_cookie = db_utils.create_session(admin_user)
    monkeypatch.setattr(staff, 'ADMINS', frozenset([admin_user]))
    resp = app.post('/admincontrol/siteupdate', _FORM, headers={'Cookie': admin_cookie}).follow()
    assert resp.html.find(None, 'content').h3.string == _FORM['title']
    # Baseline: the other user has exactly one notification for the update.
    normal_cookie = db_utils.create_session(normal_user)
    resp = app.get('/messages/notifications', headers={'Cookie': normal_cookie})
    assert list(resp.html.find(id='header-messages').find(title='Notifications').stripped_strings)[1] == '1'
    assert resp.html.find(id='site_updates').find(None, 'item').a.string == _FORM['title']
    # Edit the update's title as the admin.
    resp = app.post(
        '/site-updates/%d' % (siteupdate.select_last()['updateid'],),
        dict(_FORM, title=u'New title'),
        headers={'Cookie': admin_cookie},
    ).follow()
    assert resp.html.find(None, 'content').h3.string == u'New title'
    # Still exactly one notification, now showing the new title.
    resp = app.get('/messages/notifications', headers={'Cookie': normal_cookie})
    assert list(resp.html.find(id='header-messages').find(title='Notifications').stripped_strings)[1] == '1'
    assert resp.html.find(id='site_updates').find(None, 'item').a.string == u'New title'
| 39.705128
| 144
| 0.690265
| 0
| 0
| 0
| 0
| 11,985
| 0.967469
| 0
| 0
| 2,628
| 0.212141
|
49b63c647e63040901947f17755b744a1b67eb27
| 298
|
py
|
Python
|
17_Greedy/Step05/gamjapark.py
|
StudyForCoding/BEAKJOON
|
84e1c5e463255e919ccf6b6a782978c205420dbf
|
[
"MIT"
] | null | null | null |
17_Greedy/Step05/gamjapark.py
|
StudyForCoding/BEAKJOON
|
84e1c5e463255e919ccf6b6a782978c205420dbf
|
[
"MIT"
] | 3
|
2020-11-04T05:38:53.000Z
|
2021-03-02T02:15:19.000Z
|
17_Greedy/Step05/gamjapark.py
|
StudyForCoding/BEAKJOON
|
84e1c5e463255e919ccf6b6a782978c205420dbf
|
[
"MIT"
] | null | null | null |
import sys

# Greedy fuel problem: travel each leg buying fuel at the cheapest city seen so far.
read = sys.stdin.readline
num_cities = int(read())
leg_distances = list(map(int, read().split()))
fuel_prices = list(map(int, read().split()))

cheapest_price = fuel_prices[0]
total_cost = leg_distances[0] * cheapest_price
for city in range(1, num_cities - 1):
    cheapest_price = min(cheapest_price, fuel_prices[city])
    total_cost += leg_distances[city] * cheapest_price
print(total_cost)
| 19.866667
| 51
| 0.64094
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
49b84672d25848b03244c392641967f515178752
| 1,395
|
py
|
Python
|
examples/tx_rpc_client_ssl.py
|
jakm/txmsgpackrpc
|
9ff15fd7a7cd412d246d4e4937a5c56365f0d6be
|
[
"MIT"
] | 18
|
2015-01-19T15:27:02.000Z
|
2018-12-29T17:30:36.000Z
|
examples/tx_rpc_client_ssl.py
|
jakm/txmsgpackrpc
|
9ff15fd7a7cd412d246d4e4937a5c56365f0d6be
|
[
"MIT"
] | 6
|
2015-05-27T11:28:18.000Z
|
2016-12-19T06:35:55.000Z
|
examples/tx_rpc_client_ssl.py
|
jakm/txmsgpackrpc
|
9ff15fd7a7cd412d246d4e4937a5c56365f0d6be
|
[
"MIT"
] | 4
|
2015-03-24T22:18:27.000Z
|
2018-02-05T18:12:45.000Z
|
from twisted.internet import defer, reactor
@defer.inlineCallbacks
def main():
    """Connect to a msgpack-RPC server over SSL and round-trip a nested document.

    Sends a JSON-like structure to the server's ``echo`` method, asserts the
    response is identical, and stops the reactor when done (success or error).
    """
    try:
        from txmsgpackrpc.client import connect
        c = yield connect('localhost', 8000, ssl=True, connectTimeout=5, waitTimeout=5)
        data = {
            'firstName': 'John',
            'lastName': 'Smith',
            'isAlive': True,
            'age': 25,
            'height_cm': 167.6,
            'address': {
                'streetAddress': "21 2nd Street",
                "city": 'New York',
                "state": 'NY',
                'postalCode': '10021-3100'
            },
            'phoneNumbers': [
                {
                    'type': 'home',
                    'number': '212 555-1234'
                },
                {
                    'type': 'office',
                    'number': '646 555-4567'
                }
            ],
            'children': [],
            'spouse': None
        }
        res = yield c.createRequest('echo', data)
        assert data == res
        # fix: use the print() function -- the original 'print res' statement is a
        # SyntaxError on Python 3
        print(res)
    except Exception:
        import traceback
        traceback.print_exc()
    finally:
        reactor.stop()


if __name__ == '__main__':
    reactor.callWhenRunning(main)
    reactor.run()
| 27.352941
| 87
| 0.387097
| 0
| 0
| 1,246
| 0.89319
| 1,269
| 0.909677
| 0
| 0
| 278
| 0.199283
|
49ba5224fd8503eb5f417c4656d1970b4252f78d
| 714
|
py
|
Python
|
currency_converter.py
|
patricianicolentan/currency-converters
|
e398796c99a0bb2a16fba9888baed0e289884237
|
[
"MIT"
] | null | null | null |
currency_converter.py
|
patricianicolentan/currency-converters
|
e398796c99a0bb2a16fba9888baed0e289884237
|
[
"MIT"
] | null | null | null |
currency_converter.py
|
patricianicolentan/currency-converters
|
e398796c99a0bb2a16fba9888baed0e289884237
|
[
"MIT"
] | null | null | null |
# Converts user-defined currencies using Google
import webbrowser, os, selenium
from selenium import webdriver
driver = webdriver.Firefox()
# Percent-encode the query so values containing spaces or special characters
# form a valid URL (the original concatenated raw spaces into the URL, and the
# unused 'headers' dict has been removed).
from urllib.parse import quote_plus

driver = webdriver.Firefox()

currencyX = input("Original Currency: ")
currencyYname = input("Output Currency: ")
currencyX_value = input("Value in " + currencyX + ": ")

query = quote_plus('{} {} to {}'.format(currencyX_value, currencyX, currencyYname))
URL = 'https://www.google.com/search?client=firefox-b-d&q=' + query
try:
    driver.get(URL)
    # The converted amount appears in Google's currency-conversion widget.
    goal = driver.find_element_by_class_name('SwHCTb')
    currencyY = goal.text
    print('Value in ' + currencyYname + ': ' + currencyY)
finally:
    # fix: always release the browser; the original leaked the Firefox process.
    driver.quit()
| 39.666667
| 149
| 0.710084
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 322
| 0.45098
|
49bc8549c7944e60a8f4b2d3ccdc16b4d5329c4f
| 890
|
py
|
Python
|
SwitchTracer/universal/exceptions/__init__.py
|
IzayoiRin/VirtualVeyonST
|
d0c4035dba81d02135ad54f4c5a5d463e95f7925
|
[
"MIT"
] | null | null | null |
SwitchTracer/universal/exceptions/__init__.py
|
IzayoiRin/VirtualVeyonST
|
d0c4035dba81d02135ad54f4c5a5d463e95f7925
|
[
"MIT"
] | null | null | null |
SwitchTracer/universal/exceptions/__init__.py
|
IzayoiRin/VirtualVeyonST
|
d0c4035dba81d02135ad54f4c5a5d463e95f7925
|
[
"MIT"
] | null | null | null |
class UniErrors(Exception):
    """Root of the project's exception hierarchy; all other errors derive from it."""
    pass
class SetupErrors(UniErrors):
    """Error category: application setup."""
    pass
class SettingErrors(UniErrors):
    """Error category: settings handling."""
    pass
class ConfigureSyntaxErrors(UniErrors):
    """Error category: configuration syntax."""
    pass
class NoLocationErrors(UniErrors):
    """Error category: a required location is missing."""
    pass
class ImportedErrors(UniErrors):
    """Error category: importing components."""
    pass
class KernelWaresSettingsErrors(UniErrors):
    """Error category: kernel-ware settings."""
    pass
class RegisterErrors(UniErrors):
    """Error category: registration."""
    pass
class ResoluterErrors(UniErrors):
    """Error category: resoluter handling."""
    pass
class VolumeErrors(UniErrors):
    """Error category: volumes."""
    pass
class ConnectionErrors(UniErrors):
    """Error category: connections."""
    pass
class RedisOperationErrors(UniErrors):
    """Error category: Redis operations."""
    pass
class SerializerSettingErrors(UniErrors):
    """Error category: serializer configuration."""
    pass
class SerializerValidationErrors(UniErrors):
    """Error category: serializer validation."""
    pass
class ParserSettingErrors(UniErrors):
    """Error category: parser configuration."""
    pass
class ContentTypeErrors(UniErrors):
    """Error category: content types."""
    pass
class IllegalParametersErrors(UniErrors):
    """Error category: illegal parameters."""
    pass
class CodingErrors(UniErrors):
    """Error category: encoding/decoding."""
    pass
class AppRuntimeErrors(UniErrors):
    """Error category: application runtime."""
    pass
| 11.866667
| 44
| 0.746067
| 835
| 0.938202
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
49bc98db6539f3a16066fd5753ae5ccc2e439eb8
| 1,107
|
py
|
Python
|
tests/test_dht.py
|
fakegit/stilio
|
cf198b8ccadc7dcadc462ce83b801af00ef4e2f2
|
[
"Apache-2.0"
] | 71
|
2019-10-09T17:18:12.000Z
|
2022-02-26T12:15:53.000Z
|
tests/test_dht.py
|
zinsking/stilio
|
eade3c1993e185bef53fa25b4e12fe8be330251c
|
[
"Apache-2.0"
] | 3
|
2019-10-16T17:52:48.000Z
|
2021-12-01T16:50:18.000Z
|
tests/test_dht.py
|
zinsking/stilio
|
eade3c1993e185bef53fa25b4e12fe8be330251c
|
[
"Apache-2.0"
] | 11
|
2020-01-21T09:09:14.000Z
|
2022-03-27T12:05:36.000Z
|
from stilio.crawler.dht.node import Node
class TestNode:
    """Unit tests for the DHT ``Node`` type."""

    def setup_method(self):
        # Fresh randomly-identified node per test; fixed address/port
        # keep the assertions deterministic.
        self.node = Node.create_random("192.168.1.1", 8000)

    def test_create_random(self) -> None:
        node = self.node
        assert node.address == "192.168.1.1"
        assert node.port == 8000

    def test_generate_random_id(self) -> None:
        # Node ids are 160-bit (20-byte) values.
        nid = Node.generate_random_id()
        assert len(nid) == 20

    def test_hex_id(self) -> None:
        assert self.node.hex_id == self.node.nid.hex()

    def test_eq(self) -> None:
        nid = Node.generate_random_id()
        base = Node(nid, "192.168.1.1", 8000)
        # Nodes are equal only when id, address and port all match.
        assert Node(nid, "192.168.1.1", 8000) == base
        assert Node(nid, "192.168.1.2", 8000) != base
        assert base != Node(nid, "192.168.1.1", 8001)
        assert base != Node(Node.generate_random_id(), "192.168.1.1", 8001)

    def test_repr(self) -> None:
        assert repr(self.node) == self.node.nid.hex()
| 31.628571
| 60
| 0.581752
| 1,063
| 0.960253
| 0
| 0
| 0
| 0
| 0
| 0
| 130
| 0.117435
|
49bd3fd869f70ef4d24196d954aa248d999405b6
| 714
|
py
|
Python
|
04_threading_yield.py
|
BiAPoL/online_image_processing_napari
|
680d9ceeef5ae188541a96c7125f0fca07f28af5
|
[
"Unlicense"
] | 2
|
2021-05-10T13:44:15.000Z
|
2022-03-16T20:20:39.000Z
|
04_threading_yield.py
|
BiAPoL/online_image_processing_napari
|
680d9ceeef5ae188541a96c7125f0fca07f28af5
|
[
"Unlicense"
] | 1
|
2021-05-17T16:11:54.000Z
|
2021-05-19T19:38:50.000Z
|
04_threading_yield.py
|
BiAPoL/online_image_processing_napari
|
680d9ceeef5ae188541a96c7125f0fca07f28af5
|
[
"Unlicense"
] | 2
|
2021-05-17T16:36:12.000Z
|
2022-03-18T15:07:14.000Z
|
import napari
import time
# Use the public threading API; napari._qt.qthreading is a private module
# and may move or break between releases.
from napari.qt.threading import thread_worker
import numpy as np

# create a viewer window
viewer = napari.Viewer()


# https://napari.org/guides/stable/threading.html
@thread_worker
def loop_run():
    """Background worker: yields a random 2x2 image roughly twice a second.

    Runs forever; napari stops the worker when the application quits.
    """
    while True:  # endless loop
        print("Hello world", time.time())
        time.sleep(0.5)
        yield np.random.random((2, 2))


def update_layer(image):
    """
    Updates the image in the layer 'result'
    or adds this layer.
    """
    try:
        viewer.layers['result'].data = image
    except KeyError:
        # First yield: the layer does not exist yet, so create it.
        viewer.add_image(image, name='result')


# Start the loop and route every yielded frame into the viewer.
worker = loop_run()
worker.yielded.connect(update_layer)
worker.start()

# Start napari (blocks until the window is closed)
napari.run()
| 20.4
| 49
| 0.676471
| 0
| 0
| 151
| 0.211485
| 166
| 0.232493
| 0
| 0
| 225
| 0.315126
|
49bec7c54696e35577e6576d879d884656bd76e8
| 1,937
|
py
|
Python
|
wordonhd/ApiException.py
|
Mechazawa/WordOn-HD-Bot
|
d5a9dedd3d548ad1a9b33f49646e532bf511dd3e
|
[
"BSD-2-Clause"
] | null | null | null |
wordonhd/ApiException.py
|
Mechazawa/WordOn-HD-Bot
|
d5a9dedd3d548ad1a9b33f49646e532bf511dd3e
|
[
"BSD-2-Clause"
] | null | null | null |
wordonhd/ApiException.py
|
Mechazawa/WordOn-HD-Bot
|
d5a9dedd3d548ad1a9b33f49646e532bf511dd3e
|
[
"BSD-2-Clause"
] | null | null | null |
from enum import Enum
from requests import Response
from urllib.parse import unquote
import json
class ApiErrorCode(Enum):
    """Numeric error codes returned by the WordOn HD API.

    NOTE(review): several members share a value, so under ``Enum``
    semantics the later ones become *aliases* of the earlier ones:
    ``ALERT_MAX_GAMES`` (1) aliases ``PHP_MISSING_PARAMS`` and
    ``ALERT_SNEAK_PEEK`` (2) aliases ``PHP_AUTH_FAILED`` -- e.g.
    ``ApiErrorCode(1).name == 'PHP_MISSING_PARAMS'``. Confirm this is
    intended; if the ALERT_* codes come from a separate namespace they
    need distinct values (or their own Enum).
    """

    # Errors reported by the PHP backend.
    PHP_INVALID = 0
    PHP_MISSING_PARAMS = 1
    PHP_AUTH_FAILED = 2
    PHP_NAME_INVALID = 4
    PHP_USERNAME_INVALID = 5
    PHP_USER_ALREADY_EXISTS = 6
    PHP_PASSWORD_INVALID = 7
    PHP_USER_NOT_FOUND = 8
    PHP_WORD_INVALID = 9
    PHP_USER_UNAUTH = 10
    PHP_NAME_EXISTS = 11
    PHP_ALREADY_HAS_ITEM = 12
    PHP_NOT_ENOUGH_COINS = 13
    PHP_MAX_NAMECHANGES = 14
    PHP_USER_MAX_GAMES = 15
    PHP_OTHER_USER_MAX_GAMES = 16
    PHP_FB_ALREADY_EXISTS = 17
    PHP_GAME_INVITE_ALREADY_SENT = 18
    PHP_GET_LOCK_FAIL = 19
    PHP_NOT_ENOUGH_STARS = 20
    PHP_PAYMENT_APPROVAL = 21
    PHP_MAX_HS = 22
    PHP_USER_TYPE_INVALID = 23
    PHP_MISSING_ITEM = 24
    PHP_IS_FB_USER = 25
    PHP_PROMOCODE_INVALID = 32
    PHP_PROMOCODE_ONLY_NEW_PLAYERS = 33
    PHP_PROMOCODE_ALREADY_REDEEMED = 34
    PHP_DEFINITION_UNSUPPORTED = 48
    PHP_DEFINITION_UNAVAILABLE = 49
    PHP_DEFINITION_PARSE_ERROR = 50
    # Errors from the polling endpoint.
    POLL_INVALID_GAME = 237
    POLL_INVALID_AUTH = 238
    POLL_INVALID_REQUEST = 239
    # Alert codes -- see the aliasing NOTE in the class docstring.
    ALERT_MAX_GAMES = 1
    ALERT_SNEAK_PEEK = 2
    # Client-side / transport errors.
    NULL_ERROR = 251
    PARSE_ERROR = 252
    SECURITY_ERROR = 253
    IO_ERROR = 254
    TIME_OUT_ERROR = 255
class ApiException(Exception):
    """Exception carrying a decoded API error.

    ``code`` may be a plain integer, a dict with an ``'error'`` key, or a
    ``requests.Response`` whose JSON payload contains ``'error'``; in the
    Response case the url-encoded request body is decoded into a dict and
    included in the message for debugging.
    """

    def __init__(self, code):
        extra = ''
        if isinstance(code, dict):
            code = int(code['error'])
        if isinstance(code, Response):
            # Decode the url-encoded request body ("k=v&k2=v2") into a dict
            # so the failing request's parameters appear in the message.
            pairs = []
            for item in code.request.body.split('&'):
                fields = item.split('=')
                pairs.append((fields[0], unquote(fields[1])))
            extra = dict(pairs)
            code = int(code.json()['error'])
        label = ApiErrorCode(code).name
        text = "{name}, {code}\n{extra}".format(name=label, code=code, extra=extra)
        super(ApiException, self).__init__(text.strip())
| 28.485294
| 87
| 0.661848
| 1,835
| 0.947341
| 0
| 0
| 0
| 0
| 0
| 0
| 50
| 0.025813
|