Dataset schema (one row per source file):

| column | type | length / value range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3–1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3–972 |
| max_stars_repo_name | string | length 6–130 |
| max_stars_repo_head_hexsha | string | length 40–78 |
| max_stars_repo_licenses | list | length 1–10 |
| max_stars_count | int64 (nullable) | 1–191k |
| max_stars_repo_stars_event_min_datetime | string (nullable) | length 24 |
| max_stars_repo_stars_event_max_datetime | string (nullable) | length 24 |
| max_issues_repo_path | string | length 3–972 |
| max_issues_repo_name | string | length 6–130 |
| max_issues_repo_head_hexsha | string | length 40–78 |
| max_issues_repo_licenses | list | length 1–10 |
| max_issues_count | int64 (nullable) | 1–116k |
| max_issues_repo_issues_event_min_datetime | string (nullable) | length 24 |
| max_issues_repo_issues_event_max_datetime | string (nullable) | length 24 |
| max_forks_repo_path | string | length 3–972 |
| max_forks_repo_name | string | length 6–130 |
| max_forks_repo_head_hexsha | string | length 40–78 |
| max_forks_repo_licenses | list | length 1–10 |
| max_forks_count | int64 (nullable) | 1–105k |
| max_forks_repo_forks_event_min_datetime | string (nullable) | length 24 |
| max_forks_repo_forks_event_max_datetime | string (nullable) | length 24 |
| content | string | length 3–1.03M |
| avg_line_length | float64 | 1.13–941k |
| max_line_length | int64 | 2–941k |
| alphanum_fraction | float64 | 0–1 |
hexsha: ab19b33878e4231a0251e6da9bae691e64d9f0b0 | size: 207 | ext: py | lang: Python
path: frontadmin/urls.py | repo: washim/django-frontadmin @ 6be3d5a745523bec72f2afb71483b8a29d979150 | licenses: ["MIT"]
stars/issues/forks columns: same path, repo, and head hexsha; counts and event datetimes all null
from django.urls import path, include
from frontadmin import views
urlpatterns = [
path('accounts/', include('django.contrib.auth.urls')),
path('accounts/profile/', views.profile, name='profile'),
]
avg_line_length: 29.571429 | max_line_length: 61 | alphanum_fraction: 0.719807

hexsha: 7d3aeb9a67eb55881db026311e866edaa9a08c9c | size: 1,546 | ext: py | lang: Python
path: var/spack/repos/builtin/packages/py-fenics-ufl/package.py | licenses: ["ECL-2.0", "Apache-2.0", "MIT-0", "MIT"]
stars: repo klevzoff/spack @ 396936d24173254ecf4148bc460702185e4c99e5, count 2, events 2019-02-10T13:47:48.000Z to 2019-04-17T13:05:17.000Z
issues: repo klevzoff/spack @ 396936d24173254ecf4148bc460702185e4c99e5, count 17, events 2019-03-21T15:54:00.000Z to 2022-03-29T19:34:28.000Z
forks: repo Kerilk/spack @ e027942b55407a4a5fe323b93d8e57200c873a43, count 2, events 2018-04-06T09:04:11.000Z to 2020-01-24T12:52:12.000Z
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyFenicsUfl(PythonPackage):
"""The Unified Form Language (UFL) is a domain specific language for
declaration of finite element discretizations of variational forms. More
precisely, it defines a flexible interface for choosing finite element
spaces and defining expressions for weak forms in a notation close to
mathematical notation."""
homepage = "https://fenicsproject.org/"
url = "https://github.com/FEniCS/ufl/archive/2019.1.0.tar.gz"
git = "https://github.com/FEniCS/ufl.git"
maintainers = ["js947", "chrisrichardson"]
version("master", branch="master")
version('2019.1.0', sha256='46ac0df4e96327be10b9576d2b8fa8b2c4ca62d3c681d407f5718b162d3ca22d')
version('2018.1.0', sha256='b0d4c2f43f396fd5609317b70d55b53b89c649962fc8a593f4e0e21607da211d')
version('2017.2.0.post0', sha256='111e77707cd6731584b1041f405c2fd3f1752a86c51fd9c430524bd396f293b0')
version('2017.2.0', sha256='0adff7a511185b20c38ddaccdeed6c1b2ecafe4b163c688bfd1316d5c3b1c00d')
version('2017.1.0.post1', sha256='82c8170f44c2392c7e60aa86495df22cc209af50735af8115dc35aeda4b0ca96')
version('2016.2.0', tag='ufl-2016.2.0')
depends_on("python@3.5:", type=('build', 'run'))
depends_on("py-setuptools", type="build")
depends_on("py-numpy", type=("build", "run"))
avg_line_length: 48.3125 | max_line_length: 104 | alphanum_fraction: 0.743855

hexsha: d8a5abcf3e0c33cf47f55e85e12a03eac8c0bbed | size: 4,104 | ext: py | lang: Python
path: read_ravintolat_ja_kaupat.py | repo: launis/areadata @ 8cf0e30ec489ce9655fcd9829284d1ec70e7360d | licenses: ["BSD-3-Clause"]
stars/issues/forks columns: same path, repo, and head hexsha; counts and event datetimes all null
#this section takes care of reading all the needed alcohol license data
def ravintolat_ja_kaupat(url, post):
"""Reads licensed bars and shops right to sell alco
url: data from register
post: data from postcodes
Example url = "http://avoindata.valvira.fi/alkoholi/alkoholilupa_toimipaikkatiedot_ABC.csv"
Format
https://www.avoindata.fi/data/fi/dataset/alkoholielinkeinorekisteri
Returns:
register of postcodes with restaurants and bars
The data includes all postcodes even where
restaurants and bars are empty
"""
import pandas as pd
from supportfunctions import add_zeros
ravintolat=pd.read_csv(url, sep=";", encoding="ISO-8859-1", low_memory=False)
df_obj = ravintolat.select_dtypes(['object'])
ravintolat[df_obj.columns] = df_obj.apply(lambda x: x.str.strip())
ravintolat['POSTINUMERO'] = ravintolat['POSTINUMERO'].apply(add_zeros)
#decide if this is a bar or shop
ravintolat['baari'] = (ravintolat['LUPATYYPPI'].str.contains('ann', case = False)) | (ravintolat['LUPATYYPPI'].str.contains('A', case = True)) | (ravintolat['LUPATYYPPI'].str.contains('B', case = True))
pnro_baari = ravintolat[ravintolat['baari']==False].reset_index().groupby(['POSTINUMERO'],as_index=False).count()[['POSTINUMERO','baari']].copy()
pnro_myymala = ravintolat[ravintolat['baari']==True].reset_index().groupby(['POSTINUMERO'],as_index=False).count()[['POSTINUMERO','baari']].copy()
pnro_palvelut = pd.merge(pnro_baari, pnro_myymala, how='outer', on='POSTINUMERO', copy=True, sort=True).copy()
pnro_palvelut.rename(columns={'POSTINUMERO' : 'Postinumero','baari_x': 'Ravintolat', 'baari_y': 'Myymälät'}, inplace=True)
pnro_palvelut = pnro_palvelut[['Postinumero', 'Ravintolat', 'Myymälät' ]].copy()
pnro_palvelut=pd.merge(post, pnro_palvelut, how='left', left_on = 'postcode',right_on= 'Postinumero',
left_index=False, right_index=False,
suffixes=('_x', '_y'), copy=True, indicator=False,
validate=None).copy()
pnro_palvelut.drop(['Postinumero'], axis=1, inplace=True)
pnro_palvelut.rename(columns={'postcode' : 'Postinumero'}, inplace=True)
#postcodes without any services to 0-values
pnro_palvelut['Ravintolat'].fillna(0, inplace=True)
pnro_palvelut['Myymälät'].fillna(0, inplace=True)
pnro_palvelut = pnro_palvelut[['Postinumero', 'Ravintolat', 'Myymälät']].copy()
return(pnro_palvelut)
def read_ravintolat_ja_kaupat(path, post, url_ravintolat ):
"""This function reads all needed data
either from file or calls a function to fetch data
via API calls directly from sources and then writes data to files
Args:
path: path, where the data is stored
post: data of postcodes
url_ravintolat : csv data file
see:
https://www.avoindata.fi/data/fi/dataset/alkoholielinkeinorekisteri
Returns:
    ravintolat: dataframe of restaurants and shops allowed to sell alcohol
"""
import os
import pandas as pd
import inspect
from supportfunctions import add_zeros
    #read postcodes and municipalities
filename_ravintolat_ja_kaupat = 'ravintolat_ja_kaupat.csv'
filename_ravintolat_ja_kaupat = os.path.join(path, filename_ravintolat_ja_kaupat)
if os.access(filename_ravintolat_ja_kaupat, os.R_OK):
#read it from files
print(inspect.stack()[0][3],' read from file')
ravintolat= pd.read_csv(filename_ravintolat_ja_kaupat, encoding="ISO-8859-1")
ravintolat.loc[:,'Postinumero'] = ravintolat['Postinumero'].apply(add_zeros)
else:
#read restaurant/shop data
#https://www.avoindata.fi/data/fi/dataset/alkoholielinkeinorekisteri
print(inspect.stack()[0][3],' read from API')
ravintolat= ravintolat_ja_kaupat(url_ravintolat, post)
if os.path.exists(path):
ravintolat.to_csv(filename_ravintolat_ja_kaupat, index=False, encoding="ISO-8859-1")
return(ravintolat)
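The docstrings above describe the core pattern: classify each license row as a bar or a shop, count rows per postcode, and left-join the counts onto the full postcode list so postcodes without services get zeros. A minimal sketch of that same count-and-merge pattern on invented toy data (not the Valvira file itself):

```python
# Toy illustration of the count-per-postcode + left-join pattern used above.
# Data and column names are invented for the example.
import pandas as pd

licenses = pd.DataFrame({
    "postcode": ["00100", "00100", "00120"],
    "is_bar":   [True, False, True],
})
postcodes = pd.DataFrame({"postcode": ["00100", "00120", "00130"]})

bars = (licenses[licenses["is_bar"]]
        .groupby("postcode", as_index=False)["is_bar"].count()
        .rename(columns={"is_bar": "bars"}))

out = postcodes.merge(bars, on="postcode", how="left")
out["bars"] = out["bars"].fillna(0)
print(out)  # 00130 appears with 0 bars, mirroring the fillna(0) step above
```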
avg_line_length: 43.659574 | max_line_length: 207 | alphanum_fraction: 0.682018

hexsha: 97a117b2056542869917d47426a5629822e2aa8b | size: 9,613 | ext: py | lang: Python
path: dist/vengeance-1.0.3.tar/dist/vengeance-1.0.3/vengeance/excel_com/workbook.py | repo: michael-ross-ven/vengeance @ 53c6eefba0573936d22a55ba5900744ac701f4b9 | licenses: ["MIT"]
stars: count 1, events 2020-01-18T18:23:26.000Z to 2020-01-18T18:23:26.000Z | issues: null | forks: null (same path, repo, and head hexsha in all three column groups)
from time import sleep
import ctypes
import pythoncom
# noinspection PyUnresolvedReferences
from pythoncom import com_error
from ctypes import byref
from ctypes import PyDLL
from ctypes import POINTER
from ctypes.wintypes import DWORD
from ctypes.wintypes import BOOL
from win32com.client import Dispatch as pywin_dispatch
# from win32com.client.gencache import EnsureDispatch as pywin_dispatch
from comtypes import COMError
from comtypes import GUID
from comtypes import IUnknown
from comtypes.automation import IDispatch
from comtypes.client.dynamic import Dispatch
from comtypes.client import CreateObject
from .. util.text import vengeance_message
from .. util.filesystem import assert_path_exists
from .. util.filesystem import standardize_path
from .. util.filesystem import file_extension
from . excel_constants import *
AccessibleObjectFromWindow = ctypes.oledll.oleacc.AccessibleObjectFromWindow
FindWindowEx = ctypes.windll.user32.FindWindowExA
GetWindowText = ctypes.windll.user32.GetWindowTextA
SetForegroundWindow = ctypes.windll.user32.SetForegroundWindow
corrupt_hwnds = set()
def open_workbook(path,
excel_instance=None,
*,
read_only=False,
update_links=True):
wb = is_workbook_open(path)
if wb is None:
if not excel_instance:
excel_app = empty_excel_instance() or new_excel_instance()
else:
excel_app = excel_instance
wb = __workbook_from_excel_app(excel_app, path, update_links, read_only)
if wb.ReadOnly is False and read_only:
vengeance_message("'{}' is NOT opened read-only".format(wb.Name))
sleep(3)
if wb.ReadOnly and read_only is False:
vengeance_message("('{}' opened as read-only)".format(wb.Name))
sleep(3)
return wb
# noinspection PyUnusedLocal
def close_workbook(wb, save):
"""
    all references need to be severed for the excel_com pointer to be released;
    variables should be set to None
"""
if save and wb.ReadOnly:
raise AssertionError("workbook: '{}' is open read-only, cannot save and close".format(wb.Name))
excel_app = wb.Application
if save:
wb.Save()
else:
excel_app.DisplayAlerts = False
wb.Close()
wb = None
if save is False:
excel_app.DisplayAlerts = True
if excel_app.Workbooks.Count == 0:
excel_app.Quit()
excel_app = None
def is_workbook_open(path):
""" scan all open workbooks across all Excel sessions, match is determined by identical file path """
path = standardize_path(path)
assert_path_exists(path)
window_h = FindWindowEx(0, 0, xl_class_name, None)
while window_h != 0:
for wb in __workbooks_from_hwnd(window_h):
path_search = standardize_path(wb.FullName)
if path == path_search:
return wb
window_h = FindWindowEx(0, window_h, xl_class_name, None)
return None
def new_excel_instance():
excel_app = CreateObject('Excel.Application', dynamic=True)
excel_app = __comtype_to_pywin_obj(excel_app, IDispatch)
excel_app = pywin_dispatch(excel_app)
# excel_app = pywin_dispatch('Excel.Application')
excel_app.WindowState = xl_maximized
excel_app.Visible = True
app_to_foreground(excel_app)
reload_all_add_ins(excel_app)
return excel_app
def empty_excel_instance():
window_h = FindWindowEx(0, 0, xl_class_name, None)
while window_h != 0:
excel_app = __excel_app_from_hwnd(window_h)
if __is_excel_app_empty(excel_app):
vengeance_message('utilizing empty Excel instance ...')
return excel_app
window_h = FindWindowEx(0, window_h, xl_class_name, None)
return None
def any_excel_instance():
return pywin_dispatch('Excel.Application')
# def any_excel_instance_old():
# window_h = FindWindowEx(0, 0, xl_class_name, None)
#
# while window_h != 0:
# excel_app = __excel_app_from_hwnd(window_h)
# if excel_app is not None:
# return excel_app
#
# window_h = FindWindowEx(0, window_h, xl_class_name, None)
#
# return None
def __is_excel_app_empty(excel_app):
if excel_app is None:
return False
workbooks = list(excel_app.Workbooks)
if not workbooks:
excel_app.Visible = True
return True
if len(workbooks) == 1:
wb = workbooks[0]
if wb.Saved and wb.Name == 'Book1':
ws = wb.Sheets[1]
if ws.Name == 'Sheet1' and ws.UsedRange.Address == '$A$1':
return True
return False
def __workbook_from_excel_app(excel_app, path, update_links, read_only):
if read_only:
vengeance_message('opening workbook as read-only ...')
assert_path_exists(path)
excel_app.DisplayAlerts = False
wb = excel_app.Workbooks.Open(path, update_links, read_only)
excel_app.DisplayAlerts = True
return wb
def __workbooks_from_hwnd(window_h):
excel_app = __excel_app_from_hwnd(window_h)
if excel_app is not None:
return list(excel_app.Workbooks)
else:
return []
def __excel_app_from_hwnd(window_h):
"""
comtypes library is used to search windows handles for Excel application,
then converts that pointer to a pywin object thru __comtype_to_pywin_obj()
    sometimes, non-Excel applications run under the same window_h
    as an Excel process, like "print driver host for applications";
    these fail to return a valid excel7_wnd for FindWindowEx,
    but killing those processes would also bring down the Excel application,
    which is not necessarily corrupt
"""
global corrupt_hwnds
if window_h in corrupt_hwnds:
return None
desk_wnd = FindWindowEx(window_h, None, xl_desk_class, None)
excel7_wnd = FindWindowEx(desk_wnd, None, xl_excel7_class, None)
if excel7_wnd == 0:
corrupt_hwnds.add(window_h)
if __is_excel_process(window_h):
__kill_task(window_h)
return None
cls_id = GUID.from_progid(xl_clsid)
obj_ptr = ctypes.POINTER(IDispatch)()
AccessibleObjectFromWindow(excel7_wnd,
native_om,
byref(cls_id),
byref(obj_ptr))
window = Dispatch(obj_ptr)
try:
com_obj = window.application
excel_app = __comtype_to_pywin_obj(com_obj, IDispatch)
excel_app = pywin_dispatch(excel_app)
excel_app.Visible = True
return excel_app
except (COMError, com_error, NameError) as e:
raise ChildProcessError('remote procedure call to Excel application rejected\n'
'(check if cursor is still active within a cell somewhere, '
'Excel will reject automation calls while waiting on '
'user input)') from e
def __comtype_to_pywin_obj(ptr, interface):
"""Convert a comtypes pointer 'ptr' into a pythoncom PyI<interface> object.
'interface' specifies the interface we want; it must be a comtypes
interface class. The interface must be implemented by the object;
and the interface must be known to pythoncom.
"""
com_obj = PyDLL(pythoncom.__file__).PyCom_PyObjectFromIUnknown
com_obj.restype = ctypes.py_object
com_obj.argtypes = (ctypes.POINTER(IUnknown), ctypes.c_void_p, BOOL)
# noinspection PyProtectedMember
return com_obj(ptr._comobj, byref(interface._iid_), True)
def __is_excel_process(window_h):
SysAllocStringLen = ctypes.windll.oleaut32.SysAllocStringLen
SysAllocStringLen.argtypes = (ctypes.c_wchar_p, ctypes.c_uint)
SysAllocStringLen.restype = ctypes.POINTER(ctypes.c_char)
chr_buffer = SysAllocStringLen(' ' * 255, 255)
GetWindowText(window_h, chr_buffer, 255)
name = ctypes.cast(chr_buffer, ctypes.c_char_p).value
name = name.decode('ascii').lower()
return name == 'excel'
def __kill_task(window_h):
GetWindowThreadProcessId = ctypes.windll.user32.GetWindowThreadProcessId
OpenProcess = ctypes.windll.kernel32.OpenProcess
TerminateProcess = ctypes.windll.kernel32.TerminateProcess
CloseHandle = ctypes.windll.kernel32.CloseHandle
vengeance_message('attempting to kill corrupt Excel application: {}'.format(window_h))
lp_ptr = POINTER(DWORD)()
GetWindowThreadProcessId(window_h, byref(lp_ptr))
handle = OpenProcess(process_terminate, False, lp_ptr)
TerminateProcess(handle, -1)
CloseHandle(handle)
def app_to_foreground(excel_app):
excel_app.Visible = True
SetForegroundWindow(excel_app.Hwnd)
def add_in_exists(excel_app, name):
try:
excel_app.AddIns(name)
except COMError:
return False
return True
def reload_all_add_ins(excel_app):
vengeance_message('reloading Excel add-ins...')
for add_in in excel_app.AddIns:
if add_in.Installed:
name = add_in.Name
try:
add_in.Installed = False
add_in.Installed = True
vengeance_message('{}'.format(name))
except COMError:
                vengeance_message('failed to load add-in: {}'.format(name))
print()
def reload_add_in(excel_app, name):
if add_in_exists(excel_app, name):
excel_app.addins(name).Installed = False
excel_app.addins(name).Installed = True
def is_workbook_an_addin(f_name):
return 'xla' in file_extension(f_name)
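A hedged usage sketch for the module above, following the `close_workbook` docstring's advice to sever every reference after closing; the workbook path is hypothetical and this only runs on Windows with Excel plus the pywin32/comtypes dependencies installed:

```python
# Sketch only (Windows + Excel required); the path is a placeholder.
# Mirrors the open/close pattern defined in workbook.py above.
wb = open_workbook(r"C:\temp\example.xlsx", read_only=True)
try:
    print(wb.Name, wb.ReadOnly)
finally:
    close_workbook(wb, save=False)
    wb = None   # sever the last reference so the COM pointer is released
```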
avg_line_length: 28.610119 | max_line_length: 105 | alphanum_fraction: 0.681473

hexsha: e4f016f2212e6bf6c83176814d17c3d77dccac3e | size: 5,622 | ext: py | lang: Python
path: mnist/mnist_deep.py | repo: ling7334/tensorflow-get-started @ 9aa31735e9fbbd9ea56e228b437eabe74222780e | licenses: ["Apache-2.0"]
stars/issues/forks columns: same path, repo, and head hexsha; counts and event datetimes all null
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A deep MNIST classifier using convolutional layers.
See extensive documentation at
https://www.tensorflow.org/get_started/mnist/pros
"""
# Disable linter warnings to maintain consistency with tutorial.
# pylint: disable=invalid-name
# pylint: disable=g-bad-import-order
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import sys
from tensorflow.examples.tutorials.mnist import input_data
import tensorflow as tf
FLAGS = None
def deepnn(x):
"""deepnn builds the graph for a deep net for classifying digits.
Args:
x: an input tensor with the dimensions (N_examples, 784), where 784 is the
number of pixels in a standard MNIST image.
Returns:
A tuple (y, keep_prob). y is a tensor of shape (N_examples, 10), with values
equal to the logits of classifying the digit into one of 10 classes (the
digits 0-9). keep_prob is a scalar placeholder for the probability of
dropout.
"""
# Reshape to use within a convolutional neural net.
# Last dimension is for "features" - there is only one here, since images are
# grayscale -- it would be 3 for an RGB image, 4 for RGBA, etc.
x_image = tf.reshape(x, [-1, 28, 28, 1])
# First convolutional layer - maps one grayscale image to 32 feature maps.
W_conv1 = weight_variable([5, 5, 1, 32])
b_conv1 = bias_variable([32])
h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1)
# Pooling layer - downsamples by 2X.
h_pool1 = max_pool_2x2(h_conv1)
# Second convolutional layer -- maps 32 feature maps to 64.
W_conv2 = weight_variable([5, 5, 32, 64])
b_conv2 = bias_variable([64])
h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)
# Second pooling layer.
h_pool2 = max_pool_2x2(h_conv2)
  # Fully connected layer 1 -- after 2 rounds of downsampling, our 28x28 image
# is down to 7x7x64 feature maps -- maps this to 1024 features.
W_fc1 = weight_variable([7 * 7 * 64, 1024])
b_fc1 = bias_variable([1024])
h_pool2_flat = tf.reshape(h_pool2, [-1, 7 * 7 * 64])
h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)
# Dropout - controls the complexity of the model, prevents co-adaptation of
# features.
keep_prob = tf.placeholder(tf.float32)
h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)
# Map the 1024 features to 10 classes, one for each digit
W_fc2 = weight_variable([1024, 10])
b_fc2 = bias_variable([10])
y_conv = tf.matmul(h_fc1_drop, W_fc2) + b_fc2
return y_conv, keep_prob
def conv2d(x, W):
"""conv2d returns a 2d convolution layer with full stride."""
return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')
def max_pool_2x2(x):
"""max_pool_2x2 downsamples a feature map by 2X."""
return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],
strides=[1, 2, 2, 1], padding='SAME')
def weight_variable(shape):
"""weight_variable generates a weight variable of a given shape."""
initial = tf.truncated_normal(shape, stddev=0.1)
return tf.Variable(initial)
def bias_variable(shape):
"""bias_variable generates a bias variable of a given shape."""
initial = tf.constant(0.1, shape=shape)
return tf.Variable(initial)
def main(_):
# Import data
mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
# Create the model
x = tf.placeholder(tf.float32, [None, 784])
# Define loss and optimizer
y_ = tf.placeholder(tf.float32, [None, 10])
# Build the graph for the deep net
y_conv, keep_prob = deepnn(x)
cross_entropy = tf.reduce_mean(
tf.nn.softmax_cross_entropy_with_logits(labels=y_, logits=y_conv))
train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)
correct_prediction = tf.equal(tf.argmax(y_conv, 1), tf.argmax(y_, 1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
with tf.Session() as sess:
sess.run(tf.global_variables_initializer())
for i in range(20000):
batch = mnist.train.next_batch(50)
if i % 100 == 0:
train_accuracy = accuracy.eval(feed_dict={
x: batch[0], y_: batch[1], keep_prob: 1.0})
print('step %d, training accuracy %g' % (i, train_accuracy))
train_step.run(
feed_dict={x: batch[0], y_: batch[1], keep_prob: 0.5})
print('test accuracy %g' % accuracy.eval(feed_dict={
x: mnist.test.images, y_: mnist.test.labels, keep_prob: 1.0}))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', type=str,
default='/tmp/tensorflow/mnist/input_data',
help='Directory for storing input data')
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed)
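The comments in `deepnn` note that two rounds of 2x2 max-pooling shrink the 28x28 input to 7x7 with 64 feature maps before the fully connected layer. A small shape-bookkeeping sketch in plain Python, independent of TensorFlow:

```python
# Shape bookkeeping for the network above: two SAME-padded 2x2 max-pools,
# each halving the spatial size, then a flatten into the first dense layer.
side = 28
for _ in range(2):          # two max_pool_2x2 layers
    side = side // 2        # 28 -> 14 -> 7
channels = 64               # feature maps after the second conv layer
flat = side * side * channels
print(side, flat)           # 7, 3136  (matches the [7 * 7 * 64, 1024] weight shape)
```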
avg_line_length: 35.582278 | max_line_length: 82 | alphanum_fraction: 0.66809

hexsha: fab205fec9d7172abef803154bac07a864adf53a | size: 2,640 | ext: py | lang: Python
path: kubernetes/test/test_extensions_v1beta1_http_ingress_path.py | repo: mariusgheorghies/python @ 68ac7e168963d8b5a81dc493b1973d29e903a15b | licenses: ["Apache-2.0"]
stars/issues/forks columns: same path, repo, and head hexsha; counts and event datetimes all null
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.extensions_v1beta1_http_ingress_path import ExtensionsV1beta1HTTPIngressPath # noqa: E501
from kubernetes.client.rest import ApiException
class TestExtensionsV1beta1HTTPIngressPath(unittest.TestCase):
"""ExtensionsV1beta1HTTPIngressPath unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test ExtensionsV1beta1HTTPIngressPath
        include_optional is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = kubernetes.client.models.extensions_v1beta1_http_ingress_path.ExtensionsV1beta1HTTPIngressPath() # noqa: E501
if include_optional :
return ExtensionsV1beta1HTTPIngressPath(
                backend = kubernetes.client.models.extensions_v1beta1_ingress_backend.ExtensionsV1beta1IngressBackend(
                    resource = kubernetes.client.models.v1_typed_local_object_reference.V1TypedLocalObjectReference(
                        api_group = '0',
                        kind = '0',
                        name = '0', ),
                    service_name = '0',
                    service_port = '0', ),
path = '0',
path_type = '0'
)
else :
return ExtensionsV1beta1HTTPIngressPath(
                backend = kubernetes.client.models.extensions_v1beta1_ingress_backend.ExtensionsV1beta1IngressBackend(
                    resource = kubernetes.client.models.v1_typed_local_object_reference.V1TypedLocalObjectReference(
                        api_group = '0',
                        kind = '0',
                        name = '0', ),
                    service_name = '0',
                    service_port = '0', ),
)
def testExtensionsV1beta1HTTPIngressPath(self):
"""Test ExtensionsV1beta1HTTPIngressPath"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
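The generated stub above only exercises the model constructor through deeply nested placeholder values. A minimal sketch of a more direct construction, assuming the `ExtensionsV1beta1IngressBackend` model and its `service_name`/`service_port` fields referenced by the stub are available in this client version:

```python
# Sketch, not part of the generated test: build the model with plain keyword
# arguments. Class and field names are taken from the stub above and assumed
# to exist in this kubernetes client version.
from kubernetes.client.models.extensions_v1beta1_http_ingress_path import ExtensionsV1beta1HTTPIngressPath
from kubernetes.client.models.extensions_v1beta1_ingress_backend import ExtensionsV1beta1IngressBackend

ingress_path = ExtensionsV1beta1HTTPIngressPath(
    backend=ExtensionsV1beta1IngressBackend(service_name="my-service", service_port=80),
    path="/api",
    path_type="Prefix",
)
print(ingress_path.to_dict())
```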
avg_line_length: 38.823529 | max_line_length: 128 | alphanum_fraction: 0.649621

hexsha: 4c1d460c7427b59f4d3b6c43f403c4da2e3820e0 | size: 4,577 | ext: py | lang: Python
path: bin/global_search_imgt_oas.py | repo: Merck/BioPhi-2021-publication @ 988a34f4c482321105151fb626ffea6d5e136862 | licenses: ["MIT"]
stars: count 3, events 2021-11-03T07:07:09.000Z to 2021-12-08T17:13:42.000Z | issues: null | forks: count 1, events 2021-08-24T15:05:28.000Z to 2021-08-24T15:05:28.000Z (same path, repo, and head hexsha in all three column groups)
#!/usr/bin/env python
import pandas as pd
import os
import re
import numpy as np
import argparse
from abnumber import Chain, Position
import gzip
import json
def iterate_oas_json(path, limit=None):
if path.endswith('.json.gz'):
gzipped = True
elif path.endswith('.json'):
gzipped = False
else:
raise ValueError(f'Expected .json or .json.gz file, got: {path}')
with (gzip.open(path) if gzipped else open(path)) as f:
i = 0
for line in f:
if limit and i > limit:
break
item = json.loads(line)
if 'seq' not in item:
# skip metadata row
continue
i += 1
data = json.loads(item['data'])
v_germline = item['v']
if v_germline.startswith('IGHV'):
chain_type = 'H'
elif v_germline.startswith('IGLV'):
chain_type = 'L'
elif v_germline.startswith('IGKV'):
chain_type = 'K'
else:
raise ValueError(f'Invalid germline "{v_germline}": {path}')
aa_dict = {Position.from_string(pos, chain_type=chain_type, scheme='imgt'): aa \
for region, region_data in data.items() for pos, aa in region_data.items()}
yield Chain(sequence=None, aa_dict=aa_dict, name=item['original_name'], scheme='imgt',
chain_type=chain_type, tail='')
def evaluate_hit(query, target, result={}, same_length=False):
"""
Compare all query and target sequence positions, return number of matches
    If a previous 'result' is provided, return None if an improvement has not been found.
"""
if same_length:
# Only consider pairs with same sequence length
if len(query) != len(target):
return None
best_matches = result.get('num_matches')
num_matches = get_matches(query, target)
# Exit if we don't improve matches
if best_matches is not None and num_matches < best_matches:
return None
return num_matches
def get_matches(query, target):
alignment = query.align(target)
return len(alignment) - alignment.num_mutations()
if __name__ == "__main__":
# Parse command line
parser = argparse.ArgumentParser()
parser.add_argument("targets", nargs='+', help="Target OAS data-unit (gzipped) JSON file path(s).")
parser.add_argument("--query", required=True, help="Input query sequences as ANARCI CSV (IMGT-aligned) file path.")
parser.add_argument("--output", required=True, help="Output CSV file path.")
parser.add_argument("--limit", type=int, help="Check only first N rows in each JSON file.")
parser.add_argument("--debug", action='store_true', help="Print out each alignment.")
parser.add_argument("--same-length", action='store_true', help="Only consider pairs with same sequence length.")
options = parser.parse_args()
if options.debug and not options.limit:
raise ValueError('Only use --debug with --limit')
queries = Chain.from_anarci_csv(options.query, scheme='imgt', as_series=True)
print(f'Searching {len(queries)} antibodies in {len(options.targets)} JSON files...')
results = {name: {} for name in queries.index}
for json_path in options.targets:
for hit in iterate_oas_json(json_path, limit=options.limit):
for query in queries:
num_matches = evaluate_hit(query, hit, results[query.name], same_length=options.same_length)
if options.debug:
print(f'{hit.name} VS {query.name}:')
print(hit.align(query))
print('matches:', num_matches)
if num_matches is None:
continue
# save improvement
results[query.name] = {
'num_matches': num_matches,
'hit_name': hit.name,
'hit_seq': hit.seq
}
sorted_index = [name for name in queries.index if results[name]]
sorted_hits = [Chain(results[name]['hit_seq'], scheme='imgt', name=name) for name in sorted_index]
table = Chain.to_dataframe(sorted_hits)
if not table.empty:
table.insert(1, 'num_matches', [results[name]['num_matches'] for name in sorted_index])
table.insert(2, 'hit_name', [results[name]['hit_name'] for name in sorted_index])
table.to_csv(options.output)
print(f'Saved {len(table)} hits to: {options.output}')
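The `evaluate_hit` docstring describes the search strategy: keep one best-hit record per query and only overwrite it when a candidate matches at least as many positions as the current best. A simplified sketch of that keep-the-best bookkeeping using plain strings and an invented scoring helper instead of the abnumber alignment used above:

```python
# Simplified stand-in for the best-hit bookkeeping above: plain position-wise
# string matching replaces the abnumber alignment; names and data are invented.
def count_matches(a, b):
    return sum(x == y for x, y in zip(a, b))

query = "EVQLVESGG"
candidates = {"hit1": "EVQLVESGA", "hit2": "EVKLVESGG", "hit3": "EVQLVESGG"}

best = {}                                    # mirrors results[query.name]
for name, seq in candidates.items():
    num_matches = count_matches(query, seq)
    if best and num_matches < best["num_matches"]:
        continue                             # no improvement, keep previous best
    best = {"num_matches": num_matches, "hit_name": name, "hit_seq": seq}

print(best)                                  # hit3 wins with 9/9 matching positions
```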
avg_line_length: 40.504425 | max_line_length: 119 | alphanum_fraction: 0.60957

hexsha: c94bf627a0286c3563028b19b0cf83fff2c1f0f5 | size: 266 | ext: py | lang: Python
path: python/testData/inspections/PyAbstractClassInspection/conditionalRaiseReturnInElsePart.py | repo: truthiswill/intellij-community @ fff88cfb0dc168eea18ecb745d3e5b93f57b0b95 | licenses: ["Apache-2.0"]
stars: count 2, events 2019-04-28T07:48:50.000Z to 2020-12-11T14:18:08.000Z | issues: count 173, events 2018-07-05T13:59:39.000Z to 2018-08-09T01:12:03.000Z | forks: count 2, events 2020-03-15T08:57:37.000Z to 2020-04-07T04:48:14.000Z (same path, repo, and head hexsha in all three column groups)
class A:
@classmethod
def test(cls, param):
return None
class B(A):
@classmethod
def test(cls, param):
if param == 1:
print(param)
else:
return 1
raise NotImplementedError
class C(B):
pass
avg_line_length: 14.777778 | max_line_length: 33 | alphanum_fraction: 0.522556

hexsha: 8469b472dfc4b2765e967a524f07b5008262436f | size: 157,527 | ext: py | lang: Python
path: tiledb/tests/test_libtiledb.py | repo: nguyenv/TileDB-Py @ 47e8f2deeab26e931fe991318339a2ad31a0969e | licenses: ["MIT"]
stars: null | issues: count 7, events 2021-07-16T16:10:15.000Z to 2021-07-20T18:18:13.000Z | forks: null (same path, repo, and head hexsha in all three column groups)
import gc
import io
import itertools
import os
import pickle
import random
import re
import urllib
import subprocess
import sys
import textwrap
import time
import unittest
import warnings
from collections import OrderedDict
from contextlib import redirect_stdout
import numpy as np
import psutil
import pytest
from numpy.testing import assert_array_equal
import tiledb
from tiledb.tests.common import (
assert_captured,
assert_subarrays_equal,
assert_unordered_equal,
DiskTestCase,
rand_ascii,
rand_ascii_bytes,
rand_utf8,
)
from tiledb.tests.fixtures import (
sparse_cell_order,
test_incomplete_return_array,
INTEGER_DTYPES,
) # pyright: reportUnusedVariable=warning
from tiledb.util import schema_from_dict
class VersionTest(DiskTestCase):
def test_libtiledb_version(self):
v = tiledb.libtiledb.version()
self.assertIsInstance(v, tuple)
self.assertTrue(len(v) == 3)
self.assertTrue(v[0] >= 1, "TileDB major version must be >= 1")
def test_tiledbpy_version(self):
v = tiledb.version.version
self.assertIsInstance(v, str)
v = tiledb.version()
self.assertIsInstance(v, tuple)
self.assertTrue(3 <= len(v) <= 5)
class StatsTest(DiskTestCase):
def test_stats(self, capfd):
tiledb.libtiledb.stats_enable()
tiledb.libtiledb.stats_reset()
tiledb.libtiledb.stats_disable()
tiledb.libtiledb.stats_enable()
with tiledb.from_numpy(self.path("test_stats"), np.arange(10)) as T:
pass
# basic output check for read stats
tiledb.libtiledb.stats_reset()
with tiledb.open(self.path("test_stats")) as T:
tiledb.libtiledb.stats_enable()
assert_array_equal(T, np.arange(10))
# test stdout version
tiledb.stats_dump()
assert_captured(capfd, "TileDB Embedded Version:")
# test string version
stats_v = tiledb.stats_dump(print_out=False)
if tiledb.libtiledb.version() < (2, 3):
self.assertTrue("==== READ ====" in stats_v)
else:
self.assertTrue('"timers": {' in stats_v)
self.assertTrue("==== Python Stats ====" in stats_v)
if tiledb.libtiledb.version() < (2, 3):
stats_quiet = tiledb.stats_dump(print_out=False, verbose=False)
self.assertTrue("Time to load array schema" not in stats_quiet)
# TODO seems to be a regression, no JSON
stats_json = tiledb.stats_dump(json=True)
self.assertTrue(isinstance(stats_json, dict))
self.assertTrue("CONSOLIDATE_COPY_ARRAY" in stats_json)
@pytest.mark.skipif(
"pytest.tiledb_vfs == 's3'", reason="Test not yet supported with S3"
)
class TestConfig(DiskTestCase):
def test_config(self):
config = tiledb.Config()
config["sm.tile_cache_size"] = 100
assert repr(config) is not None
tiledb.Ctx(config)
def test_ctx_config(self):
ctx = tiledb.Ctx({"sm.tile_cache_size": 100})
config = ctx.config()
self.assertEqual(config["sm.tile_cache_size"], "100")
def test_vfs_config(self):
config = tiledb.Config()
config["vfs.min_parallel_size"] = 1
ctx = tiledb.Ctx()
self.assertEqual(ctx.config()["vfs.min_parallel_size"], "10485760")
vfs = tiledb.VFS(config, ctx=ctx)
self.assertEqual(vfs.config()["vfs.min_parallel_size"], "1")
def test_config_iter(self):
config = tiledb.Config()
k, v = [], []
for p in config.items():
k.append(p[0])
v.append(p[1])
self.assertTrue(len(k) > 0)
k, v = [], []
for p in config.items("vfs.s3."):
k.append(p[0])
v.append(p[1])
self.assertTrue(len(k) > 0)
def test_config_bad_param(self):
config = tiledb.Config()
config["sm.foo"] = "bar"
ctx = tiledb.Ctx(config)
self.assertEqual(ctx.config()["sm.foo"], "bar")
def test_config_unset(self):
config = tiledb.Config()
config["sm.tile_cach_size"] = 100
del config["sm.tile_cache_size"]
# check that config parameter is default
self.assertEqual(
config["sm.tile_cache_size"], tiledb.Config()["sm.tile_cache_size"]
)
def test_config_from_file(self):
        # skip: because Config.load doesn't support VFS-supported URIs?
if pytest.tiledb_vfs == "s3":
pytest.skip(
"TODO need more plumbing to make pandas use TileDB VFS to read CSV files"
)
config_path = self.path("config")
with tiledb.FileIO(self.vfs, config_path, "wb") as fh:
fh.write("sm.tile_cache_size 100")
config = tiledb.Config.load(config_path)
self.assertEqual(config["sm.tile_cache_size"], "100")
def test_ctx_config_from_file(self):
config_path = self.path("config")
vfs = tiledb.VFS()
with tiledb.FileIO(vfs, config_path, "wb") as fh:
fh.write("sm.tile_cache_size 100")
ctx = tiledb.Ctx(config=tiledb.Config.load(config_path))
config = ctx.config()
self.assertEqual(config["sm.tile_cache_size"], "100")
def test_ctx_config_dict(self):
ctx = tiledb.Ctx(config={"sm.tile_cache_size": "100"})
config = ctx.config()
assert issubclass(type(config), tiledb.libtiledb.Config)
self.assertEqual(config["sm.tile_cache_size"], "100")
class GroupTestCase(DiskTestCase):
def setup_method(self):
super().setup_method()
self.group1 = self.path("group1")
self.group2 = self.path("group1/group2")
self.group3 = self.path("group1/group3")
self.group4 = self.path("group1/group3/group4")
tiledb.group_create(self.group1)
tiledb.group_create(self.group2)
tiledb.group_create(self.group3)
tiledb.group_create(self.group4)
def is_group(self, uri):
return tiledb.object_type(uri) == "group"
class GroupTest(GroupTestCase):
def test_is_group(self):
self.assertTrue(self.is_group(self.group1))
self.assertTrue(self.is_group(self.group2))
self.assertTrue(self.is_group(self.group3))
self.assertTrue(self.is_group(self.group4))
def test_walk_group(self):
if pytest.tiledb_vfs == "s3":
pytest.skip("S3 does not have empty directories")
groups = []
def append_to_groups(path, obj):
groups.append((os.path.normpath(path), obj))
tiledb.walk(self.path(""), append_to_groups, order="preorder")
groups.sort()
self.assertTrue(groups[0][0].endswith(self.group1) and groups[0][1] == "group")
self.assertTrue(groups[1][0].endswith(self.group2) and groups[1][1] == "group")
self.assertTrue(groups[2][0].endswith(self.group3) and groups[2][1] == "group")
self.assertTrue(groups[3][0].endswith(self.group4) and groups[3][1] == "group")
groups = []
tiledb.walk(self.path(""), append_to_groups, order="postorder")
self.assertTrue(groups[0][0].endswith(self.group2) and groups[0][1] == "group")
self.assertTrue(groups[1][0].endswith(self.group4) and groups[1][1] == "group")
self.assertTrue(groups[2][0].endswith(self.group3) and groups[2][1] == "group")
self.assertTrue(groups[3][0].endswith(self.group1) and groups[3][1] == "group")
def test_remove_group(self):
tiledb.remove(self.group3)
self.assertFalse(self.is_group(self.group3))
self.assertFalse(self.is_group(self.group4))
def test_move_group(self):
self.assertTrue(self.is_group(self.group2))
tiledb.move(self.group2, self.group2 + "_moved")
self.assertFalse(self.is_group(self.group2))
self.assertTrue(self.is_group(self.group2 + "_moved"))
class DimensionTest(unittest.TestCase):
def test_minimal_dimension(self):
dim = tiledb.Dim(domain=(0, 4), tile=5)
self.assertEqual(dim.name, "__dim_0", "automatic dimension name is incorrect")
self.assertEqual(dim.shape, (5,))
self.assertEqual(dim.tile, 5)
def test_dimension(self):
dim = tiledb.Dim(name="d1", domain=(0, 3), tile=2)
self.assertEqual(dim.name, "d1")
self.assertEqual(dim.shape, (4,))
self.assertEqual(dim.tile, 2)
def test_dimension_filter(self):
filters = [tiledb.GzipFilter(2)]
dim = tiledb.Dim(name="df", domain=(0, 2), tile=1, filters=filters)
self.assertEqual(dim.filters, filters)
filter_list = tiledb.FilterList(filters)
dim = tiledb.Dim(name="df", domain=(0, 2), tile=1, filters=filter_list)
self.assertEqual(dim.filters, filter_list)
with self.assertRaises(TypeError):
tiledb.Dim(name="df", domain=(0, 2), tile=1, filters=1)
def test_datetime_dimension(self):
# Regular usage
dim = tiledb.Dim(
name="d1",
domain=(np.datetime64("2010-01-01"), np.datetime64("2020-01-01")),
tile=np.timedelta64(20, "D"),
dtype=np.datetime64("", "D"),
)
self.assertEqual(dim.dtype, np.dtype(np.datetime64("", "D")))
self.assertEqual(dim.tile, np.timedelta64(20, "D"))
self.assertNotEqual(dim.tile, np.timedelta64(21, "D"))
self.assertNotEqual(dim.tile, np.timedelta64(20, "W")) # Sanity check unit
self.assertTupleEqual(
dim.domain, (np.datetime64("2010-01-01"), np.datetime64("2020-01-01"))
)
self.assertEqual(dim.shape, (3653,))
# No tile extent specified: this is not an error in 2.2
if tiledb.libtiledb.version() < (2, 2):
with self.assertRaises(tiledb.TileDBError):
tiledb.Dim(
name="d1",
domain=(np.datetime64("2010-01-01"), np.datetime64("2020-01-01")),
dtype=np.datetime64("", "D"),
)
# Integer tile extent is ok
dim = tiledb.Dim(
name="d1",
domain=(np.datetime64("2010-01-01"), np.datetime64("2020-01-01")),
tile=20,
dtype=np.datetime64("", "D"),
)
self.assertEqual(dim.dtype, np.dtype(np.datetime64("", "D")))
self.assertEqual(dim.tile, np.timedelta64(20, "D"))
# Year resolution
dim = tiledb.Dim(
name="d1",
domain=(np.datetime64("2010"), np.datetime64("2020")),
tile=5,
dtype=np.datetime64("", "Y"),
)
self.assertEqual(dim.dtype, np.dtype(np.datetime64("", "Y")))
self.assertEqual(dim.tile, np.timedelta64(5, "Y"))
self.assertTupleEqual(
dim.domain, (np.datetime64("2010", "Y"), np.datetime64("2020", "Y"))
)
# End domain promoted to day resolution
dim = tiledb.Dim(
name="d1",
domain=(np.datetime64("2010-01-01"), np.datetime64("2020")),
tile=2,
dtype=np.datetime64("", "D"),
)
self.assertEqual(dim.tile, np.timedelta64(2, "D"))
self.assertTupleEqual(
dim.domain,
(np.datetime64("2010-01-01", "D"), np.datetime64("2020-01-01", "D")),
)
# Domain values can't be integral
with self.assertRaises(TypeError):
dim = tiledb.Dim(
name="d1", domain=(-10, 10), tile=2, dtype=np.datetime64("", "D")
)
class DomainTest(DiskTestCase):
def test_domain(self, capfd):
dims = [
tiledb.Dim("d1", (1, 4), 2, dtype="u8"),
tiledb.Dim("d2", (1, 4), 2, dtype="u8"),
]
dom = tiledb.Domain(*dims)
# check that dumping works
dom.dump()
assert_captured(capfd, "Name: d1")
self.assertEqual(dom.ndim, 2)
self.assertEqual(dom.dtype, np.dtype("uint64"))
self.assertEqual(dom.shape, (4, 4))
# check that we can iterate over the dimensions
dim_names = [dim.name for dim in dom]
self.assertEqual(["d1", "d2"], dim_names)
# check that we can access dim by name
dim_d1 = dom.dim("d1")
self.assertEqual(dim_d1, dom.dim(0))
# check that we can construct directly from a List[Dim]
dom2 = tiledb.Domain(dims)
self.assertEqual(dom, dom2)
def test_datetime_domain(self):
dim = tiledb.Dim(
name="d1",
domain=(np.datetime64("2010-01-01"), np.datetime64("2020-01-01")),
tile=np.timedelta64(20, "D"),
dtype=np.datetime64("", "D"),
)
dom = tiledb.Domain(dim)
self.assertEqual(dom.dtype, np.datetime64("", "D"))
def test_domain_mixed_names_error(self):
with self.assertRaises(tiledb.TileDBError):
tiledb.Domain(
tiledb.Dim("d1", (1, 4), 2, dtype="u8"),
tiledb.Dim("__dim_0", (1, 4), 2, dtype="u8"),
)
def test_ascii_domain(self, capfd):
path = self.path("test_ascii_domain")
dim = tiledb.Dim(name="d", dtype="ascii")
assert dim.dtype == np.bytes_
dom = tiledb.Domain(dim)
dom.dump()
assert_captured(capfd, "Type: STRING_ASCII")
att = tiledb.Attr(name="a", dtype=np.int64)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,), sparse=True)
tiledb.SparseArray.create(path, schema)
ascii_coords = ["a", "b", "c", "ABC"]
unicode_coords = ["±", "×", "÷", "√"]
data = [1, 2, 3, 4]
with tiledb.open(path, "w") as A:
with self.assertRaises(tiledb.TileDBError):
A[unicode_coords] = data
A[ascii_coords] = data
class AttributeTest(DiskTestCase):
def test_minimal_attribute(self):
attr = tiledb.Attr()
self.assertTrue(attr.isanon)
self.assertEqual(attr.name, "")
self.assertEqual(attr.dtype, np.float_)
# self.assertEqual(attr.compressor, (None, -1))
self.assertFalse(attr.isvar)
self.assertFalse(attr.isnullable)
def test_attribute(self, capfd):
attr = tiledb.Attr("foo")
attr.dump()
assert_captured(capfd, "Name: foo")
assert attr.name == "foo"
assert attr.dtype == np.float64, "default attribute type is float64"
# compressor, level = attr.compressor
# self.assertEqual(compressor, None, "default to no compression")
# self.assertEqual(level, -1, "default compression level when none is specified")
@pytest.mark.parametrize(
"dtype, fill",
[
(np.dtype(bytes), b"abc"),
# (str, "defg"),
(np.float32, np.float32(0.4023573667780681)),
(np.float64, np.float64(0.0560602549760851)),
(np.dtype("M8[ns]"), np.timedelta64(11, "ns")),
(np.dtype([("f0", "<i4"), ("f1", "<i4"), ("f2", "<i4")]), (1, 2, 3)),
],
)
def test_attribute_fill(self, dtype, fill):
attr = tiledb.Attr("", dtype=dtype, fill=fill)
assert np.array(attr.fill, dtype=dtype) == np.array(fill, dtype=dtype)
path = self.path()
dom = tiledb.Domain(tiledb.Dim(domain=(0, 0), tile=1, dtype=np.int64))
schema = tiledb.ArraySchema(domain=dom, attrs=(attr,))
tiledb.DenseArray.create(path, schema)
with tiledb.open(path) as R:
assert R.multi_index[0][""] == np.array(fill, dtype=dtype)
assert R[0] == np.array(fill, dtype=dtype)
if not hasattr(dtype, "fields"):
# record type unsupported for .df
assert R.df[0][""].values == np.array(fill, dtype=dtype)
def test_full_attribute(self, capfd):
filter_list = tiledb.FilterList([tiledb.ZstdFilter(10)])
filter_list = tiledb.FilterList([tiledb.ZstdFilter(10)])
attr = tiledb.Attr("foo", dtype=np.int64, filters=filter_list)
attr.dump()
assert_captured(capfd, "Name: foo")
self.assertEqual(attr.name, "foo")
self.assertEqual(attr.dtype, np.int64)
# <todo>
# compressor, level = attr.compressor
# self.assertEqual(compressor, "zstd")
# self.assertEqual(level, 10)
def test_ncell_attribute(self):
dtype = np.dtype([("", np.int32), ("", np.int32), ("", np.int32)])
attr = tiledb.Attr("foo", dtype=dtype)
self.assertEqual(attr.dtype, dtype)
self.assertEqual(attr.ncells, 3)
# dtype subarrays not supported
with self.assertRaises(TypeError):
tiledb.Attr("foo", dtype=np.dtype((np.int32, 2)))
# mixed type record arrays not supported
with self.assertRaises(TypeError):
tiledb.Attr("foo", dtype=np.dtype([("", np.float32), ("", np.int32)]))
def test_ncell_bytes_attribute(self):
dtype = np.dtype((np.bytes_, 10))
attr = tiledb.Attr("foo", dtype=dtype)
self.assertEqual(attr.dtype, dtype)
self.assertEqual(attr.ncells, 10)
def test_bytes_var_attribute(self):
with pytest.warns(DeprecationWarning, match="Attr given `var=True` but"):
attr = tiledb.Attr("foo", var=True, dtype="S1")
self.assertEqual(attr.dtype, np.dtype("S"))
self.assertTrue(attr.isvar)
with pytest.warns(DeprecationWarning, match="Attr given `var=False` but"):
attr = tiledb.Attr("foo", var=False, dtype="S")
self.assertEqual(attr.dtype, np.dtype("S"))
self.assertTrue(attr.isvar)
attr = tiledb.Attr("foo", var=True, dtype="S")
self.assertEqual(attr.dtype, np.dtype("S"))
self.assertTrue(attr.isvar)
attr = tiledb.Attr("foo", var=False, dtype="S1")
self.assertEqual(attr.dtype, np.dtype("S1"))
self.assertFalse(attr.isvar)
attr = tiledb.Attr("foo", dtype="S1")
self.assertEqual(attr.dtype, np.dtype("S1"))
self.assertFalse(attr.isvar)
attr = tiledb.Attr("foo", dtype="S")
self.assertEqual(attr.dtype, np.dtype("S"))
self.assertTrue(attr.isvar)
def test_nullable_attribute(self):
attr = tiledb.Attr("nullable", nullable=True, dtype=np.int32)
self.assertEqual(attr.dtype, np.dtype(np.int32))
self.assertTrue(attr.isnullable)
def test_datetime_attribute(self):
attr = tiledb.Attr("foo", dtype=np.datetime64("", "D"))
assert attr.dtype == np.dtype(np.datetime64("", "D"))
assert attr.dtype != np.dtype(np.datetime64("", "Y"))
assert attr.dtype != np.dtype(np.datetime64)
@pytest.mark.parametrize("sparse", [True, False])
def test_ascii_attribute(self, sparse, capfd):
path = self.path("test_ascii")
dom = tiledb.Domain(
tiledb.Dim(name="d", domain=(1, 4), tile=1, dtype=np.uint32)
)
attrs = [tiledb.Attr(name="A", dtype="ascii", var=True)]
schema = tiledb.ArraySchema(domain=dom, attrs=attrs, sparse=sparse)
tiledb.Array.create(path, schema)
ascii_data = ["a", "b", "c", "ABC"]
unicode_data = ["±", "×", "÷", "√"]
with tiledb.open(path, "w") as A:
if sparse:
with self.assertRaises(tiledb.TileDBError):
A[np.arange(1, 5)] = unicode_data
A[np.arange(1, 5)] = ascii_data
else:
with self.assertRaises(tiledb.TileDBError):
A[:] = unicode_data
A[:] = ascii_data
with tiledb.open(path, "r") as A:
assert A.schema.nattr == 1
A.schema.dump()
assert_captured(capfd, "Type: STRING_ASCII")
assert A.schema.attr("A").dtype == np.bytes_
assert A.schema.attr("A").isascii
assert_array_equal(A[:]["A"], np.asarray(ascii_data, dtype=np.bytes_))
class ArraySchemaTest(DiskTestCase):
def test_schema_basic(self):
dom = tiledb.Domain(
tiledb.Dim("d1", (1, 4), 2, dtype="u8"),
tiledb.Dim("d2", (1, 4), 2, dtype="u8"),
)
attr1 = tiledb.Attr("foo", dtype=float)
attr2 = tiledb.Attr("foo", dtype=int)
# test unique attributes
with self.assertRaises(tiledb.TileDBError):
tiledb.ArraySchema(domain=dom, attrs=(attr1, attr2))
# test schema.check
schema = tiledb.ArraySchema(domain=dom, attrs=(attr1,))
# valid schema does not raise
schema.check()
with self.assertRaises(tiledb.TileDBError):
schema._make_invalid()
schema.check()
def test_dense_array_schema(self):
domain = tiledb.Domain(
tiledb.Dim(domain=(1, 8), tile=2), tiledb.Dim(domain=(1, 8), tile=2)
)
a1 = tiledb.Attr("val", dtype="f8")
schema = tiledb.ArraySchema(domain=domain, attrs=(a1,))
self.assertFalse(schema.sparse)
self.assertEqual(schema.cell_order, "row-major")
self.assertEqual(schema.tile_order, "row-major")
self.assertEqual(schema.domain, domain)
self.assertEqual(schema.ndim, 2)
self.assertEqual(schema.shape, (8, 8))
self.assertEqual(schema.nattr, 1)
self.assertEqual(schema.domain.homogeneous, True)
self.assertEqual(schema.attr(0), a1)
self.assertTrue(schema.has_attr("val"))
self.assertFalse(schema.has_attr("nononoattr"))
self.assertEqual(schema, tiledb.ArraySchema(domain=domain, attrs=(a1,)))
self.assertNotEqual(
schema, tiledb.ArraySchema(domain=domain, attrs=(a1,), sparse=True)
)
with self.assertRaises(tiledb.TileDBError):
schema.allows_duplicates
# test iteration over attributes
self.assertEqual(list(schema), [a1])
with self.assertRaisesRegex(
tiledb.TileDBError,
"Cannot set cell order; Hilbert order is only applicable to sparse arrays",
):
tiledb.ArraySchema(
domain=domain, attrs=(a1,), sparse=False, cell_order="hilbert"
)
def test_dense_array_schema_fp_domain_error(self):
dom = tiledb.Domain(tiledb.Dim(domain=(1, 8), tile=2, dtype=np.float64))
att = tiledb.Attr("val", dtype=np.float64)
with self.assertRaises(tiledb.TileDBError):
tiledb.ArraySchema(domain=dom, attrs=(att,))
def test_sparse_schema(self, capfd):
# create dimensions
d1 = tiledb.Dim("d1", domain=(1, 1000), tile=10, dtype="uint64")
d2 = tiledb.Dim("d2", domain=(101, 10000), tile=100, dtype="uint64")
# create domain
domain = tiledb.Domain(d1, d2)
# create attributes
a1 = tiledb.Attr("a1", dtype="int32,int32,int32")
a2 = tiledb.Attr(
"a2", filters=tiledb.FilterList([tiledb.GzipFilter(-1)]), dtype="float32"
)
# create sparse array with schema
coords_filters = tiledb.FilterList([tiledb.ZstdFilter(4)])
offsets_filters = tiledb.FilterList([tiledb.LZ4Filter(5)])
schema = tiledb.ArraySchema(
domain=domain,
attrs=(a1, a2),
capacity=10,
cell_order="col-major",
tile_order="row-major",
allows_duplicates=True,
sparse=True,
coords_filters=coords_filters,
offsets_filters=offsets_filters,
)
schema.dump()
assert_captured(capfd, "Array type: sparse")
self.assertTrue(schema.sparse)
self.assertEqual(schema.capacity, 10)
self.assertEqual(schema.cell_order, "col-major")
self.assertEqual(schema.tile_order, "row-major")
# <todo>
# self.assertEqual(schema.coords_compressor, ('zstd', 4))
# self.assertEqual(schema.offsets_compressor, ('lz4', 5))
self.assertEqual(schema.domain, domain)
self.assertEqual(schema.ndim, 2)
self.assertEqual(schema.shape, (1000, 9900))
self.assertEqual(schema.nattr, 2)
self.assertEqual(schema.attr(0), a1)
self.assertEqual(schema.attr("a2"), a2)
self.assertEqual(schema.allows_duplicates, True)
self.assertEqual(
schema,
tiledb.ArraySchema(
domain=domain,
attrs=(a1, a2),
capacity=10,
cell_order="col-major",
tile_order="row-major",
allows_duplicates=True,
sparse=True,
coords_filters=coords_filters,
offsets_filters=offsets_filters,
),
)
# test iteration over attributes
self.assertEqual(list(schema), [a1, a2])
with self.assertRaisesRegex(
tiledb.TileDBError,
"Cannot set tile order; Hilbert order is not applicable to tiles",
):
tiledb.ArraySchema(
domain=domain, attrs=(a1,), sparse=True, tile_order="hilbert"
)
def test_sparse_schema_filter_list(self, capfd):
# create dimensions
d1 = tiledb.Dim("d1", domain=(1, 1000), tile=10, dtype="uint64")
d2 = tiledb.Dim("d2", domain=(101, 10000), tile=100, dtype="uint64")
# create domain
domain = tiledb.Domain(d1, d2)
# create attributes
a1 = tiledb.Attr("a1", dtype="int32,int32,int32")
filter_list = tiledb.FilterList([tiledb.GzipFilter()])
a2 = tiledb.Attr("a2", filters=filter_list, dtype="float32")
off_filters_pylist = [tiledb.libtiledb.ZstdFilter(level=10)]
off_filters = tiledb.libtiledb.FilterList(
filters=off_filters_pylist, chunksize=2048
)
coords_filters_pylist = [tiledb.libtiledb.Bzip2Filter(level=5)]
coords_filters = tiledb.libtiledb.FilterList(
filters=coords_filters_pylist, chunksize=4096
)
# create sparse array with schema
schema = tiledb.ArraySchema(
domain=domain,
attrs=(a1, a2),
capacity=10,
cell_order="col-major",
tile_order="row-major",
coords_filters=coords_filters,
offsets_filters=off_filters,
sparse=True,
)
self.assertTrue(schema.sparse)
schema.dump()
assert_captured(capfd, "Array type: sparse")
# make sure we can construct ArraySchema with python lists of filters
schema2 = tiledb.ArraySchema(
domain=domain,
attrs=(a1, a2),
capacity=10,
cell_order="col-major",
tile_order="row-major",
coords_filters=coords_filters_pylist,
offsets_filters=off_filters,
sparse=True,
)
self.assertEqual(len(schema2.coords_filters), 1)
self.assertEqual(len(schema2.offsets_filters), 1)
def test_none_filter_list(self):
with self.assertRaises(ValueError):
tiledb.FilterList([None])
with self.assertRaises(ValueError):
fl = tiledb.FilterList()
fl.append(None)
def test_mixed_string_schema(self):
path = self.path("test_mixed_string_schema")
dims = [
tiledb.Dim(name="dpos", domain=(-100.0, 100.0), tile=10, dtype=np.float64),
tiledb.Dim(name="str_index", tile=None, dtype=np.bytes_),
]
dom = tiledb.Domain(*dims)
attrs = [tiledb.Attr(name="val", dtype=np.float64)]
schema = tiledb.ArraySchema(domain=dom, attrs=attrs, sparse=True)
self.assertTrue(schema.domain.has_dim("str_index"))
self.assertFalse(schema.domain.has_dim("nonono_str_index"))
self.assertTrue(schema.domain.dim("str_index").isvar)
self.assertFalse(schema.domain.dim("dpos").isvar)
self.assertEqual(schema.domain.dim("dpos").dtype, np.double)
self.assertEqual(schema.domain.dim("str_index").dtype, np.bytes_)
self.assertFalse(schema.domain.homogeneous)
tiledb.Array.create(path, schema)
with tiledb.open(path, "r") as arr:
assert_array_equal(arr[:]["str_index"], np.array([], dtype="|S1"))
class ArrayTest(DiskTestCase):
def create_array_schema(self):
domain = tiledb.Domain(
tiledb.Dim(domain=(1, 8), tile=2), tiledb.Dim(domain=(1, 8), tile=2)
)
a1 = tiledb.Attr("val", dtype="f8")
return tiledb.ArraySchema(domain=domain, attrs=(a1,))
def test_array_create(self):
config = tiledb.Config()
config["sm.consolidation.step_min_frag"] = 0
config["sm.consolidation.steps"] = 1
schema = self.create_array_schema()
# persist array schema
tiledb.libtiledb.Array.create(self.path("foo"), schema)
# these should be no-ops
# full signature
tiledb.consolidate(self.path("foo"), config=config)
# kw signature
tiledb.consolidate(uri=self.path("foo"))
# load array in readonly mode
array = tiledb.libtiledb.Array(self.path("foo"), mode="r")
self.assertTrue(array.isopen)
self.assertEqual(array.schema, schema)
self.assertEqual(array.mode, "r")
self.assertEqual(array.uri, self.path("foo"))
# test that we cannot consolidate an array in readonly mode
with self.assertRaises(tiledb.TileDBError):
array.consolidate()
# we have not written anything, so the array is empty
self.assertIsNone(array.nonempty_domain())
array.reopen()
self.assertTrue(array.isopen)
array.close()
self.assertEqual(array.isopen, False)
with self.assertRaises(tiledb.TileDBError):
# cannot get schema from closed array
array.schema
with self.assertRaises(tiledb.TileDBError):
# cannot re-open a closed array
array.reopen()
def test_array_create_with_ctx(self):
schema = self.create_array_schema()
with self.assertRaises(TypeError):
tiledb.libtiledb.Array.create(self.path("foo"), schema, ctx="foo")
# persist array schema
tiledb.libtiledb.Array.create(self.path("foo"), schema, ctx=tiledb.Ctx())
@pytest.mark.skipif(
not (sys.platform == "win32" and tiledb.libtiledb.version() >= (2, 3, 0)),
reason="Shared network drive only on Win32",
)
def test_array_create_on_shared_drive(self):
schema = self.create_array_schema()
uri = self.path(basename="foo", shared=True)
tiledb.libtiledb.Array.create(uri, schema)
# load array in readonly mode
array = tiledb.libtiledb.Array(uri, mode="r")
self.assertTrue(array.isopen)
self.assertEqual(array.schema, schema)
self.assertEqual(array.mode, "r")
self.assertEqual(array.uri, uri)
# we have not written anything, so the array is empty
self.assertIsNone(array.nonempty_domain())
array.reopen()
self.assertTrue(array.isopen)
array.close()
self.assertEqual(array.isopen, False)
with self.assertRaises(tiledb.TileDBError):
# cannot get schema from closed array
array.schema
with self.assertRaises(tiledb.TileDBError):
# cannot re-open a closed array
array.reopen()
def test_array_create_encrypted(self):
config = tiledb.Config()
config["sm.consolidation.step_min_frags"] = 0
config["sm.consolidation.steps"] = 1
schema = self.create_array_schema()
# persist array schema
tiledb.libtiledb.Array.create(
self.path("foo"), schema, key=b"0123456789abcdeF0123456789abcdeF"
)
        # check that we can open the array successfully
for key in (
b"0123456789abcdeF0123456789abcdeF",
"0123456789abcdeF0123456789abcdeF",
):
with tiledb.libtiledb.Array(self.path("foo"), mode="r", key=key) as array:
self.assertTrue(array.isopen)
self.assertEqual(array.schema, schema)
self.assertEqual(array.mode, "r")
with tiledb.open(self.path("foo"), mode="r", key=key) as array:
self.assertTrue(array.isopen)
self.assertEqual(array.schema, schema)
self.assertEqual(array.mode, "r")
tiledb.consolidate(uri=self.path("foo"), config=config, key=key)
# check that opening the array with the wrong key fails:
with self.assertRaises(tiledb.TileDBError):
tiledb.libtiledb.Array(
self.path("foo"), mode="r", key=b"0123456789abcdeF0123456789abcdeX"
)
# check that opening the array with the wrong key length fails:
with self.assertRaises(tiledb.TileDBError):
tiledb.libtiledb.Array(
self.path("foo"), mode="r", key=b"0123456789abcdeF0123456789abcde"
)
# check that consolidating the array with the wrong key fails:
with self.assertRaises(tiledb.TileDBError):
tiledb.consolidate(
self.path("foo"), config=config, key=b"0123456789abcdeF0123456789abcde"
)
# needs core fix in 2.2.4
@pytest.mark.skipif(
(sys.platform == "win32" and tiledb.libtiledb.version() == (2, 2, 3)),
reason="Skip array_doesnt_exist test on Win32 / libtiledb 2.2.3",
)
def test_array_doesnt_exist(self):
with self.assertRaises(tiledb.TileDBError):
tiledb.libtiledb.Array(self.path("foo"), mode="r")
def test_create_schema_matches(self):
dims = (tiledb.Dim(domain=(0, 6), tile=2),)
dom = tiledb.Domain(*dims)
att = tiledb.Attr(dtype=np.byte)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,), sparse=True)
uri = self.path("s1")
with self.assertRaises(ValueError):
tiledb.DenseArray.create(uri, schema)
dense_schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
uri = self.path("d1")
with self.assertRaises(ValueError):
tiledb.SparseArray.create(uri, dense_schema)
class MySparseArray(tiledb.SparseArray):
pass
with self.assertRaises(ValueError):
MySparseArray.create(uri, dense_schema)
def test_nonempty_domain_scalar(self):
uri = self.path("test_nonempty_domain_scalar")
dims = tiledb.Dim(domain=(-10, 10), dtype=np.int64, tile=1)
schema = tiledb.ArraySchema(
tiledb.Domain(dims), attrs=[tiledb.Attr(dtype=np.int32)], sparse=True
)
tiledb.Array.create(uri, schema)
with tiledb.open(uri, "w") as A:
A[-1] = 10
A[1] = 11
with tiledb.open(uri, "r") as A:
ned = A.nonempty_domain()
assert_array_equal(ned, ((-1, 1),))
assert isinstance(ned[0][0], int)
assert isinstance(ned[0][1], int)
def test_create_array_overwrite(self):
uri = self.path("test_create_array_overwrite")
dims = tiledb.Dim(domain=(0, 10), dtype=np.int64)
schema = tiledb.ArraySchema(
tiledb.Domain(dims), attrs=[tiledb.Attr(dtype=np.int32)], sparse=True
)
with pytest.warns(UserWarning, match="Overwrite set, but array does not exist"):
tiledb.Array.create(uri, schema, overwrite=True)
with tiledb.open(uri, "w") as A:
A[0] = 1
with tiledb.open(uri, "r") as A:
assert A.nonempty_domain() == ((0, 0),)
# cannot overwrite the array by default
with self.assertRaises(tiledb.TileDBError):
tiledb.Array.create(uri, schema)
tiledb.Array.create(uri, schema, overwrite=True)
# make sure the old array has been deleted and replaced
with tiledb.open(uri, "r") as A:
assert A.nonempty_domain() is None
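# DenseArray read/write tests: slicing, partial writes, timestamps, multiple attributes, and fixed-width cells.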
class DenseArrayTest(DiskTestCase):
def test_array_1d(self):
A = np.arange(1050)
dom = tiledb.Domain(tiledb.Dim(domain=(0, 1049), tile=100, dtype=np.int64))
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
self.assertEqual(len(A), len(T))
self.assertEqual(A.ndim, T.ndim)
self.assertEqual(A.shape, T.shape)
self.assertEqual(1, T.nattr)
self.assertEqual(A.dtype, T.attr(0).dtype)
self.assertEqual(T.dim(T.schema.domain.dim(0).name), T.dim(0))
with self.assertRaises(ValueError):
T.dim(1.0)
self.assertIsInstance(T.timestamp_range, tuple)
self.assertTrue(T.timestamp_range[1] > 0)
# check empty array
B = T[:]
self.assertEqual(A.shape, B.shape)
self.assertEqual(A.dtype, B.dtype)
self.assertIsNone(T.nonempty_domain())
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
# check set array
T[:] = A
read1_timestamp = -1
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
self.assertEqual(((0, 1049),), T.nonempty_domain())
# check timestamp
read1_timestamp = T.timestamp_range
self.assertTrue(read1_timestamp[1] > 0)
# check slicing
assert_array_equal(A, np.array(T))
assert_array_equal(A, T[:])
assert_array_equal(A, T[...])
assert_array_equal(A, T[slice(None)])
assert_array_equal(A[:10], T[:10])
assert_array_equal(A[10:20], T[10:20])
assert_array_equal(A[-10:], T[-10:])
# ellipsis
assert_array_equal(A[:10, ...], T[:10, ...])
assert_array_equal(A[10:50, ...], T[10:50, ...])
assert_array_equal(A[-50:, ...], T[-50:, ...])
assert_array_equal(A[..., :10], T[..., :10])
assert_array_equal(A[..., 10:20], T[..., 10:20])
assert_array_equal(A[..., -50:], T[..., -50:])
# across tiles
assert_array_equal(A[:150], T[:150])
assert_array_equal(A[-250:], T[-250:])
# point index
self.assertEqual(A[0], T[0])
self.assertEqual(A[-1], T[-1])
# point index with all index types
self.assertEqual(A[123], T[np.int8(123)])
self.assertEqual(A[123], T[np.uint8(123)])
self.assertEqual(A[123], T[np.int16(123)])
self.assertEqual(A[123], T[np.uint16(123)])
self.assertEqual(A[123], T[np.int64(123)])
self.assertEqual(A[123], T[np.uint64(123)])
self.assertEqual(A[123], T[np.int32(123)])
self.assertEqual(A[123], T[np.uint32(123)])
# mixed-type slicing
# https://github.com/TileDB-Inc/TileDB-Py/issues/140
self.assertEqual(A[0:1], T[0 : np.uint16(1)])
self.assertEqual(A[0:1], T[np.int64(0) : 1])
with self.assertRaises(IndexError):
# this is a consequence of NumPy promotion rules
self.assertEqual(A[0:1], T[np.uint64(0) : 1])
# basic step
assert_array_equal(A[:50:2], T[:50:2])
assert_array_equal(A[:2:50], T[:2:50])
assert_array_equal(A[10:-1:50], T[10:-1:50])
# indexing errors
with self.assertRaises(IndexError):
T[:, :]
with self.assertRaises(IndexError):
T[:, 50]
with self.assertRaises(IndexError):
T[50, :]
with self.assertRaises(IndexError):
T[0, 0]
# check single ellipsis
with self.assertRaises(IndexError):
T[..., 1:5, ...]
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
# check partial assignment
B = np.arange(1e5, 2e5).astype(A.dtype)
T[190:310] = B[190:310]
read2_timestamp = -1
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A[:190], T[:190])
assert_array_equal(B[190:310], T[190:310])
assert_array_equal(A[310:], T[310:])
# test timestamps are updated
read2_timestamp = T.timestamp_range
self.assertTrue(read2_timestamp > read1_timestamp)
def test_array_1d_set_scalar(self):
A = np.zeros(50)
dom = tiledb.Domain(tiledb.Dim(domain=(0, 49), tile=50))
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A, T[:])
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
value = -1, 3, 10
A[0], A[1], A[3] = value
T[0], T[1], T[3] = value
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A, T[:])
for value in (-1, 3, 10):
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
A[5:25] = value
T[5:25] = value
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A, T[:])
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
A[:] = value
T[:] = value
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A, T[:])
def test_array_id_point_queries(self):
# TODO: handle queries like T[[2, 5, 10]] = ?
pass
@pytest.mark.parametrize("dtype", INTEGER_DTYPES)
def test_dense_index_dtypes(self, dtype):
path = self.path()
data = np.arange(0, 3).astype(dtype)
with tiledb.from_numpy(path, data) as A:
pass
with tiledb.open(path) as B:
assert_array_equal(B[:], data)
def test_array_2d(self):
A = np.arange(10000).reshape((1000, 10))
dom = tiledb.Domain(
tiledb.Dim(domain=(0, 999), tile=100), tiledb.Dim(domain=(0, 9), tile=2)
)
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
self.assertEqual(len(A), len(T))
self.assertEqual(A.ndim, T.ndim)
self.assertEqual(A.shape, T.shape)
self.assertEqual(1, T.nattr)
self.assertEqual(A.dtype, T.attr(0).dtype)
# check that the non-empty domain is None
self.assertIsNone(T.nonempty_domain())
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
# Set data
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A, T[:])
# check the non-empty domain spans the whole domain
self.assertEqual(((0, 999), (0, 9)), T.nonempty_domain())
# check array-like
assert_array_equal(A, np.array(T))
# slicing
assert_array_equal(A, T[:])
assert_array_equal(A, T[...])
assert_array_equal(A, T[slice(None)])
# slice first dimension
assert_array_equal(A[:10], T[:10])
assert_array_equal(A[10:20], T[10:20])
assert_array_equal(A[-10:], T[-10:])
assert_array_equal(A[:10, :], T[:10, :])
assert_array_equal(A[10:20, :], T[10:20, :])
assert_array_equal(A[-10:, :], T[-10:, :])
assert_array_equal(A[:10, ...], T[:10, ...])
assert_array_equal(A[10:20, ...], T[10:20, ...])
assert_array_equal(A[-10:, ...], T[-10:, ...])
assert_array_equal(A[:10, :, ...], T[:10, :, ...])
assert_array_equal(A[10:20, :, ...], T[10:20, :, ...])
assert_array_equal(A[-10:, :, ...], T[-10:, :, ...])
# slice second dimension
assert_array_equal(A[:, :2], T[:, :2])
assert_array_equal(A[:, 2:4], T[:, 2:4])
assert_array_equal(A[:, -2:], T[:, -2:])
assert_array_equal(A[..., :2], T[..., :2])
assert_array_equal(A[..., 2:4], T[..., 2:4])
assert_array_equal(A[..., -2:], T[..., -2:])
assert_array_equal(A[:, ..., :2], T[:, ..., :2])
assert_array_equal(A[:, ..., 2:4], T[:, ..., 2:4])
assert_array_equal(A[:, ..., -2:], T[:, ..., -2:])
# slice both dimensions
assert_array_equal(A[:10, :2], T[:10, :2])
assert_array_equal(A[10:20, 2:4], T[10:20, 2:4])
assert_array_equal(A[-10:, -2:], T[-10:, -2:])
# slice across tile boundaries
assert_array_equal(A[:110], T[:110])
assert_array_equal(A[190:310], T[190:310])
assert_array_equal(A[-110:], T[-110:])
assert_array_equal(A[:110, :], T[:110, :])
assert_array_equal(A[190:310, :], T[190:310, :])
assert_array_equal(A[-110:, :], T[-110:, :])
assert_array_equal(A[:, :3], T[:, :3])
assert_array_equal(A[:, 3:7], T[:, 3:7])
assert_array_equal(A[:, -3:], T[:, -3:])
assert_array_equal(A[:110, :3], T[:110, :3])
assert_array_equal(A[190:310, 3:7], T[190:310, 3:7])
assert_array_equal(A[-110:, -3:], T[-110:, -3:])
# single row/col/item
assert_array_equal(A[0], T[0])
assert_array_equal(A[-1], T[-1])
assert_array_equal(A[:, 0], T[:, 0])
assert_array_equal(A[:, -1], T[:, -1])
self.assertEqual(A[0, 0], T[0, 0])
self.assertEqual(A[-1, -1], T[-1, -1])
# too many indices
with self.assertRaises(IndexError):
T[:, :, :]
with self.assertRaises(IndexError):
T[0, :, :]
with self.assertRaises(IndexError):
T[:, 0, :]
with self.assertRaises(IndexError):
T[:, :, 0]
with self.assertRaises(IndexError):
T[0, 0, 0]
# only single ellipsis allowed
with self.assertRaises(IndexError):
T[..., ...]
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
# check partial assignment
B = np.arange(10000, 20000).reshape((1000, 10))
T[190:310, 3:7] = B[190:310, 3:7]
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A[:190], T[:190])
assert_array_equal(A[:, :3], T[:, :3])
assert_array_equal(B[190:310, 3:7], T[190:310, 3:7])
assert_array_equal(A[310:], T[310:])
assert_array_equal(A[:, 7:], T[:, 7:])
@pytest.mark.skipif(
not (sys.platform == "win32" and tiledb.libtiledb.version() >= (2, 3, 0)),
reason="Shared network drive only on Win32",
)
def test_array_1d_shared_drive(self):
A = np.zeros(50)
dom = tiledb.Domain(tiledb.Dim(domain=(0, 49), tile=50))
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(dom, (att,))
uri = self.path("foo", shared=True)
tiledb.DenseArray.create(uri, schema)
with tiledb.DenseArray(uri, mode="w") as T:
T[:] = A
with tiledb.DenseArray(uri, mode="r") as T:
assert_array_equal(A, T[:])
with tiledb.DenseArray(uri, mode="w") as T:
value = -1, 3, 10
A[0], A[1], A[3] = value
T[0], T[1], T[3] = value
with tiledb.DenseArray(uri, mode="r") as T:
assert_array_equal(A, T[:])
for value in (-1, 3, 10):
with tiledb.DenseArray(uri, mode="w") as T:
A[5:25] = value
T[5:25] = value
with tiledb.DenseArray(uri, mode="r") as T:
assert_array_equal(A, T[:])
with tiledb.DenseArray(uri, mode="w") as T:
A[:] = value
T[:] = value
with tiledb.DenseArray(uri, mode="r") as T:
assert_array_equal(A, T[:])
def test_fixed_string(self):
a = np.array(["ab", "cd", "ef", "gh", "ij", "kl", "", "op"], dtype="|S2")
with tiledb.from_numpy(self.path("fixed_string"), a) as T:
with tiledb.open(self.path("fixed_string")) as R:
self.assertEqual(T.dtype, R.dtype)
self.assertEqual(R.attr(0).ncells, 2)
assert_array_equal(T, R)
def test_ncell_int(self):
a = np.array([(1, 2), (3, 4), (5, 6)], dtype=[("", np.int16), ("", np.int16)])
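# a structured dtype with two int16 fields is stored as a single attribute with ncells=2 (asserted below)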
with tiledb.from_numpy(self.path("ncell_int16"), a) as T:
with tiledb.open(self.path("ncell_int16")) as R:
self.assertEqual(T.dtype, R.dtype)
self.assertEqual(R.attr(0).ncells, 2)
assert_array_equal(T, R)
assert_array_equal(T, R.multi_index[0:2][""])
def test_open_with_timestamp(self):
A = np.zeros(3)
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), tile=3, dtype=np.int64))
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.DenseArray.create(self.path("foo"), schema)
# write
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
read1_timestamp = -1
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
read1_timestamp = T.timestamp_range
self.assertEqual(T[0], 0)
self.assertEqual(T[1], 0)
self.assertEqual(T[2], 0)
# sleep 200ms and write
time.sleep(0.2)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[0:1] = 1
read2_timestamp = -1
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
read2_timestamp = T.timestamp_range
self.assertTrue(read2_timestamp > read1_timestamp)
# sleep 200ms and write
time.sleep(0.2)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[1:2] = 2
read3_timestamp = -1
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
read3_timestamp = T.timestamp_range
self.assertTrue(read3_timestamp > read2_timestamp > read1_timestamp)
# read at first timestamp
with tiledb.DenseArray(
self.path("foo"), timestamp=read1_timestamp, mode="r"
) as T:
self.assertEqual(T[0], 0)
self.assertEqual(T[1], 0)
self.assertEqual(T[2], 0)
# read at second timestamp
with tiledb.DenseArray(
self.path("foo"), timestamp=read2_timestamp, mode="r"
) as T:
self.assertEqual(T[0], 1)
self.assertEqual(T[1], 0)
self.assertEqual(T[2], 0)
# read at third timestamp
with tiledb.DenseArray(
self.path("foo"), timestamp=read3_timestamp, mode="r"
) as T:
self.assertEqual(T[0], 1)
self.assertEqual(T[1], 2)
self.assertEqual(T[2], 0)
def test_open_timestamp_range(self):
A = np.zeros(3)
path = self.path("open_timestamp_range")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), tile=3, dtype=np.int64))
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.DenseArray.create(path, schema)
# write
with tiledb.DenseArray(path, timestamp=1, mode="w") as T:
T[:] = A * 1
with tiledb.DenseArray(path, timestamp=2, mode="w") as T:
T[:] = A * 2
with tiledb.DenseArray(path, timestamp=3, mode="w") as T:
T[:] = A * 3
with tiledb.DenseArray(path, timestamp=4, mode="w") as T:
T[:] = A * 4
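# `timestamp` may be a single value (read fragments written at or before it) or a
# (start, end) tuple selecting a fragment range; None leaves that side of the range open.
# The assertions below exercise each of these forms.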
def assert_ts(timestamp, result):
with tiledb.DenseArray(path, mode="r", timestamp=timestamp) as T:
assert_array_equal(T, result)
assert_ts(0, A * np.nan)
assert_ts(1, A * 1)
assert_ts(2, A * 2)
assert_ts(3, A * 3)
assert_ts((1, 2), A * 2)
assert_ts((0, 3), A * 3)
assert_ts((1, 3), A * 3)
assert_ts((2, 3), A * 3)
assert_ts((2, 4), A * 3)
assert_ts((None, 2), A * 2)
assert_ts((None, 3), A * 3)
assert_ts((2, None), A * 3)
assert_ts((3, None), A * 3)
def test_ncell_attributes(self):
dom = tiledb.Domain(tiledb.Dim(domain=(0, 9), tile=10, dtype=int))
attr = tiledb.Attr(dtype=[("", np.int32), ("", np.int32), ("", np.int32)])
schema = tiledb.ArraySchema(domain=dom, attrs=(attr,))
tiledb.DenseArray.create(self.path("foo"), schema)
A = np.ones((10,), dtype=[("", np.int32), ("", np.int32), ("", np.int32)])
self.assertEqual(A.dtype, attr.dtype)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A, T[:])
assert_array_equal(A[:5], T[:5])
def test_complex_attributes(self):
dom = tiledb.Domain(tiledb.Dim(domain=(0, 9), tile=10, dtype=int))
attr = tiledb.Attr(dtype=np.complex64)
schema = tiledb.ArraySchema(domain=dom, attrs=(attr,))
tiledb.DenseArray.create(self.path("foo"), schema)
A = np.random.rand(20).astype(np.float32).view(dtype=np.complex64)
self.assertEqual(schema, tiledb.schema_like(A, dim_dtype=int))
self.assertEqual(A.dtype, attr.dtype)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A, T[:])
assert_array_equal(A[:5], T[:5])
def test_multiple_attributes(self):
dom = tiledb.Domain(
tiledb.Dim(domain=(0, 1), tile=1, dtype=np.int64),
tiledb.Dim(domain=(0, 3), tile=4, dtype=np.int64),
)
attr_int = tiledb.Attr("ints", dtype=int)
attr_float = tiledb.Attr("floats", dtype=float)
schema = tiledb.ArraySchema(domain=dom, attrs=(attr_int, attr_float))
tiledb.DenseArray.create(self.path("foo"), schema)
V_ints = np.array([[0, 1, 2, 3], [4, 6, 7, 5]])
V_floats = np.array([[0.0, 1.0, 2.0, 3.0], [4.0, 6.0, 7.0, 5.0]])
V = {"ints": V_ints, "floats": V_floats}
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = V
# check setting attribute in different order from Attr definition
# https://github.com/TileDB-Inc/TileDB-Py/issues/299
V2 = {"floats": V_floats, "ints": V_ints}
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = V2
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
R = T[:]
assert_array_equal(V["ints"], R["ints"])
assert_array_equal(V["floats"], R["floats"])
R = T.query(attrs=("ints",))[1:3]
assert_array_equal(V["ints"][1:3], R["ints"])
R = T.query(attrs=("floats",), order="F")[:]
self.assertTrue(R["floats"].flags.f_contiguous)
R = T.query(attrs=("ints",), coords=True)[0, 0:3]
self.assertTrue("__dim_0" in R)
self.assertTrue("__dim_1" in R)
assert_array_equal(R["__dim_0"], np.array([0, 0, 0]))
assert_array_equal(R["__dim_1"], np.array([0, 1, 2]))
# Global order returns results as a linear buffer
R = T.query(attrs=("ints",), order="G")[:]
self.assertEqual(R["ints"].shape, (8,))
with self.assertRaises(tiledb.TileDBError):
T.query(attrs=("unknown",))[:]
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
# check error ncells length
V["ints"] = V["ints"][1:2].copy()
with self.assertRaises(tiledb.TileDBError):
T[:] = V
# check error attribute does not exist
V["foo"] = V["ints"].astype(np.int8)
with self.assertRaises(tiledb.TileDBError):
T[:] = V
def test_array_2d_s1(self):
# This array is currently read back with dtype object
A = np.array([["A", "B"], ["C", ""]], dtype="S")
uri = self.path()
dom = tiledb.Domain(
tiledb.Dim(name="rows", domain=(0, 1), tile=2, dtype=np.int64),
tiledb.Dim(name="cols", domain=(0, 1), tile=2, dtype=np.int64),
)
schema = tiledb.ArraySchema(
domain=dom, sparse=False, attrs=[tiledb.Attr(name="a", dtype="S")]
)
tiledb.DenseArray.create(uri, schema)
with tiledb.DenseArray(uri, mode="w") as T:
T[...] = A
with tiledb.DenseArray(uri) as T:
assert_array_equal(A, T)
res = T.multi_index[(0, 1), (0, 1)]["a"]
assert_array_equal(A, res)
def test_nd_roundtrip(self):
dim_set = np.int64([3 + x % 2 for x in range(2, 12)])
for i, last in enumerate(range(2, len(dim_set))):
dims = dim_set[:last]
data = np.random.rand(*dims).astype("int32")
with tiledb.from_numpy(self.path(f"nd_roundtrip{i}"), data) as A:
assert_array_equal(data, A[:])
def test_array_2d_s3_mixed(self):
# This array is currently read back with dtype object
A = np.array([["AAA", "B"], ["AB", "C"]], dtype="S3")
uri = self.path()
dom = tiledb.Domain(
tiledb.Dim(name="rows", domain=(0, 1), tile=2, dtype=np.int64),
tiledb.Dim(name="cols", domain=(0, 1), tile=2, dtype=np.int64),
)
schema = tiledb.ArraySchema(
domain=dom, sparse=False, attrs=[tiledb.Attr(name="a", dtype="S3")]
)
tiledb.DenseArray.create(uri, schema)
with tiledb.DenseArray(uri, mode="w") as T:
T[...] = A
with tiledb.DenseArray(uri) as T:
assert_array_equal(A, T)
res = T.multi_index[(0, 1), (0, 1)]["a"]
assert_array_equal(A, res)
def test_incomplete_dense(self):
path = self.path("incomplete_dense")
# create 10 MB array
data = np.arange(1310720, dtype=np.int64)
# if `tile` is not set, it defaults to the full array and we
# only read 8 bytes at a time.
use_tile = 131072
# use_tile = None
with tiledb.from_numpy(path, data, tile=use_tile) as A:
pass
# create context with 1 MB memory budget (2 MB total, 1 MB usable)
config = tiledb.Config(
{"sm.memory_budget": 2 * 1024 ** 2, "py.init_buffer_bytes": 1024 ** 2}
)
self.assertEqual(config["py.init_buffer_bytes"], str(1024 ** 2))
# TODO would be good to check repeat count here. Not currently exposed by retry loop.
with tiledb.DenseArray(path, ctx=tiledb.Ctx(config)) as A:
res_mr = A.multi_index[slice(0, len(data) - 1)]
assert_array_equal(res_mr[""], data)
res_idx = A[:]
assert_array_equal(res_idx, data)
df = A.df[:]
assert_array_equal(df[""], data)
def test_written_fragment_info(self):
uri = self.path("test_written_fragment_info")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 9), tile=10, dtype=np.int64))
att = tiledb.Attr(dtype=np.int64)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.DenseArray.create(uri, schema)
with tiledb.DenseArray(uri, mode="w") as T:
T[:] = np.arange(0, 10, dtype=np.int64)
self.assertTrue(T.last_write_info is not None)
self.assertTrue(len(T.last_write_info.keys()) == 1)
t_w1, t_w2 = list(T.last_write_info.values())[0]
self.assertTrue(t_w1 > 0)
self.assertTrue(t_w2 > 0)
def test_missing_schema_error(self):
uri = self.path("test_missing_schema_error")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 9), tile=10, dtype=np.int64))
att = tiledb.Attr(dtype=np.int64)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.DenseArray.create(uri, schema)
with tiledb.DenseArray(uri, mode="w") as T:
T[:] = np.arange(0, 10, dtype=np.int64)
if tiledb.libtiledb.version() < (2, 4):
tiledb.VFS().remove_file(os.path.join(uri, "__array_schema.tdb"))
else:
tiledb.VFS().remove_dir(os.path.join(uri, "__schema"))
# a new ctx is required when running against S3; otherwise the schema
# would simply be read from the cache.
with tiledb.scope_ctx():
with self.assertRaises(tiledb.TileDBError):
tiledb.DenseArray(uri)
@pytest.mark.xfail(
tiledb.libtiledb.version() >= (2, 5),
reason="Skip sparse_write_to_dense with libtiledb 2.5+",
)
def test_sparse_write_to_dense(self):
class AssignAndCheck:
def __init__(self, outer, *shape):
self.outer = outer
self.shape = shape
def __setitem__(self, s, v):
A = np.random.rand(*self.shape)
uri = self.outer.path(
f"sparse_write_to_dense{random.randint(0,np.uint64(-1))}"
)
tiledb.from_numpy(uri, A).close()
with tiledb.open(uri, "w") as B:
B[s] = v
A[s] = v
with tiledb.open(uri) as B:
assert_array_equal(A, B[:])
D = AssignAndCheck(self, 5, 5)
with pytest.warns(
DeprecationWarning, match="Sparse writes to dense arrays is deprecated"
):
D[np.array([1, 2]), np.array([0, 0])] = np.array([0, 2])
def test_reopen_dense_array(self):
uri = self.path("test_reopen_dense_array")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 9), tile=10, dtype=np.int64))
att = tiledb.Attr(dtype=np.int64)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.DenseArray.create(uri, schema)
data = np.arange(0, 10, dtype=np.int64)
with tiledb.DenseArray(uri, mode="w", timestamp=1) as T:
T[:] = data
with tiledb.DenseArray(uri, mode="w", timestamp=2) as T:
T[:] = data * 2
T = tiledb.DenseArray(uri, mode="r", timestamp=1)
assert_array_equal(T[:], data)
T.reopen()
assert_array_equal(T[:], data * 2)
T.close()
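# Variable-length attribute tests: bytes, unicode, float64, and integer cell values, plus error cases.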
class TestVarlen(DiskTestCase):
def test_varlen_write_bytes(self):
A = np.array(
[
"aa",
"bbb",
"ccccc",
"ddddddddddddddddddddd",
"ee",
"ffffff",
"g",
"hhhhhhhhhh",
],
dtype=bytes,
)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(1, len(A)), tile=len(A)))
att = tiledb.Attr(dtype=np.bytes_)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A[:], T[:])
assert_array_equal(A, T.multi_index[1 : len(A)][""])
def test_varlen_sparse_all_empty_strings(self):
# this test addresses a fix for a specific need: reads on a
# large existing array, see
# https://github.com/TileDB-Inc/TileDB-Py/pull/475
# we currently have to write a placeholder at the end to
# avoid zero-length cell error
# TODO: follow-up with improved testing for empty var-length/strings
A = np.array(["", "", "", "", "", "\x00"], dtype=object)
dim_len = len(A)
uri = self.path("varlen_all_empty_strings")
dom = tiledb.Domain(tiledb.Dim(domain=(1, dim_len), tile=dim_len))
att = tiledb.Attr(name="a1", dtype=np.str_, var=True)
schema = tiledb.ArraySchema(dom, (att,), sparse=True)
tiledb.Array.create(uri, schema)
with tiledb.open(uri, mode="w") as T:
T[np.arange(1, dim_len + 1)] = {"a1": A}
with tiledb.open(uri, mode="r") as T:
# check interior range
assert_array_equal(A[1:-1], T[2:-1]["a1"])
assert_array_equal(A[1:-1], T.multi_index[2 : dim_len - 1]["a1"])
def test_varlen_write_unicode(self):
A = np.array(
[
"aa",
"bbb",
"ccccc",
"ddddddddddddddddddddd",
"ee",
"ffffff",
"g",
"",
"hhhhhhhhhh",
],
dtype=np.unicode_,
)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(1, len(A)), tile=len(A)))
att = tiledb.Attr(dtype=np.unicode_, var=True)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A[:], T[:])
def test_varlen_write_floats(self):
# Generates 8 variable-length float64 subarrays (subarray len and content are randomized)
A = np.array(
[np.random.rand(x) for x in np.random.randint(1, 12, 8)], dtype=object
)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(1, len(A)), tile=len(A)))
att = tiledb.Attr(dtype=np.float64, var=True)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
T_ = T[:]
# TODO/note: the return is a 0-element array.
assert_array_equal(A[0], T[1][()])
assert_array_equal(A[-1], T[-1][()])
self.assertEqual(len(A), len(T_))
# can't use assert_array_equal w/ object array
self.assertTrue(all(np.array_equal(x, A[i]) for i, x in enumerate(T_)))
def test_varlen_write_floats_2d(self):
A = np.array(
[np.random.rand(x) for x in np.arange(1, 10)], dtype=object
).reshape(3, 3)
# basic write
dom = tiledb.Domain(
tiledb.Dim(domain=(1, 3), tile=len(A)),
tiledb.Dim(domain=(1, 3), tile=len(A)),
)
att = tiledb.Attr(dtype=np.float64, var=True)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
T_ = T[:]
self.assertEqual(len(A), len(T_))
# can't use assert_array_equal w/ object array
self.assertTrue(
np.all(
[np.array_equal(A.flat[i], T[:].flat[i]) for i in np.arange(0, 9)]
)
)
def test_varlen_write_int_subarray(self):
A = np.array(
list(
map(
lambda x: np.array(x, dtype=np.uint64),
[np.arange(i, 2 * i + 1) for i in np.arange(0, 16)],
)
),
dtype="O",
).reshape(4, 4)
uri = self.path("test_varlen_write_int_subarray")
dom = tiledb.Domain(
tiledb.Dim(domain=(0, 3), tile=len(A)),
tiledb.Dim(domain=(0, 3), tile=len(A)),
)
att = tiledb.Attr(dtype=np.uint64, var=True)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(uri, schema)
# NumPy forces single-element object arrays into a contiguous layout
# so we alternate the size to get a consistent baseline array.
A_onestwos = np.array(
list(
map(
lambda x: np.array(x, dtype=np.uint64),
list([(1,) if x % 2 == 0 else (1, 2) for x in range(16)]),
)
),
dtype=np.dtype("O"),
).reshape(4, 4)
with tiledb.open(uri, "w") as T:
T[:] = A_onestwos
with tiledb.open(uri, "w") as T:
T[1:3, 1:3] = A[1:3, 1:3]
A_assigned = A_onestwos.copy()
A_assigned[1:3, 1:3] = A[1:3, 1:3]
with tiledb.open(uri) as T:
assert_subarrays_equal(A_assigned, T[:])
def test_varlen_write_fixedbytes(self):
# The actual dtype of this array is 'S21'
A = np.array(
[
"aa",
"bbb",
"ccccc",
"ddddddddddddddddddddd",
"ee",
"ffffff",
"g",
"hhhhhhhhhh",
],
dtype=np.dtype("S"),
)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(1, len(A)), tile=len(A)))
att = tiledb.Attr(dtype=np.bytes_)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A[:], T[:])
def test_varlen_write_fixedunicode(self):
A = np.array(
[
"aa",
"bbb",
"ccccc",
"ddddddddddddddddddddd",
"ee",
"ffffff",
"",
"g",
"hhhhhhhhhh",
],
dtype=np.dtype("U"),
)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(1, len(A)), tile=len(A)))
att = tiledb.Attr(dtype=np.unicode_)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
assert_array_equal(A[:], T[:])
def test_varlen_write_ints(self):
A = np.array(
[
np.uint64(np.random.randint(0, pow(10, 6), x))
for x in np.random.randint(1, 12, 8)
],
dtype=object,
)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(1, len(A)), tile=len(A)))
att = tiledb.Attr(dtype=np.int64, var=True)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
T_ = T[:]
self.assertEqual(len(A), len(T))
# can't use assert_array_equal w/ object array
self.assertTrue(all(np.array_equal(x, A[i]) for i, x in enumerate(T_)))
def test_varlen_wrong_domain(self):
A = np.array(
[
"aa",
"bbb",
"ccccc",
"ddddddddddddddddddddd",
"ee",
"ffffff",
"g",
"hhhhhhhhhh",
]
)
dom = tiledb.Domain(tiledb.Dim(domain=(1, 3), tile=3))
att = tiledb.Attr(dtype=np.bytes_)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
with self.assertRaises(tiledb.TileDBError):
T[:] = A
def test_array_varlen_mismatched(self):
# Test that we raise a TypeError when passing a heterogeneous object array.
A = np.array([b"aa", b"bbb", b"cccc", np.uint64([1, 3, 4])], dtype=object)
dom = tiledb.Domain(tiledb.Dim(domain=(0, 3), tile=4))
att = tiledb.Attr(dtype=np.bytes_, var=True)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
with self.assertRaises(TypeError):
T[:] = A
def test_array_varlen_2d_s_fixed(self):
A = np.array(
[["AAAAAAAAAa", "BBB"], ["ACCC", "BBBCBCBCBCCCBBCBCBCCBC"]], dtype="S"
)
uri = self.path("varlen_2d_s_fixed")
dom = tiledb.Domain(
tiledb.Dim(name="rows", domain=(0, 1), tile=2, dtype=np.int64),
tiledb.Dim(name="cols", domain=(0, 1), tile=2, dtype=np.int64),
)
schema = tiledb.ArraySchema(
domain=dom, sparse=False, attrs=[tiledb.Attr(name="a", dtype="S", var=True)]
)
tiledb.DenseArray.create(uri, schema)
with tiledb.DenseArray(uri, mode="w") as T:
T[...] = A
with tiledb.DenseArray(uri) as T:
assert_array_equal(A, T)
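# SparseArray tests: coordinate writes, multi_index queries, string and heterogeneous domains.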
class TestSparseArray(DiskTestCase):
@pytest.mark.xfail
def test_simple_1d_sparse_vector(self):
dom = tiledb.Domain(tiledb.Dim(domain=(0, 3), tile=4, dtype=int))
att = tiledb.Attr(dtype=int)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,), sparse=True)
tiledb.SparseArray.create(self.path("foo"), schema)
values = np.array([3, 4])
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[1, 2]] = values
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
assert_array_equal(T[[1, 2]], values)
@pytest.mark.xfail
def test_simple_2d_sparse_vector(self):
attr = tiledb.Attr(dtype=float)
dom = tiledb.Domain(
tiledb.Dim(domain=(0, 3), tile=4, dtype=int),
tiledb.Dim(domain=(0, 3), tile=4, dtype=int),
)
schema = tiledb.ArraySchema(domain=dom, attrs=(attr,), sparse=True)
tiledb.SparseArray.create(self.path("foo"), schema)
values = np.array([3, 4], dtype=float)
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[1, 2], [1, 2]] = values
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
assert_array_equal(T[[1, 2], [1, 2]], values)
@pytest.mark.xfail
def test_simple3d_sparse_vector(self):
dom = tiledb.Domain(
tiledb.Dim("x", domain=(0, 3), tile=4, dtype=int),
tiledb.Dim("y", domain=(0, 3), tile=4, dtype=int),
tiledb.Dim("z", domain=(0, 3), tile=4, dtype=int),
)
attr = tiledb.Attr(dtype=float)
schema = tiledb.ArraySchema(domain=dom, attrs=(attr,), sparse=True)
tiledb.SparseArray.create(self.path("foo"), schema)
values = np.array([3, 4], dtype=float)
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[1, 2], [1, 2], [1, 2]] = values
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
assert_array_equal(T[[1, 2], [1, 2], [1, 2]], values)
@pytest.mark.xfail
def test_sparse_ordered_fp_domain(self):
dom = tiledb.Domain(tiledb.Dim("x", domain=(0.0, 10.0), tile=2.0, dtype=float))
attr = tiledb.Attr(dtype=float)
schema = tiledb.ArraySchema(domain=dom, attrs=(attr,), sparse=True)
tiledb.SparseArray.create(self.path("foo"), schema)
values = np.array([3.3, 2.7])
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[2.5, 4.2]] = values
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
assert_array_equal(T[[2.5, 4.2]], values)
@pytest.mark.xfail
def test_sparse_unordered_fp_domain(self):
dom = tiledb.Domain(tiledb.Dim("x", domain=(0.0, 10.0), tile=2.0, dtype=float))
attr = tiledb.Attr(dtype=float)
schema = tiledb.ArraySchema(domain=dom, attrs=(attr,), sparse=True)
tiledb.SparseArray.create(self.path("foo"), schema)
values = np.array([3.3, 2.7])
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[4.2, 2.5]] = values
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
assert_array_equal(T[[2.5, 4.2]], values[::-1])
@pytest.mark.xfail
def test_multiple_attributes(self):
uri = self.path()
dom = tiledb.Domain(
tiledb.Dim(domain=(1, 10), tile=10, dtype=int),
tiledb.Dim(domain=(1, 10), tile=10, dtype=int),
)
attr_int = tiledb.Attr("ints", dtype=int)
attr_float = tiledb.Attr("floats", dtype="float")
schema = tiledb.ArraySchema(
domain=dom, attrs=(attr_int, attr_float), sparse=True
)
tiledb.SparseArray.create(self.path("foo"), schema)
I = np.array([1, 1, 1, 2, 3, 3, 3, 4])
J = np.array([1, 2, 4, 3, 1, 6, 7, 5])
V_ints = np.array([0, 1, 2, 3, 4, 6, 7, 5])
V_floats = np.array([0.0, 1.0, 2.0, 3.0, 4.0, 6.0, 7.0, 5.0])
V = {"ints": V_ints, "floats": V_floats}
with tiledb.SparseArray(uri, mode="w") as T:
T[I, J] = V
with tiledb.SparseArray(uri, mode="r") as T:
R = T[I, J]
assert_array_equal(V["ints"], R["ints"])
assert_array_equal(V["floats"], R["floats"])
# check error attribute does not exist
# TODO: should this be an attribute error?
with tiledb.SparseArray(uri, mode="w") as T:
V["foo"] = V["ints"].astype(np.int8)
with self.assertRaises(tiledb.TileDBError):
T[I, J] = V
# check error ncells length
V["ints"] = V["ints"][1:2].copy()
with self.assertRaises(AttributeError):
T[I, J] = V
def test_query_real_multi_index(self, sparse_cell_order):
uri = self.path("query_real_multi_index")
dom = tiledb.Domain(
tiledb.Dim("x", domain=(-10.0, 10.0), tile=2.0, dtype=float)
)
attr = tiledb.Attr("a", dtype=np.float32)
schema = tiledb.ArraySchema(
domain=dom, attrs=(attr,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(uri, schema)
values = np.array([3.3, 2.7])
with tiledb.SparseArray(uri, mode="w") as T:
T[[2.5, 4.2]] = values
with tiledb.SparseArray(uri, mode="r") as T:
assert_array_equal(
T.query(coords=True).multi_index[-10.0 : np.nextafter(4.2, 0)]["a"],
np.float32(3.3),
)
assert_array_equal(
T.query(coords=True).multi_index[-10.0 : np.nextafter(4.2, 0)]["x"],
np.float32([2.5]),
)
assert_array_equal(
T.query(coords=False).multi_index[-10.0:5.0]["a"],
np.float32([3.3, 2.7]),
)
self.assertTrue(
"coords" not in T.query(coords=False).multi_index[-10.0:5.0]
)
@pytest.mark.parametrize("dtype", INTEGER_DTYPES)
def test_sparse_index_dtypes(self, dtype):
path = self.path()
data = np.arange(0, 3).astype(dtype)
schema = schema_from_dict(attrs={"attr": data}, dims={"d0": data})
tiledb.SparseArray.create(path, schema)
with tiledb.open(path, "w") as A:
A[data] = data
with tiledb.open(path) as B:
assert_array_equal(B[:]["attr"], data)
assert B[data[0]]["attr"] == data[0]
assert B[data[1]]["attr"] == data[1]
assert B.multi_index[data[0]]["attr"] == data[0]
def test_query_real_exact(self, sparse_cell_order):
"""
Test and demo of querying at floating point representable boundaries
Concise representation of expected behavior:
c0,c1,c2 = [3.0100000000000002, 3.0100000000000007, 3.010000000000001]
values = [1,2,3]
[c0:c0] -> [1]
[c1:c1] -> [2]
[c2:c2] -> [3]
[c0:c1] -> [1,2]
[c0:c2] -> [1,2,3]
[c0 - nextafter(c0,0) : c0] -> [1]
[c0 - nextafter(c0,0) : c0 - nextafter(c0,0)] -> []
[c2:c2+nextafter(c2)] -> [3]
[c2+nextafter(c2) : c2+nextafter(c2)] -> []
"""
uri = self.path()
dom = tiledb.Domain(
tiledb.Dim("x", domain=(-10.0, 10.0), tile=2.0, dtype=float)
)
attr = tiledb.Attr("", dtype=np.float32)
schema = tiledb.ArraySchema(
domain=dom, attrs=(attr,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(uri, schema)
c0 = np.nextafter(3.01, 4) # smaller
c1 = np.nextafter(c0, 4)
c2 = np.nextafter(c1, 4) # larger
# for debugging use:
# np.set_printoptions(precision=16, floatmode='maxprec')
# print(c0,c1,c2)
values = np.array([1, 2, 3])
with tiledb.SparseArray(uri, mode="w") as T:
T[[c0, c1, c2]] = values
with tiledb.SparseArray(uri, mode="r") as T:
for i, c in enumerate([c0, c1, c2]):
assert_array_equal(T.query(coords=True).multi_index[c:c][""], values[i])
# test (coord, coord + nextafter)
c0_prev = np.nextafter(c0, 0)
c2_next = np.nextafter(c2, 4)
assert_array_equal(T.query(coords=True).multi_index[c0:c1][""], [1, 2])
assert_array_equal(T.query(coords=True).multi_index[c0:c2][""], [1, 2, 3])
assert_array_equal(T.query(coords=True).multi_index[c2:c2_next][""], 3)
assert_array_equal(T.query(coords=True).multi_index[c0_prev:c0][""], 1)
assert_array_equal(
T.query(coords=True).multi_index[c0_prev:c0_prev][""], []
)
# test (coord + nextafter, coord + nextafter)
assert_array_equal(
T.query(coords=True).multi_index[c2_next:c2_next][""], np.array([])
)
# test (coord - nextafter, coord)
assert_array_equal(
T.query(coords=True).multi_index[c0:c1][""], values[[0, 1]]
)
# test (coord - nextafter, coord + nextafter)
assert_array_equal(
T.query(coords=True).multi_index[c0:c2][""], values[[0, 1, 2]]
)
def test_sparse_query_specified_dim_coords(self, sparse_cell_order):
uri = self.path("sparse_query_specified_dim_coords")
dom = tiledb.Domain(
tiledb.Dim("i", domain=(1, 10), tile=1, dtype=int),
tiledb.Dim("j", domain=(11, 20), tile=1, dtype=int),
)
att = tiledb.Attr("", dtype=int)
schema = tiledb.ArraySchema(
domain=dom, attrs=(att,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(uri, schema)
i = np.array([1, 1, 1, 2, 3, 3, 3, 4])
j = np.array([11, 12, 14, 13, 11, 16, 17, 15])
with tiledb.SparseArray(uri, mode="w") as A:
A[i, j] = np.array([0, 1, 2, 3, 4, 6, 7, 5])
# when the cell order is hilbert, data is returned in Hilbert order, so compare unordered in that case
with tiledb.SparseArray(uri, mode="r") as A:
Ai = A.query(dims=["i"])[:]
self.assertTrue("i" in Ai)
self.assertFalse("j" in Ai)
assert_unordered_equal(Ai["i"], i, sparse_cell_order == "hilbert")
Aj = A.query(dims=["j"])[:]
self.assertFalse("i" in Aj)
self.assertTrue("j" in Aj)
assert_unordered_equal(Aj["j"], j, sparse_cell_order == "hilbert")
Aij = A.query(dims=["i", "j"])[:]
self.assertTrue("i" in Aij)
self.assertTrue("j" in Aij)
assert_unordered_equal(Aij["i"], i, sparse_cell_order == "hilbert")
assert_unordered_equal(Aij["j"], j, sparse_cell_order == "hilbert")
def test_dense_query_specified_dim_coords(self):
uri = self.path("dense_query_specified_dim_coords")
dom = tiledb.Domain(
tiledb.Dim("i", domain=(1, 3), tile=1, dtype=int),
tiledb.Dim("j", domain=(4, 6), tile=1, dtype=int),
)
att = tiledb.Attr("", dtype=int)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,), sparse=False)
tiledb.DenseArray.create(uri, schema)
with tiledb.DenseArray(uri, mode="w") as A:
A[:, :] = np.arange(9)
with tiledb.DenseArray(uri, mode="r") as A:
i = np.array([[1, 1, 1], [2, 2, 2], [3, 3, 3]])
j = np.array([[4, 5, 6], [4, 5, 6], [4, 5, 6]])
Ai = A.query(dims=["i"])[:]
self.assertTrue("i" in Ai)
self.assertFalse("j" in Ai)
assert_array_equal(Ai["i"], i)
Aj = A.query(dims=["j"])[:]
self.assertFalse("i" in Aj)
self.assertTrue("j" in Aj)
assert_array_equal(Aj["j"], j)
Aij = A.query(dims=["i", "j"])[:]
self.assertTrue("i" in Aij)
self.assertTrue("j" in Aij)
assert_array_equal(Aij["i"], i)
assert_array_equal(Aij["j"], j)
def test_subarray(self, sparse_cell_order):
dom = tiledb.Domain(tiledb.Dim("x", domain=(1, 10000), tile=100, dtype=int))
att = tiledb.Attr("", dtype=float)
schema = tiledb.ArraySchema(
domain=dom, attrs=(att,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(self.path("foo"), schema)
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
self.assertIsNone(T.nonempty_domain())
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[50, 60, 100]] = [1.0, 2.0, 3.0]
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
self.assertEqual(((50, 100),), T.nonempty_domain())
# retrieve just valid coordinates in subarray T[40:60]
assert_array_equal(T[40:61]["x"], [50, 60])
# TODO: dropping coords with one anon value returns just an array
res = T.query(coords=False)[40:61]
assert_array_equal(res[""], [1.0, 2.0])
self.assertEqual(("coords" in res), False)
def test_sparse_bytes(self, sparse_cell_order):
dom = tiledb.Domain(tiledb.Dim("x", domain=(1, 10000), tile=100, dtype=int))
att = tiledb.Attr("", var=True, dtype=np.bytes_)
schema = tiledb.ArraySchema(
domain=dom, attrs=(att,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(self.path("foo"), schema)
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
self.assertIsNone(T.nonempty_domain())
A = np.array(
[b"aaa", b"bbbbbbbbbbbbbbbbbbbb", b"ccccccccccccccccccccccccc"],
dtype=np.bytes_,
)
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[50, 60, 100]] = A
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
self.assertEqual(((50, 100),), T.nonempty_domain())
# retrieve just valid coordinates in subarray T[40:60]
assert_array_equal(T[40:61]["x"], [50, 60])
# TODO: dropping coords with one anon value returns just an array
res = T.query(coords=False)[40:61]
assert_array_equal(res[""], A[0:2])
self.assertEqual(("coords" in res), False)
# empty sparse varlen result
res = T[1000]
assert_array_equal(res[""], np.array("", dtype="S1"))
assert_array_equal(res["x"], np.array([], dtype=np.int64))
def test_sparse_unicode(self, sparse_cell_order):
dom = tiledb.Domain(tiledb.Dim("x", domain=(1, 10000), tile=100, dtype=int))
att = tiledb.Attr("", var=True, dtype=np.unicode_)
schema = tiledb.ArraySchema(
domain=dom, attrs=(att,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(self.path("foo"), schema)
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
self.assertIsNone(T.nonempty_domain())
A = np.array(
[
"1234545lkjalsdfj",
"mnopqrs",
"ijkl",
"gh",
"abcdef",
"aαbββcγγγdδδδδ",
"aαbββc",
"",
"γγγdδδδδ",
],
dtype=object,
)
with tiledb.SparseArray(self.path("foo"), mode="w") as T:
T[[3, 4, 5, 6, 7, 50, 60, 70, 100]] = A
with tiledb.SparseArray(self.path("foo"), mode="r") as T:
self.assertEqual(((3, 100),), T.nonempty_domain())
# retrieve just valid coordinates in subarray T[40:60]
assert_array_equal(T[40:61]["x"], [50, 60])
# TODO: dropping coords with one anon value returns just an array
res = T.query(coords=False)[40:61]
assert_array_equal(res[""], A[5:7])
self.assertEqual(("coords" in res), False)
# empty sparse varlen result
res = T[1000]
assert_array_equal(res[""], np.array("", dtype="U1"))
assert_array_equal(res["x"], np.array([], dtype=np.int64))
def test_sparse_query(self, sparse_cell_order):
uri = self.path("test_sparse_query")
dom = tiledb.Domain(
tiledb.Dim("x", domain=(1, 10000), tile=100, dtype=np.float64)
)
att = tiledb.Attr("", dtype=float)
schema = tiledb.ArraySchema(
domain=dom, attrs=(att,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(uri, schema)
coords = np.random.uniform(low=1, high=10000, size=100)
data = np.random.rand(100)
with tiledb.SparseArray(uri, mode="w") as T:
T[coords] = data
# Test that TILEDB_UNORDERED works correctly
with tiledb.SparseArray(uri, mode="r") as A:
res = A[1:10001][""] # index past the end here to ensure inclusive result
res = A.multi_index[1:10000][""]
assert_array_equal(np.sort(res), np.sort(data))
res = A.query(order="U").multi_index[1:10000][""]
assert_array_equal(np.sort(res), np.sort(data))
def test_sparse_fixes(self, sparse_cell_order):
uri = self.path("test_sparse_fixes")
# indexing a 1 element item in a sparse array
# (issue directly reported)
# the test here is that the indexing does not raise
dims = (
tiledb.Dim("foo", domain=(0, 6), tile=2),
tiledb.Dim("bar", domain=(0, 6), tile=1),
tiledb.Dim("baz", domain=(0, 100), tile=1),
)
dom = tiledb.Domain(*dims)
att = tiledb.Attr(name="strattr", dtype="S1")
schema = tiledb.ArraySchema(
domain=dom, attrs=(att,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(uri, schema)
with tiledb.SparseArray(uri) as T:
T[:]
# - test that assigning incompatible value to fixed-len str raises error
# - test that value-conversion error raises exception w/ attr name context
c = np.vstack(
list((x, y, z) for x in range(7) for y in range(7) for z in range(101))
)
with tiledb.SparseArray(uri, "w") as T:
with self.assertRaises(ValueError):
T[c[:, 0], c[:, 1], c[:, 2]] = {"strattr": np.random.rand(7, 7, 101)}
save_exc = list()
try:
T[c[:, 0], c[:, 1], c[:, 2]] = {"strattr": np.random.rand(7, 7, 101)}
except ValueError as e:
save_exc.append(e)
exc = save_exc.pop()
self.assertEqual(
str(exc.__context__),
"Cannot write a string value to non-string typed attribute 'strattr'!",
)
@tiledb.scope_ctx({"sm.check_coord_dups": False})
def test_sparse_fixes_ch1560(self, sparse_cell_order):
uri = self.path("sparse_fixes_ch1560")
schema = tiledb.ArraySchema(
domain=tiledb.Domain(
*[tiledb.Dim(name="id", domain=(1, 5000), tile=25, dtype="int32")]
),
attrs=[
tiledb.Attr(name="a1", dtype="datetime64[s]"),
tiledb.Attr(name="a2", dtype="|S0"),
tiledb.Attr(name="a3", dtype="|S0"),
tiledb.Attr(name="a4", dtype="int32"),
tiledb.Attr(name="a5", dtype="int8"),
tiledb.Attr(name="a6", dtype="int32"),
],
cell_order=sparse_cell_order,
tile_order="row-major",
sparse=True,
)
tiledb.SparseArray.create(uri, schema)
data = OrderedDict(
[
(
"a1",
np.array(
[
"2017-04-01T04:00:00",
"2019-10-01T00:00:00",
"2019-10-01T00:00:00",
"2019-10-01T00:00:00",
],
dtype="datetime64[s]",
),
),
("a2", [b"Bus", b"The RIDE", b"The RIDE", b"The RIDE"]),
("a3", [b"Bus", b"The RIDE", b"The RIDE", b"The RIDE"]),
("a4", np.array([6911721, 138048, 138048, 138048], dtype="int32")),
("a5", np.array([20, 23, 23, 23], dtype="int8")),
("a6", np.array([345586, 6002, 6002, 6002], dtype="int32")),
]
)
with tiledb.open(uri, "w") as A:
A[[1, 462, 462, 462]] = data
with tiledb.open(uri) as A:
res = A[:]
res.pop("id")
for k, v in res.items():
if isinstance(data[k], (np.ndarray, list)):
assert_array_equal(res[k], data[k])
else:
self.assertEqual(res[k], data[k])
def test_sparse_2d_varlen_int(self, sparse_cell_order):
path = self.path("test_sparse_2d_varlen_int")
dtype = np.int32
dom = tiledb.Domain(
tiledb.Dim(domain=(1, 4), tile=2), tiledb.Dim(domain=(1, 4), tile=2)
)
att = tiledb.Attr(dtype=dtype, var=True)
schema = tiledb.ArraySchema(
dom, (att,), sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(path, schema)
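# with the hilbert cell order (libtiledb 2.3+) results are returned in Hilbert
# order, so the coordinates are chosen so that the read-back order matches `data`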
if tiledb.libtiledb.version() >= (2, 3) and sparse_cell_order == "hilbert":
c1 = np.array([2, 1, 3, 4])
c2 = np.array([1, 2, 3, 4])
else:
c1 = np.array([1, 2, 3, 4])
c2 = np.array([2, 1, 3, 4])
data = np.array(
[
np.array([1, 1], dtype=np.int32),
np.array([2], dtype=np.int32),
np.array([3, 3, 3], dtype=np.int32),
np.array([4], dtype=np.int32),
],
dtype="O",
)
with tiledb.SparseArray(path, "w") as A:
A[c1, c2] = data
with tiledb.SparseArray(path) as A:
res = A[:]
assert_subarrays_equal(res[""], data)
assert_unordered_equal(res["__dim_0"], c1)
assert_unordered_equal(res["__dim_1"], c2)
def test_sparse_mixed_domain_uint_float64(self, sparse_cell_order):
path = self.path("mixed_domain_uint_float64")
dims = [
tiledb.Dim(name="index", domain=(0, 51), tile=11, dtype=np.uint64),
tiledb.Dim(name="dpos", domain=(-100.0, 100.0), tile=10, dtype=np.float64),
]
dom = tiledb.Domain(*dims)
attrs = [tiledb.Attr(name="val", dtype=np.float64)]
schema = tiledb.ArraySchema(
domain=dom, attrs=attrs, sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(path, schema)
data = np.random.rand(50, 63)
coords1 = np.repeat(np.arange(0, 50), 63)
coords2 = np.linspace(-100.0, 100.0, num=3150)
with tiledb.open(path, "w") as A:
A[coords1, coords2] = data
# tiledb returns coordinates in sorted order, so we need to check the output
# sorted by the first dim coordinates
sidx = np.argsort(coords1, kind="stable")
coords2_idx = np.tile(np.arange(0, 63), 50)[sidx]
with tiledb.open(path) as A:
res = A[:]
assert_subarrays_equal(
data[coords1[sidx], coords2_idx[sidx]],
res["val"],
sparse_cell_order != "hilbert",
)
a_nonempty = A.nonempty_domain()
self.assertEqual(a_nonempty[0], (0, 49))
self.assertEqual(a_nonempty[1], (-100.0, 100.0))
def test_sparse_string_domain(self, sparse_cell_order):
path = self.path("sparse_string_domain")
dom = tiledb.Domain(tiledb.Dim(name="d", domain=(None, None), dtype=np.bytes_))
att = tiledb.Attr(name="a", dtype=np.int64)
schema = tiledb.ArraySchema(
domain=dom,
attrs=(att,),
sparse=True,
cell_order=sparse_cell_order,
capacity=10000,
)
tiledb.SparseArray.create(path, schema)
data = [1, 2, 3, 4]
coords = [b"aa", b"bbb", b"c", b"dddd"]
with tiledb.open(path, "w") as A:
A[coords] = data
with tiledb.open(path) as A:
ned = A.nonempty_domain()[0]
res = A[ned[0] : ned[1]]
assert_array_equal(res["a"], data)
self.assertEqual(set(res["d"]), set(coords))
self.assertEqual(A.nonempty_domain(), ((b"aa", b"dddd"),))
def test_sparse_string_domain2(self, sparse_cell_order):
path = self.path("sparse_string_domain2")
with self.assertRaises(ValueError):
dims = [
tiledb.Dim(
name="str", domain=(None, None, None), tile=None, dtype=np.bytes_
)
]
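# a string dimension must use domain=(None, None); the 3-element domain above is rejected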
dims = [tiledb.Dim(name="str", domain=(None, None), tile=None, dtype=np.bytes_)]
dom = tiledb.Domain(*dims)
attrs = [tiledb.Attr(name="val", dtype=np.float64)]
schema = tiledb.ArraySchema(
domain=dom, attrs=attrs, sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(path, schema)
data = np.random.rand(10)
coords = [rand_ascii_bytes(random.randint(5, 50)) for _ in range(10)]
with tiledb.open(path, "w") as A:
A[coords] = data
with tiledb.open(path) as A:
ned = A.nonempty_domain()[0]
res = A[ned[0] : ned[1]]
self.assertTrue(set(res["str"]) == set(coords))
# must check data ordered by coords
assert_array_equal(res["val"], data[np.argsort(coords, kind="stable")])
def test_sparse_mixed_domain(self, sparse_cell_order):
uri = self.path("sparse_mixed_domain")
dims = [
tiledb.Dim(name="p", domain=(-100.0, 100.0), tile=10, dtype=np.float64),
tiledb.Dim(name="str", domain=(None, None), tile=None, dtype=np.bytes_),
]
dom = tiledb.Domain(*dims)
attrs = [tiledb.Attr(name="val", dtype=np.float64)]
schema = tiledb.ArraySchema(
domain=dom, attrs=attrs, sparse=True, cell_order=sparse_cell_order
)
tiledb.SparseArray.create(uri, schema)
nrows = 5
idx_f64 = np.random.rand(nrows)
idx_str = [rand_ascii(5).encode("utf-8") for _ in range(nrows)]
data = np.random.rand(nrows)
with tiledb.SparseArray(uri, "w") as A:
A[idx_f64, idx_str] = {"val": data}
# test heterogeneous dim nonempty_domain
ned_f64 = (np.array(np.min(idx_f64)), np.array(np.max(idx_f64)))
idx_str.sort()
ned_str = idx_str[0], idx_str[-1]
with tiledb.SparseArray(uri, "r") as A:
self.assertEqual(A.nonempty_domain(), (ned_f64, ned_str))
def test_sparse_get_unique_dim_values(self, sparse_cell_order):
uri = self.path("get_non_empty_coords")
dim1 = tiledb.Dim(name="dim1", domain=(None, None), tile=None, dtype=np.bytes_)
dim2 = tiledb.Dim(name="dim2", domain=(0, 1), tile=1, dtype=np.float64)
attr = tiledb.Attr(name="attr", dtype=np.float32)
dom = tiledb.Domain(dim1, dim2)
schema = tiledb.ArraySchema(
domain=dom, sparse=True, cell_order=sparse_cell_order, attrs=[attr]
)
tiledb.Array.create(uri, schema)
with tiledb.open(uri, "w") as A:
A["a1", 0] = 1
A["a1", 0.25] = 2
A["a2", 0.5] = 3
A["a3", 0.25] = 4
with tiledb.open(uri, "r") as A:
self.assertEqual(
A.unique_dim_values(),
OrderedDict(
[("dim1", (b"a1", b"a2", b"a3")), ("dim2", (0.0, 0.25, 0.5))]
),
)
self.assertEqual(A.unique_dim_values("dim1"), (b"a1", b"a2", b"a3"))
self.assertEqual(A.unique_dim_values("dim2"), (0, 0.25, 0.5))
with self.assertRaises(ValueError):
A.unique_dim_values(0)
with self.assertRaises(ValueError):
A.unique_dim_values("dim3")
class TestDenseIndexing(DiskTestCase):
def _test_index(self, A, T, idx):
expected = A[idx]
actual = T[idx]
assert_array_equal(expected, actual)
good_index_1d = [
# single value
42,
-1,
# slices
slice(0, 1050),
slice(50, 150),
slice(0, 2000),
slice(-150, -50),
# TODO: indexing failures
# slice(-2000, 2000),
# slice(0, 0), # empty result
# slice(-1, 0), # empty result
# total selections
slice(None),
Ellipsis,
(),
(Ellipsis, slice(None)),
# slice with step
slice(None),
slice(None, None),
slice(None, None, 1),
slice(None, None, 10),
slice(None, None, 100),
slice(None, None, 1000),
slice(None, None, 10000),
slice(0, 1050),
slice(0, 1050, 1),
slice(0, 1050, 10),
slice(0, 1050, 100),
slice(0, 1050, 1000),
slice(0, 1050, 10000),
slice(1, 31, 3),
slice(1, 31, 30),
slice(1, 31, 300),
slice(81, 121, 3),
slice(81, 121, 30),
slice(81, 121, 300),
slice(50, 150),
slice(50, 150, 1),
slice(50, 150, 10),
# TODO: negative steps
slice(None, None, -1),
slice(None, None, -10),
slice(None, None, -100),
slice(None, None, -1000),
slice(None, None, -10000),
# slice(1050, -1, -1),
# slice(1050, -1, -10),
# slice(1050, -1, -100),
# slice(1050, -1, -1000),
# slice(1050, -1, -10000),
# slice(1050, 0, -1),
# slice(1050, 0, -10),
# slice(1050, 0, -100),
# slice(1050, 0, -1000),
# slice(1050, 0, -10000),
# slice(150, 50, -1),
# slice(150, 50, -10),
# slice(31, 1, -3),
# slice(121, 81, -3),
# slice(-1, 0, -1),
]
bad_index_1d = [2.3, "foo", b"xxx", None, (0, 0), (slice(None), slice(None))]
def test_index_1d(self):
A = np.arange(1050, dtype=int)
dom = tiledb.Domain(tiledb.Dim(domain=(0, 1049), tile=100))
att = tiledb.Attr(dtype=int)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
for idx in self.good_index_1d:
self._test_index(A, T, idx)
for idx in self.bad_index_1d:
with self.assertRaises(IndexError):
T[idx]
good_index_2d = [
# single row
42,
-1,
(42, slice(None)),
(-1, slice(None)),
# single col
(slice(None), 4),
(slice(None), -1),
# row slices
slice(None),
slice(0, 1000),
slice(250, 350),
slice(0, 2000),
slice(-350, -250),
slice(0, 0), # empty result
slice(-1, 0), # empty result
slice(-2000, 0),
slice(-2000, 2000),
# 2D slices
(slice(None), slice(1, 5)),
(slice(250, 350), slice(None)),
(slice(250, 350), slice(1, 5)),
(slice(250, 350), slice(-5, -1)),
(slice(250, 350), slice(-50, 50)),
(slice(250, 350, 10), slice(1, 5)),
(slice(250, 350), slice(1, 5, 2)),
(slice(250, 350, 33), slice(1, 5, 3)),
# total selections
(slice(None), slice(None)),
Ellipsis,
(),
(Ellipsis, slice(None)),
(Ellipsis, slice(None), slice(None)),
# TODO: negative steps
# slice(None, None, -1),
# (slice(None, None, -1), slice(None)),
]
bad_index_2d = [
2.3,
"foo",
b"xxx",
None,
(2.3, slice(None)),
(0, 0, 0),
(slice(None), slice(None), slice(None)),
]
def test_index_2d(self):
A = np.arange(10000).reshape((1000, 10))
dom = tiledb.Domain(
tiledb.Dim(domain=(0, 999), tile=100), tiledb.Dim(domain=(0, 9), tile=2)
)
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(self.path("foo"), schema)
with tiledb.DenseArray(self.path("foo"), mode="w") as T:
T[:] = A
with tiledb.DenseArray(self.path("foo"), mode="r") as T:
for idx in self.good_index_2d:
self._test_index(A, T, idx)
for idx in self.bad_index_2d:
with self.assertRaises(IndexError):
T[idx]
class TestFilterTest(unittest.TestCase):
def test_filter(self):
gzip_filter = tiledb.libtiledb.GzipFilter(level=10)
self.assertIsInstance(gzip_filter, tiledb.libtiledb.Filter)
self.assertEqual(gzip_filter.level, 10)
bw_filter = tiledb.libtiledb.BitWidthReductionFilter(window=10)
self.assertIsInstance(bw_filter, tiledb.libtiledb.Filter)
self.assertEqual(bw_filter.window, 10)
filter_list = tiledb.libtiledb.FilterList(
[gzip_filter, bw_filter], chunksize=1024
)
self.assertEqual(filter_list.chunksize, 1024)
self.assertEqual(len(filter_list), 2)
self.assertEqual(filter_list[0].level, gzip_filter.level)
self.assertEqual(filter_list[1].window, bw_filter.window)
# test filter list iteration
self.assertEqual(len(list(filter_list)), 2)
# test `filters` kwarg accepts python list of filters
tiledb.Attr("foo", dtype=np.int64, filters=[gzip_filter])
tiledb.Attr("foo", dtype=np.int64, filters=(gzip_filter,))
attr = tiledb.Attr("foo", dtype=np.int64, filters=filter_list)
self.assertEqual(len(attr.filters), 2)
self.assertEqual(attr.filters.chunksize, filter_list.chunksize)
def test_filter_list(self):
# should be constructible without a `filters` keyword arg set
filter_list1 = tiledb.FilterList()
filter_list1.append(tiledb.GzipFilter())
self.assertEqual(len(filter_list1), 1)
filter_list2 = [x for x in filter_list1]
attr = tiledb.Attr(filters=filter_list2)
self.assertEqual(len(attr.filters), 1)
def test_all_filters(self):
# test initialization
filters = [
tiledb.NoOpFilter(),
tiledb.GzipFilter(),
tiledb.ZstdFilter(),
tiledb.LZ4Filter(),
tiledb.RleFilter(),
tiledb.Bzip2Filter(),
tiledb.DoubleDeltaFilter(),
tiledb.BitWidthReductionFilter(),
tiledb.BitShuffleFilter(),
tiledb.ByteShuffleFilter(),
tiledb.PositiveDeltaFilter(),
tiledb.ChecksumSHA256Filter(),
tiledb.ChecksumMD5Filter(),
]
# make sure that repr works and round-trips correctly
for f in filters:
# some of these have attributes, so we just check the class name here
self.assertTrue(type(f).__name__ in repr(f))
tmp_globals = dict()
setup = "from tiledb import *"
exec(setup, tmp_globals)
filter_repr = repr(f)
new_filter = None
try:
new_filter = eval(filter_repr, tmp_globals)
except Exception as exc:
warn_str = (
"""Exception during FilterTest filter repr eval"""
+ """, filter repr string was:\n"""
+ """'''"""
+ """\n{}\n'''""".format(filter_repr)
)
warnings.warn(warn_str)
raise
self.assertEqual(new_filter, f)
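# Hedged convenience sketch (not an original test): the same filter-pipeline construction
# exercised by TestFilterTest above, packaged as a standalone helper. The level, window
# and chunksize values here are arbitrary illustrations, not recommended defaults.
def _example_filtered_attr(name="a"):
    import numpy as np
    import tiledb

    pipeline = tiledb.FilterList(
        [tiledb.GzipFilter(level=5), tiledb.BitWidthReductionFilter(window=8)],
        chunksize=4096,
    )
    # the `filters` kwarg accepts a FilterList as well as a plain list/tuple of filters
    return tiledb.Attr(name, dtype=np.int64, filters=pipeline)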
class TestDatetimeSlicing(DiskTestCase):
def test_dense_datetime_vector(self):
uri = self.path("foo_datetime_vector")
# Domain is 10 years, day resolution, one tile per 365 days
dim = tiledb.Dim(
name="d1",
domain=(np.datetime64("2010-01-01"), np.datetime64("2020-01-01")),
tile=np.timedelta64(365, "D"),
dtype=np.datetime64("", "D").dtype,
)
dom = tiledb.Domain(dim)
schema = tiledb.ArraySchema(
domain=dom, attrs=(tiledb.Attr("a1", dtype=np.float64),)
)
tiledb.Array.create(uri, schema)
# Write a few years of data at the beginning using a timedelta object
ndays = 365 * 2
a1_vals = np.random.rand(ndays)
start = np.datetime64("2010-01-01")
# Datetime indexing is inclusive on both ends, so the end point is ndays - 1 from start
end = start + np.timedelta64(ndays - 1, "D")
with tiledb.DenseArray(uri, "w") as T:
T[start:end] = {"a1": a1_vals}
# Read back data
with tiledb.DenseArray(uri, "r", attr="a1") as T:
assert_array_equal(T[start:end], a1_vals)
# Check nonempty domain
with tiledb.DenseArray(uri, "r") as T:
nonempty = T.nonempty_domain()
d1_nonempty = nonempty[0]
self.assertEqual(d1_nonempty[0].dtype, np.datetime64("", "D"))
self.assertEqual(d1_nonempty[1].dtype, np.datetime64("", "D"))
self.assertTupleEqual(d1_nonempty, (start, end))
# Slice a few days from the middle using two datetimes
with tiledb.DenseArray(uri, "r", attr="a1") as T:
# Slice using datetimes
actual = T[np.datetime64("2010-11-01") : np.datetime64("2011-01-31")]
# Convert datetime interval to integer offset/length into original array
# must be cast to int because float slices are not allowed in NumPy 1.12+
read_offset = int(
(np.datetime64("2010-11-01") - start) / np.timedelta64(1, "D")
)
read_ndays = int(
(np.datetime64("2011-01-31") - np.datetime64("2010-11-01") + 1)
/ np.timedelta64(1, "D")
)
expected = a1_vals[read_offset : read_offset + read_ndays]
assert_array_equal(actual, expected)
# Slice the first year
with tiledb.DenseArray(uri, "r", attr="a1") as T:
actual = T[np.datetime64("2010") : np.datetime64("2011")]
# Convert datetime interval to integer offset/length into original array
read_offset = int(
(np.datetime64("2010-01-01") - start) / np.timedelta64(1, "D")
)
read_ndays = int(
(np.datetime64("2011-01-01") - np.datetime64("2010-01-01") + 1)
/ np.timedelta64(1, "D")
)
expected = a1_vals[read_offset : read_offset + read_ndays]
assert_array_equal(actual, expected)
# Slice open spans
with tiledb.DenseArray(uri, "r", attr="a1") as T:
# Convert datetime interval to integer offset/length into original array
read_offset = int(
(np.datetime64("2010-01-01") - start) / np.timedelta64(1, "D")
)
read_ndays = int(
(np.datetime64("2011-01-31") - np.datetime64("2010-01-01") + 1)
/ np.timedelta64(1, "D")
)
expected = a1_vals[read_offset : read_offset + read_ndays]
# note we only wrote first two years
actual = T.multi_index[np.datetime64("2010-01-01") :]["a1"][:read_ndays]
assert_array_equal(actual, expected)
actual2 = T[np.datetime64("2010-01-01") :][:read_ndays]
assert_array_equal(actual2, expected)
def test_sparse_datetime_vector(self, sparse_cell_order):
uri = self.path("foo_datetime_sparse_vector")
# ns resolution, one tile per second, max domain possible
dim = tiledb.Dim(
name="d1",
domain=(
np.datetime64(0, "ns"),
np.datetime64(int(np.iinfo(np.int64).max) - 1000000000, "ns"),
),
tile=np.timedelta64(1, "s"),
dtype=np.datetime64("", "ns").dtype,
)
self.assertEqual(dim.tile, np.timedelta64("1000000000", "ns"))
dom = tiledb.Domain(dim)
schema = tiledb.ArraySchema(
domain=dom,
sparse=True,
cell_order=sparse_cell_order,
attrs=(tiledb.Attr("a1", dtype=np.float64),),
)
tiledb.Array.create(uri, schema)
# Write 10k cells every 1000 ns starting at time 0
coords = np.datetime64(0, "ns") + np.arange(0, 10000 * 1000, 1000)
a1_vals = np.random.rand(len(coords))
with tiledb.SparseArray(uri, "w") as T:
T[coords] = {"a1": a1_vals}
# Read all
with tiledb.SparseArray(uri, "r") as T:
assert_array_equal(T[:]["a1"], a1_vals)
# Read back the first cells (the end bound is inclusive, so 11 values come back)
with tiledb.SparseArray(uri, "r") as T:
start = np.datetime64(0, "ns")
vals = T[start : start + np.timedelta64(10000, "ns")]["a1"]
assert_array_equal(vals, a1_vals[0:11])
# Test open ended ranges multi_index
vals2 = T.multi_index[start:]["a1"]
assert_array_equal(vals2, a1_vals)
stop = np.datetime64(int(np.iinfo(np.int64).max) - 1000000000, "ns")
vals3 = T.multi_index[:stop]["a1"]
assert_array_equal(vals3, a1_vals)
def test_datetime_types(self, sparse_cell_order):
units = ["h", "m", "s", "ms", "us", "ns", "ps", "fs"]
for res in units:
uri = self.path("test_datetime_type_" + res)
tmax = 1000
tile = np.timedelta64(1, res)
dim = tiledb.Dim(
name="d1",
domain=(None, None),
tile=tile,
dtype=np.datetime64("", res).dtype,
)
dom = tiledb.Domain(dim)
schema = tiledb.ArraySchema(
domain=dom,
sparse=True,
cell_order=sparse_cell_order,
attrs=(tiledb.Attr("a1", dtype=np.float64),),
)
tiledb.Array.create(uri, schema)
# Write tmax cells every 10 units starting at time 0
coords = np.datetime64(0, res) + np.arange(0, tmax, 10)
a1_vals = np.random.rand(len(coords))
with tiledb.SparseArray(uri, "w") as T:
T[coords] = {"a1": a1_vals}
# Read all
with tiledb.SparseArray(uri, "r") as T:
assert_array_equal(T[:]["a1"], a1_vals)
# Read back the first cells (the end bound is inclusive, so 11 values come back)
with tiledb.SparseArray(uri, "r") as T:
start = np.datetime64(0, res)
vals = T[start : start + np.timedelta64(int(tmax / 10), res)]["a1"]
assert_array_equal(vals, a1_vals[0:11])
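# Standalone arithmetic sketch (hedged, not an original test): how a datetime slice maps to
# integer offsets into the backing array, mirroring the comments inside
# TestDatetimeSlicing.test_dense_datetime_vector above. Pure numpy, no array access needed.
def _example_datetime_offsets():
    import numpy as np

    start = np.datetime64("2010-01-01")
    lo, hi = np.datetime64("2010-11-01"), np.datetime64("2011-01-31")
    read_offset = int((lo - start) / np.timedelta64(1, "D"))  # days from the array start
    read_ndays = int((hi - lo + 1) / np.timedelta64(1, "D"))  # +1 because slicing is inclusive
    return read_offset, read_ndays  # -> (304, 92)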
class PickleTest(DiskTestCase):
# test that DenseArray and View can be pickled for multiprocess use
# note that the current pickling is by URI and attributes (it is
# not, and likely should not be, a way to serialize array data)
@pytest.mark.parametrize("sparse", [True, False])
def test_pickle_roundtrip(self, sparse):
uri = self.path("test_pickle_roundtrip")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), tile=3))
schema = tiledb.ArraySchema(domain=dom, attrs=(tiledb.Attr(""),), sparse=sparse)
tiledb.libtiledb.Array.create(uri, schema)
with tiledb.open(uri, "w") as T:
if sparse:
T[[0, 1, 2]] = np.random.randint(10, size=3)
else:
T[:] = np.random.randint(10, size=3)
with tiledb.open(uri, "r") as T:
with io.BytesIO() as buf:
pickle.dump(T, buf)
buf.seek(0)
with pickle.load(buf) as T2:
assert_array_equal(T.df[:], T2.df[:])
with io.BytesIO() as buf, tiledb.open(uri) as V:
pickle.dump(V, buf)
buf.seek(0)
with pickle.load(buf) as V2:
# make sure anonymous view pickles and round-trips
assert_array_equal(V.df[:], V2.df[:])
@tiledb.scope_ctx({"vfs.s3.region": "kuyper-belt-1", "vfs.max_parallel_ops": "1"})
def test_pickle_with_config(self):
uri = self.path("pickle_config")
T = tiledb.DenseArray.from_numpy(uri, np.random.rand(3, 3))
with io.BytesIO() as buf:
pickle.dump(T, buf)
buf.seek(0)
T2 = pickle.load(buf)
assert_array_equal(T, T2)
self.maxDiff = None
d1 = tiledb.default_ctx().config().dict()
d2 = T2._ctx_().config().dict()
self.assertEqual(d1["vfs.s3.region"], d2["vfs.s3.region"])
self.assertEqual(d1["vfs.max_parallel_ops"], d2["vfs.max_parallel_ops"])
T.close()
T2.close()
@pytest.mark.parametrize("sparse", [True, False])
def test_pickle_with_tuple_timestamps(self, sparse):
A = np.random.randint(10, size=3)
path = self.path("test_pickle_with_tuple_timestamps")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), tile=3, dtype=np.int64))
att = tiledb.Attr(dtype=A.dtype)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,), sparse=sparse)
tiledb.libtiledb.Array.create(path, schema)
for ts in range(1, 5):
with tiledb.open(path, timestamp=ts, mode="w") as T:
if sparse:
T[[0, 1, 2]] = A * ts
else:
T[:] = A * ts
with tiledb.open(path, timestamp=(2, 3), mode="r") as T:
with io.BytesIO() as buf:
pickle.dump(T, buf)
buf.seek(0)
with pickle.load(buf) as T2:
assert_array_equal(T.df[:], T2.df[:])
assert T2.timestamp_range == (2, 3)
with io.BytesIO() as buf, tiledb.open(path, timestamp=(2, 3)) as V:
pickle.dump(V, buf)
buf.seek(0)
with pickle.load(buf) as V2:
# make sure anonymous view pickles and round-trips
assert_array_equal(V.df[:], V2.df[:])
assert V2.timestamp_range == (2, 3)
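# Hedged illustration (not an original test): because pickling stores the URI and open
# parameters rather than cell data, unpickling re-opens the same on-disk array. The helper
# below is hypothetical and only restates what PickleTest verifies above.
def _example_pickle_reopens_by_uri(uri):
    import io
    import pickle

    import tiledb

    with tiledb.open(uri) as A, io.BytesIO() as buf:
        pickle.dump(A, buf)
        buf.seek(0)
        with pickle.load(buf) as A2:
            # A2 is a fresh handle onto the same array; no cell data travelled in the pickle
            return A2.schema == A.schema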
class ArrayViewTest(DiskTestCase):
def test_view_multiattr(self):
uri = self.path("foo_multiattr")
dom = tiledb.Domain(
tiledb.Dim(domain=(0, 2), tile=3), tiledb.Dim(domain=(0, 2), tile=3)
)
schema = tiledb.ArraySchema(
domain=dom, attrs=(tiledb.Attr(""), tiledb.Attr("named"))
)
tiledb.libtiledb.Array.create(uri, schema)
anon_ar = np.random.rand(3, 3)
named_ar = np.random.rand(3, 3)
with tiledb.DenseArray(uri, "w") as T:
T[:] = {"": anon_ar, "named": named_ar}
with self.assertRaises(KeyError):
T = tiledb.DenseArray(uri, "r", attr="foo111")
with tiledb.DenseArray(uri, "r", attr="named") as T:
assert_array_equal(T, named_ar)
# make sure each attr view can pickle and round-trip
with io.BytesIO() as buf:
pickle.dump(T, buf)
buf.seek(0)
with pickle.load(buf) as T_rt:
assert_array_equal(T, T_rt)
with tiledb.DenseArray(uri, "r", attr="") as T:
assert_array_equal(T, anon_ar)
with io.BytesIO() as buf:
pickle.dump(T, buf)
buf.seek(0)
with pickle.load(buf) as tmp:
assert_array_equal(tmp, anon_ar)
# set subarray on multi-attribute
range_ar = np.arange(0, 9).reshape(3, 3)
with tiledb.DenseArray(uri, "w", attr="named") as V_named:
V_named[1:3, 1:3] = range_ar[1:3, 1:3]
with tiledb.DenseArray(uri, "r", attr="named") as V_named:
assert_array_equal(V_named[1:3, 1:3], range_ar[1:3, 1:3])
class RWTest(DiskTestCase):
def test_read_write(self, capfd):
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), tile=3))
att = tiledb.Attr(dtype="int64")
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.libtiledb.Array.create(self.path("foo"), schema)
np_array = np.array([1, 2, 3], dtype="int64")
with tiledb.DenseArray(self.path("foo"), mode="w") as arr:
arr.write_direct(np_array)
with tiledb.DenseArray(self.path("foo"), mode="r") as arr:
arr.dump()
assert_captured(capfd, "Array type: dense")
self.assertEqual(arr.nonempty_domain(), ((0, 2),))
self.assertEqual(arr.ndim, np_array.ndim)
assert_array_equal(arr.read_direct(), np_array)
class TestNumpyToArray(DiskTestCase):
def test_to_array0d(self):
# Cannot create 0-dim arrays in TileDB
np_array = np.array(1)
with self.assertRaises(tiledb.TileDBError):
with tiledb.DenseArray.from_numpy(self.path("foo"), np_array) as A:
pass
def test_to_array1d(self):
np_array = np.array([1.0, 2.0, 3.0])
with tiledb.DenseArray.from_numpy(self.path("foo"), np_array) as arr:
assert_array_equal(arr[:], np_array)
def test_to_array2d(self):
np_array = np.ones((100, 100), dtype="i8")
with tiledb.DenseArray.from_numpy(self.path("foo"), np_array) as arr:
assert_array_equal(arr[:], np_array)
def test_to_array3d(self):
np_array = np.ones((1, 1, 1), dtype="i1")
with tiledb.DenseArray.from_numpy(self.path("foo"), np_array) as arr:
assert_array_equal(arr[:], np_array)
def test_bytes_to_array1d(self):
np_array = np.array(
[b"abcdef", b"gh", b"ijkl", b"mnopqrs", b"", b"1234545lkjalsdfj"],
dtype=object,
)
with tiledb.DenseArray.from_numpy(self.path("foo"), np_array) as arr:
assert_array_equal(arr[:], np_array)
with tiledb.DenseArray(self.path("foo")) as arr_reload:
assert_array_equal(arr_reload[:], np_array)
def test_unicode_to_array1d(self):
np_array = np.array(
[
"1234545lkjalsdfj",
"mnopqrs",
"ijkl",
"gh",
"abcdef",
"aαbββcγγγdδδδδ",
"",
'"aαbββc',
"",
"γγγdδδδδ",
],
dtype=object,
)
with tiledb.DenseArray.from_numpy(self.path("foo"), np_array) as arr:
assert_array_equal(arr[:], np_array)
with tiledb.DenseArray(self.path("foo")) as arr_reload:
assert_array_equal(arr_reload[:], np_array)
def test_array_interface(self):
# Tests that __array__ interface works
np_array1 = np.arange(1, 10)
with tiledb.DenseArray.from_numpy(self.path("arr1"), np_array1) as arr1:
assert_array_equal(np.array(arr1), np_array1)
# Test that __array__ interface throws an error when number of attributes > 1
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), tile=3))
foo = tiledb.Attr("foo", dtype="i8")
bar = tiledb.Attr("bar", dtype="i8")
schema = tiledb.ArraySchema(domain=dom, attrs=(foo, bar))
tiledb.DenseArray.create(self.path("arr2"), schema)
with self.assertRaises(ValueError):
with tiledb.DenseArray(self.path("arr2"), mode="r") as arr2:
np.array(arr2)
def test_array_getindex(self):
# Tests that __getindex__ interface works
np_array = np.arange(1, 10)
with tiledb.DenseArray.from_numpy(self.path("foo"), np_array) as arr:
assert_array_equal(arr[5:10], np_array[5:10])
def test_to_array1d_attr_name(self):
np_array = np.array([1.0, 2.0, 3.0])
with tiledb.DenseArray.from_numpy(
self.path("foo"), np_array, attr_name="a"
) as arr:
assert_array_equal(arr[:]["a"], np_array)
def test_from_numpy_timestamp(self):
path = self.path()
with tiledb.from_numpy(path, np.array([1, 2, 3]), timestamp=10) as A:
pass
with tiledb.open(path, timestamp=(0, 9)) as A:
assert A.nonempty_domain() is None
with tiledb.open(path, timestamp=(10, 10)) as A:
assert A.nonempty_domain() == ((0, 2),)
class TestVFS(DiskTestCase):
def test_supports(self):
vfs = tiledb.VFS()
self.assertTrue(vfs.supports("file"))
self.assertIsInstance(vfs.supports("s3"), bool)
self.assertIsInstance(vfs.supports("hdfs"), bool)
self.assertIsInstance(vfs.supports("gcs"), bool)
self.assertIsInstance(vfs.supports("azure"), bool)
with self.assertRaises(ValueError):
vfs.supports("invalid")
def test_vfs_config(self):
opt = {"region": "us-west-x1234"}
params = [opt, tiledb.Config(opt)]
for param in params:
vfs = tiledb.VFS(param)
assert vfs.config()["region"] == opt["region"]
def test_dir(self):
vfs = tiledb.VFS()
dir = self.path("foo")
self.assertFalse(vfs.is_dir(dir))
# create
vfs.create_dir(dir)
if pytest.tiledb_vfs != "s3":
self.assertTrue(vfs.is_dir(dir))
# remove
vfs.remove_dir(dir)
self.assertFalse(vfs.is_dir(dir))
# create nested path
dir = self.path("foo/bar")
if pytest.tiledb_vfs != "s3":
# this fails locally because "foo" base path does not exist
# this will not fail on s3 because there is no concept of directory
with self.assertRaises(tiledb.TileDBError):
vfs.create_dir(dir)
vfs.create_dir(self.path("foo"))
vfs.create_dir(self.path("foo/bar"))
if pytest.tiledb_vfs != "s3":
self.assertTrue(vfs.is_dir(dir))
def test_file(self):
vfs = tiledb.VFS()
file = self.path("foo")
self.assertFalse(vfs.is_file(file))
# create
vfs.touch(file)
self.assertTrue(vfs.is_file(file))
# remove
vfs.remove_file(file)
self.assertFalse(vfs.is_file(file))
# check nested path
file = self.path("foo/bar")
if pytest.tiledb_vfs != "s3":
# this fails locally because "foo" base path does not exist
# this will not fail on s3 because there is no concept of directory
with self.assertRaises(tiledb.TileDBError):
vfs.touch(file)
def test_move(self):
vfs = tiledb.VFS()
vfs.create_dir(self.path("foo"))
vfs.create_dir(self.path("bar"))
vfs.touch(self.path("bar/baz"))
self.assertTrue(vfs.is_file(self.path("bar/baz")))
vfs.move_file(self.path("bar/baz"), self.path("foo/baz"))
self.assertFalse(vfs.is_file(self.path("bar/baz")))
self.assertTrue(vfs.is_file(self.path("foo/baz")))
# moving to invalid dir should raise an error
if pytest.tiledb_vfs != "s3":
# this fails locally because "foo" base path does not exist
# this will not fail on s3 because there is no concept of directory
with self.assertRaises(tiledb.TileDBError):
vfs.move_dir(self.path("foo/baz"), self.path("do_not_exist/baz"))
@pytest.mark.skipif(
sys.platform == "win32",
reason="VFS copy commands from core are not supported on Windows",
)
def test_copy(self):
vfs = tiledb.VFS()
vfs.create_dir(self.path("foo"))
vfs.create_dir(self.path("bar"))
vfs.touch(self.path("foo/baz"))
self.assertTrue(vfs.is_file(self.path("foo/baz")))
vfs.copy_file(self.path("foo/baz"), self.path("bar/baz"))
self.assertTrue(vfs.is_file(self.path("foo/baz")))
self.assertTrue(vfs.is_file(self.path("bar/baz")))
vfs.copy_dir(self.path("foo"), self.path("baz"))
self.assertTrue(vfs.is_file(self.path("baz/baz")))
# copying to invalid dir should raise an error
if pytest.tiledb_vfs != "s3":
# this fails locally because "foo" base path does not exist
# this will not fail on s3 because there is no concept of directory
with self.assertRaises(tiledb.TileDBError):
vfs.copy_dir(self.path("foo/baz"), self.path("do_not_exist/baz"))
def test_write_read(self):
vfs = tiledb.VFS()
buffer = b"bar"
fh = vfs.open(self.path("foo"), "wb")
vfs.write(fh, buffer)
vfs.close(fh)
self.assertEqual(vfs.file_size(self.path("foo")), 3)
fh = vfs.open(self.path("foo"), "rb")
self.assertEqual(vfs.read(fh, 0, 3), buffer)
vfs.close(fh)
# write / read empty input
fh = vfs.open(self.path("baz"), "wb")
vfs.write(fh, b"")
vfs.close(fh)
self.assertEqual(vfs.file_size(self.path("baz")), 0)
fh = vfs.open(self.path("baz"), "rb")
self.assertEqual(vfs.read(fh, 0, 0), b"")
vfs.close(fh)
# read from file that does not exist
with self.assertRaises(tiledb.TileDBError):
vfs.open(self.path("do_not_exist"), "rb")
def test_io(self):
vfs = tiledb.VFS()
buffer = b"0123456789"
with tiledb.FileIO(vfs, self.path("foo"), mode="wb") as fio:
fio.write(buffer)
fio.flush()
self.assertEqual(fio.tell(), len(buffer))
with tiledb.FileIO(vfs, self.path("foo"), mode="rb") as fio:
with self.assertRaises(IOError):
fio.write(b"foo")
self.assertEqual(vfs.file_size(self.path("foo")), len(buffer))
fio = tiledb.FileIO(vfs, self.path("foo"), mode="rb")
self.assertEqual(fio.read(3), b"012")
self.assertEqual(fio.tell(), 3)
self.assertEqual(fio.read(3), b"345")
self.assertEqual(fio.tell(), 6)
self.assertEqual(fio.read(10), b"6789")
self.assertEqual(fio.tell(), 10)
# seek from beginning
fio.seek(0)
self.assertEqual(fio.tell(), 0)
self.assertEqual(fio.read(), buffer)
# seek must be positive when SEEK_SET
with self.assertRaises(ValueError):
fio.seek(-1, 0)
# seek from current position
fio.seek(5)
self.assertEqual(fio.tell(), 5)
fio.seek(3, 1)
self.assertEqual(fio.tell(), 8)
fio.seek(-3, 1)
self.assertEqual(fio.tell(), 5)
# seek from end
fio.seek(-4, 2)
self.assertEqual(fio.tell(), 6)
# Test readall
fio.seek(0)
self.assertEqual(fio.readall(), buffer)
self.assertEqual(fio.tell(), 10)
fio.seek(5)
self.assertEqual(fio.readall(), buffer[5:])
self.assertEqual(fio.readall(), b"")
# Reading from the end should return empty
fio.seek(0)
fio.read()
self.assertEqual(fio.read(), b"")
# Test writing and reading lines with TextIOWrapper
lines = [rand_utf8(random.randint(0, 50)) + "\n" for _ in range(10)]
rand_uri = self.path("test_fio.rand")
with tiledb.FileIO(vfs, rand_uri, "wb") as f:
txtio = io.TextIOWrapper(f, encoding="utf-8")
txtio.writelines(lines)
txtio.flush()
with tiledb.FileIO(vfs, rand_uri, "rb") as f2:
txtio = io.TextIOWrapper(f2, encoding="utf-8")
self.assertEqual(txtio.readlines(), lines)
def test_ls(self):
basepath = self.path("test_vfs_ls")
self.vfs.create_dir(basepath)
for id in (1, 2, 3):
dir = os.path.join(basepath, "dir" + str(id))
self.vfs.create_dir(dir)
fname = os.path.join(basepath, "file_" + str(id))
with tiledb.FileIO(self.vfs, fname, "wb") as fio:
fio.write(b"")
expected = ("file_1", "file_2", "file_3")
# empty directories do not "exist" on s3
if pytest.tiledb_vfs != "s3":
expected = expected + ("dir1", "dir2", "dir3")
self.assertSetEqual(
set(expected),
set(
map(
lambda x: os.path.basename(x.split("test_vfs_ls")[1]),
self.vfs.ls(basepath),
)
),
)
def test_dir_size(self):
vfs = tiledb.VFS()
path = self.path("test_vfs_dir_size")
vfs.create_dir(path)
rand_sizes = np.random.choice(100, size=4, replace=False)
for size in rand_sizes:
file_path = os.path.join(path, "f_" + str(size))
with tiledb.FileIO(vfs, file_path, "wb") as f:
data = os.urandom(size)
f.write(data)
self.assertEqual(vfs.dir_size(path), sum(rand_sizes))
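# Hedged sketch (not an original test): a minimal VFS byte round trip, reduced from
# TestVFS.test_io above into a standalone helper.
def _example_vfs_roundtrip(uri, payload=b"0123456789"):
    import tiledb

    vfs = tiledb.VFS()
    with tiledb.FileIO(vfs, uri, mode="wb") as f:
        f.write(payload)
    with tiledb.FileIO(vfs, uri, mode="rb") as f:
        return f.read() == payload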
class ConsolidationTest(DiskTestCase):
def test_array_vacuum(self):
dshape = (0, 19)
num_writes = 10
def create_array(target_path):
dom = tiledb.Domain(tiledb.Dim(domain=dshape, tile=3))
att = tiledb.Attr(dtype="int64")
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.libtiledb.Array.create(target_path, schema)
def write_fragments(target_path):
for i in range(num_writes):
with tiledb.open(target_path, "w") as A:
A[i : dshape[1]] = np.random.rand(dshape[1] - i)
# array #1
path = self.path("test_array_vacuum")
create_array(path)
write_fragments(path)
fi = tiledb.FragmentInfoList(path)
self.assertEqual(len(fi), num_writes)
tiledb.consolidate(path)
tiledb.vacuum(path)
fi = tiledb.FragmentInfoList(path)
self.assertEqual(len(fi), 1)
# array #2
path2 = self.path("test_array_vacuum_fragment_meta")
create_array(path2)
write_fragments(path2)
fi = tiledb.FragmentInfoList(path2)
self.assertEqual(fi.unconsolidated_metadata_num, num_writes)
tiledb.consolidate(
path2, config=tiledb.Config({"sm.consolidation.mode": "fragment_meta"})
)
tiledb.vacuum(path2, config=tiledb.Config({"sm.vacuum.mode": "fragment_meta"}))
fi = tiledb.FragmentInfoList(path2)
self.assertEqual(fi.unconsolidated_metadata_num, 0)
# array #3
path3 = self.path("test_array_vacuum2")
create_array(path3)
write_fragments(path3)
fi = tiledb.FragmentInfoList(path3)
self.assertEqual(fi.unconsolidated_metadata_num, num_writes)
conf = tiledb.Config({"sm.consolidation.mode": "fragment_meta"})
with tiledb.open(path3, "w") as A:
A.consolidate(config=conf)
fi = tiledb.FragmentInfoList(path3)
self.assertEqual(fi.unconsolidated_metadata_num, 0)
def test_array_consolidate_with_timestamp(self):
dshape = (1, 3)
num_writes = 10
def create_array(target_path, dshape):
dom = tiledb.Domain(tiledb.Dim(domain=dshape, tile=len(dshape)))
att = tiledb.Attr(dtype="int64")
schema = tiledb.ArraySchema(domain=dom, attrs=(att,), sparse=True)
tiledb.libtiledb.Array.create(target_path, schema)
def write_fragments(target_path, dshape, num_writes):
for i in range(1, num_writes + 1):
with tiledb.open(target_path, "w", timestamp=i) as A:
A[[1, 2, 3]] = np.random.rand(dshape[1])
path = self.path("test_array_consolidate_with_timestamp")
create_array(path, dshape)
write_fragments(path, dshape, num_writes)
frags = tiledb.FragmentInfoList(path)
assert len(frags) == 10
tiledb.consolidate(path, timestamp=(1, 4))
frags = tiledb.FragmentInfoList(path)
assert len(frags) == 7
assert len(frags.to_vacuum) == 4
tiledb.vacuum(path, timestamp=(1, 2))
frags = tiledb.FragmentInfoList(path)
assert len(frags.to_vacuum) == 2
tiledb.vacuum(path)
frags = tiledb.FragmentInfoList(path)
assert len(frags.to_vacuum) == 0
conf = tiledb.Config(
{"sm.consolidation.timestamp_start": 5, "sm.consolidation.timestamp_end": 9}
)
tiledb.consolidate(path, config=conf)
tiledb.vacuum(path)
frags = tiledb.FragmentInfoList(path)
assert len(frags.timestamp_range) == 3
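# Hedged sketch (not an original test): the consolidate-then-vacuum sequence exercised by
# ConsolidationTest above, as a standalone helper that returns the surviving fragment count.
def _example_consolidate_and_vacuum(uri):
    import tiledb

    tiledb.consolidate(uri)  # merge fragments into a consolidated fragment
    tiledb.vacuum(uri)       # remove the now-redundant source fragments
    return len(tiledb.FragmentInfoList(uri))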
@pytest.mark.skipif(sys.platform == "win32", reason="Only run MemoryTest on linux")
class MemoryTest(DiskTestCase):
# sanity check that memory usage doesn't increase more than 2x when reading 40MB 100x
# https://github.com/TileDB-Inc/TileDB-Py/issues/150
@staticmethod
def use_many_buffers(path):
# https://stackoverflow.com/questions/938733/total-memory-used-by-python-process
process = psutil.Process(os.getpid())
x = np.ones(10000000, dtype=np.float32)
d1 = tiledb.Dim(
"test_domain", domain=(0, x.shape[0] - 1), tile=10000, dtype="uint32"
)
domain = tiledb.Domain(d1)
v = tiledb.Attr("test_value", dtype="float32")
schema = tiledb.ArraySchema(
domain=domain, attrs=(v,), cell_order="row-major", tile_order="row-major"
)
tiledb.DenseArray.create(path, schema)
with tiledb.DenseArray(path, mode="w") as A:
A[:] = {"test_value": x}
with tiledb.DenseArray(path, mode="r") as data:
data[:]
initial = process.memory_info().rss
print(" initial RSS: {}".format(round(initial / (10 ** 6)), 2))
for i in range(100):
# read but don't store: this memory should be freed
data[:]
if i % 10 == 0:
print(
" read iter {}, RSS (MB): {}".format(
i, round(process.memory_info().rss / (10 ** 6), 2)
)
)
return initial
def test_memory_cleanup(self, capfd):
# run function which reads 100x from a 40MB test array
# TODO: RSS is too loose to do this end-to-end, so should use instrumentation.
print("Starting TileDB-Py memory test:")
initial = self.use_many_buffers(self.path("test_memory_cleanup"))
process = psutil.Process(os.getpid())
final = process.memory_info().rss
print(" final RSS: {}".format(round(final / (10 ** 6)), 2))
gc.collect()
final_gc = process.memory_info().rss
print(" final RSS after forced GC: {}".format(round(final_gc / (10 ** 6)), 2))
assert_captured(capfd, "final RSS")
self.assertTrue(final < (2 * initial))
class TestHighlevel(DiskTestCase):
def test_open(self):
uri = self.path("test_open")
array = np.random.rand(10)
schema = tiledb.schema_like(array)
tiledb.Array.create(uri, schema)
with tiledb.open(uri, "w") as A:
A[:] = array * 10
A[:] = array
last_fragment_ts = list(A.last_write_info.items())[0][1][0]
ctx = tiledb.Ctx()
with tiledb.DenseArray(uri, ctx=ctx) as A:
self.assertEqual(A._ctx_(), ctx)
# test `open` with timestamp
with tiledb.open(uri, timestamp=last_fragment_ts) as A:
assert_array_equal(A[:], array)
with tiledb.open(uri, ctx=ctx) as A:
self.assertEqual(A._ctx_(), ctx)
config = tiledb.Config()
with tiledb.open(uri, config=config) as A:
self.assertEqual(A._ctx_().config(), config)
with self.assertRaises(KeyError):
# This path must test `tiledb.open` specifically
# https://github.com/TileDB-Inc/TileDB-Py/issues/277
tiledb.open(uri, "r", attr="the-missing-attr")
def test_ctx_thread_cleanup(self):
# This test checks that contexts are destroyed correctly.
# It creates new contexts repeatedly, in-process, and
# checks that the total number of threads stays stable.
config = {"sm.num_reader_threads": 128}
ll = list()
uri = self.path("test_ctx_thread_cleanup")
with tiledb.from_numpy(uri, np.random.rand(100)) as A:
pass
thisproc = psutil.Process(os.getpid())
for n in range(0, 10):
if n > 0:
retry = 0
while retry < 3:
try:
# checking exact thread count is unreliable, so
# make sure we are holding < 2x per run.
self.assertTrue(len(thisproc.threads()) < 2 * start_threads)
break
except AssertionError as exc:
raise exc
except RuntimeError as rterr:
retry += 1
if retry > 2:
raise rterr
warnings.warn(
"Thread cleanup test RuntimeError: {} \n on iteration: {}".format(
str(rterr), n
)
)
with tiledb.DenseArray(uri, ctx=tiledb.Ctx(config)) as A:
res = A[:]
if n == 0:
start_threads = len(thisproc.threads())
# Wrapper to execute specific code in subprocess so that we can ensure the thread count
# init is correct. Necessary because multiprocess.get_context is only available in Python 3.4+,
# and the multiprocessing method may be set to fork by other tests (e.g. dask).
def init_test_wrapper(cfg=None):
python_exe = sys.executable
cmd = "from test_libtiledb import *; init_test_helper({})".format(cfg)
test_path = os.path.dirname(os.path.abspath(__file__))
sp_output = subprocess.check_output([python_exe, "-c", cmd], cwd=test_path)
return int(sp_output.decode("UTF-8").strip())
def init_test_helper(cfg=None):
tiledb.libtiledb.default_ctx(cfg)
concurrency_level = tiledb.default_ctx().config()["sm.io_concurrency_level"]
print(int(concurrency_level))
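# Illustrative helper (not part of the original suite): how the subprocess wrapper above is
# typically exercised — a fresh interpreter reports the io concurrency level it was
# initialized with, so the check does not inherit this process's default context.
def _example_init_concurrency_check():
    return init_test_wrapper({"sm.io_concurrency_level": 3}) == 3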
class ContextTest(unittest.TestCase):
def test_default_ctx(self):
ctx = tiledb.default_ctx()
self.assertIsInstance(ctx, tiledb.Ctx)
assert isinstance(ctx.config(), tiledb.libtiledb.Config)
def test_scope_ctx(self):
key = "sm.tile_cache_size"
ctx0 = tiledb.default_ctx()
new_config_dict = {key: 42}
new_config = tiledb.Config({key: 78})
new_ctx = tiledb.Ctx({key: 61})
assert tiledb.default_ctx() is ctx0
assert tiledb.default_ctx().config()[key] == "10000000"
with tiledb.scope_ctx(new_config_dict) as ctx1:
assert tiledb.default_ctx() is ctx1
assert tiledb.default_ctx().config()[key] == "42"
with tiledb.scope_ctx(new_config) as ctx2:
assert tiledb.default_ctx() is ctx2
assert tiledb.default_ctx().config()[key] == "78"
with tiledb.scope_ctx(new_ctx) as ctx3:
assert tiledb.default_ctx() is ctx3 is new_ctx
assert tiledb.default_ctx().config()[key] == "61"
assert tiledb.default_ctx() is ctx2
assert tiledb.default_ctx().config()[key] == "78"
assert tiledb.default_ctx() is ctx1
assert tiledb.default_ctx().config()[key] == "42"
assert tiledb.default_ctx() is ctx0
assert tiledb.default_ctx().config()[key] == "10000000"
@pytest.mark.skipif(
"pytest.tiledb_vfs == 's3'", reason="Test not yet supported with S3"
)
def test_init_config(self):
self.assertEqual(
int(tiledb.default_ctx().config()["sm.io_concurrency_level"]),
init_test_wrapper(),
)
self.assertEqual(3, init_test_wrapper({"sm.io_concurrency_level": 3}))
class GetStatsTest(DiskTestCase):
def test_ctx(self):
tiledb.libtiledb.stats_enable()
ctx = tiledb.default_ctx()
uri = self.path("test_ctx")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), dtype=np.int64))
att = tiledb.Attr(dtype=np.int64)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.Array.create(uri, schema)
with tiledb.open(uri, mode="w", ctx=ctx) as T:
T[:] = np.random.randint(10, size=3)
stats = ctx.get_stats(print_out=False)
assert "Context.StorageManager.write_store" in stats
def test_query(self):
tiledb.libtiledb.stats_enable()
uri = self.path("test_ctx")
dom = tiledb.Domain(tiledb.Dim(domain=(0, 2), dtype=np.int64))
att = tiledb.Attr(dtype=np.int64)
schema = tiledb.ArraySchema(domain=dom, attrs=(att,))
tiledb.Array.create(uri, schema)
with tiledb.open(uri, mode="w") as T:
T[:] = np.random.randint(10, size=3)
with tiledb.open(uri, mode="r") as T:
q = T.query()
assert "" == q.get_stats()
q[:]
stats = q.get_stats(print_out=False)
assert "Context.StorageManager.Query" in stats
class ReprTest(DiskTestCase):
def test_attr_repr(self):
attr = tiledb.Attr(name="itsanattr", dtype=np.float64)
self.assertTrue(
re.match(
r"Attr\(name=[u]?'itsanattr', dtype='float64', var=False, nullable=False\)",
repr(attr),
)
)
g = dict()
exec("from tiledb import Attr; from numpy import float64", g)
self.assertEqual(eval(repr(attr), g), attr)
def test_dim_repr(self):
dtype_set = [bytes, np.bytes_]
opts = {
None: None,
"var": True,
"domain": (None, None),
"filters": [tiledb.GzipFilter()],
}
dim_test_imports = textwrap.dedent(
"""
from tiledb import Dim, FilterList, GzipFilter
from numpy import float64
"""
)
for dtype in dtype_set:
opt_choices = [
itertools.combinations(opts.keys(), r=n)
for n in range(1, len(opts) + 1)
]
for opt_set in itertools.chain(*opt_choices):
opt_kwarg = {k: opts[k] for k in opt_set if k}
g = dict()
exec(dim_test_imports, g)
dim = tiledb.Dim(name="d1", dtype=dtype, **opt_kwarg)
self.assertEqual(eval(repr(dim), g), dim)
def test_arrayschema_repr(self, sparse_cell_order):
filters = tiledb.FilterList([tiledb.ZstdFilter(-1)])
for sparse in [False, True]:
cell_order = sparse_cell_order if sparse else None
domain = tiledb.Domain(
tiledb.Dim(domain=(1, 8), tile=2), tiledb.Dim(domain=(1, 8), tile=2)
)
a1 = tiledb.Attr("val", dtype="f8", filters=filters)
orig_schema = tiledb.ArraySchema(
domain=domain, attrs=(a1,), sparse=sparse, cell_order=cell_order
)
schema_repr = repr(orig_schema)
g = dict()
setup = "from tiledb import *\n" "import numpy as np\n"
exec(setup, g)
new_schema = None
try:
new_schema = eval(schema_repr, g)
except Exception as exc:
warn_str = (
"""Exception during ReprTest schema eval"""
+ """, schema string was:\n"""
+ """'''"""
+ """\n{}\n'''""".format(schema_repr)
)
warnings.warn(warn_str)
raise
self.assertEqual(new_schema, orig_schema)
def test_arrayschema_repr_hilbert(self):
domain = tiledb.Domain(tiledb.Dim(domain=(1, 8), tile=2))
a = tiledb.Attr("a", dtype="f8")
schema = tiledb.ArraySchema(
domain=domain, attrs=(a,), cell_order="hilbert", sparse=True
)
assert schema.cell_order == "hilbert"
assert schema.tile_order is None
class NullableIOTest(DiskTestCase):
def test_nullable_write(self):
uri = self.path("nullable_write_test")
schema = tiledb.ArraySchema(
domain=tiledb.Domain(
*[tiledb.Dim(name="__dim_0", domain=(0, 3), tile=4, dtype="uint64")]
),
attrs=[tiledb.Attr(name="", dtype="int64", var=False, nullable=True)],
)
tiledb.Array.create(uri, schema)
with tiledb.open(uri, "w") as A:
A._setitem_impl(
slice(0, 4), np.ones(4), {"": np.array([0, 1, 0, 1], dtype=np.uint8)}
)
class IncompleteTest(DiskTestCase):
def test_incomplete_dense_varlen(self):
ncells = 10
path = self.path("incomplete_dense_varlen")
str_data = [rand_utf8(random.randint(0, n)) for n in range(ncells)]
data = np.array(str_data, dtype=np.unicode_)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(1, len(data)), tile=len(data)))
att = tiledb.Attr(dtype=np.unicode_, var=True)
schema = tiledb.ArraySchema(dom, (att,))
tiledb.DenseArray.create(path, schema)
with tiledb.DenseArray(path, mode="w") as T:
T[:] = data
with tiledb.DenseArray(path, mode="r") as T:
assert_array_equal(data, T[:])
# set the memory to the max length of a cell
# these settings force ~100 retries
# TODO would be good to check repeat count here; not yet exposed
# Also would be useful to have max cell config in libtiledb.
init_buffer_bytes = 1024 ** 2
config = tiledb.Config(
{
"sm.memory_budget": ncells,
"sm.memory_budget_var": ncells,
"py.init_buffer_bytes": init_buffer_bytes,
}
)
self.assertEqual(config["py.init_buffer_bytes"], str(init_buffer_bytes))
with tiledb.DenseArray(path, mode="r", ctx=tiledb.Ctx(config)) as T2:
df = T2.query(attrs=[""]).df[:]
assert_array_equal(df[""], data)
def test_incomplete_sparse_varlen(self):
ncells = 100
path = self.path("incomplete_sparse_varlen")
str_data = [rand_utf8(random.randint(0, n)) for n in range(ncells)]
data = np.array(str_data, dtype=np.unicode_)
coords = np.arange(ncells)
# basic write
dom = tiledb.Domain(tiledb.Dim(domain=(0, len(data) + 100), tile=len(data)))
att = tiledb.Attr(dtype=np.unicode_, var=True)
schema = tiledb.ArraySchema(dom, (att,), sparse=True)
tiledb.SparseArray.create(path, schema)
with tiledb.SparseArray(path, mode="w") as T:
T[coords] = data
with tiledb.SparseArray(path, mode="r") as T:
assert_array_equal(data, T[:][""])
# set the memory to the max length of a cell
# these settings force ~100 retries
# TODO would be good to check repeat count here; not yet exposed
# Also would be useful to have max cell config in libtiledb.
init_buffer_bytes = 1024 ** 2
config = tiledb.Config(
{
"sm.memory_budget": ncells,
"sm.memory_budget_var": ncells,
"py.init_buffer_bytes": init_buffer_bytes,
}
)
self.assertEqual(config["py.init_buffer_bytes"], str(init_buffer_bytes))
with tiledb.SparseArray(path, mode="r", ctx=tiledb.Ctx(config)) as T2:
assert_array_equal(data, T2[:][""])
assert_array_equal(data, T2.multi_index[0:ncells][""])
# ensure that empty results are handled correctly
assert_array_equal(
T2.multi_index[101:105][""], np.array([], dtype=np.dtype("<U"))
)
@pytest.mark.parametrize(
"return_arrow, indexer", [(True, "df"), (False, "df"), (False, "multi_index")]
)
def test_incomplete_return(
self, test_incomplete_return_array, return_arrow, indexer
):
import pyarrow as pa
import pandas as pd
from tiledb.multirange_indexing import EstimatedResultSize
path = test_incomplete_return_array
init_buffer_bytes = 200
cfg = tiledb.Config(
{
"py.init_buffer_bytes": init_buffer_bytes,
"py.exact_init_buffer_bytes": "true",
}
)
with tiledb.open(path) as A:
full_data = A[:][""]
# count number of elements retrieved so that we can slice the comparison array
idx = 0
with tiledb.open(path, ctx=tiledb.Ctx(cfg)) as A:
query = A.query(return_incomplete=True, return_arrow=return_arrow)
iterable = getattr(query, indexer)
for result in iterable[:]:
est_results = iterable.estimated_result_sizes()
assert isinstance(est_results[""], EstimatedResultSize)
assert isinstance(est_results["__dim_0"], EstimatedResultSize)
assert est_results["__dim_0"].offsets_bytes == 0
assert est_results["__dim_0"].data_bytes > 0
assert est_results[""].offsets_bytes > 0
assert est_results[""].data_bytes > 0
if return_arrow:
assert isinstance(result, pa.Table)
df = result.to_pandas()
else:
if indexer == "df":
assert isinstance(result, pd.DataFrame)
df = result
else:
assert isinstance(result, OrderedDict)
df = pd.DataFrame(result)
to_slice = slice(idx, idx + len(df))
chunk = full_data[to_slice]
assert np.all(chunk == df[""].values)
assert np.all(df["__dim_0"] == np.arange(idx, idx + len(df)))
# update the current read count
idx += len(df)
assert idx == len(full_data)
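# Hedged helper (not an original test): the small-buffer configuration IncompleteTest relies
# on to force incomplete reads and retries, packaged for reuse. The default values are
# illustrative only.
def _example_incomplete_read_ctx(budget=100, init_buffer_bytes=200):
    import tiledb

    return tiledb.Ctx(
        tiledb.Config(
            {
                "sm.memory_budget": budget,
                "sm.memory_budget_var": budget,
                "py.init_buffer_bytes": init_buffer_bytes,
            }
        )
    )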
class TestTest(DiskTestCase):
def test_path(self, pytestconfig):
path = self.path("foo")
if pytestconfig.getoption("vfs") == "s3":
assert path.startswith("s3://")
@pytest.mark.skipif(
sys.platform == "win32", reason="no_output fixture disabled on Windows"
)
@pytest.mark.xfail(
True, reason="This test prints, and should fail because of no_output fixture!"
)
def test_no_output(self):
print("this test should fail")
# if __name__ == '__main__':
# # run a single example for in-process debugging
# # better to use `pytest --gdb` if available
# t = DenseArrayTest()
# t.setUp()
# t.test_array_1d()
| 36.532236
| 97
| 0.555683
|
f14798c435e472d89cc025532cc39eaa5e7c7624
| 141
|
py
|
Python
|
KerasRFCN/__init__.py
|
denis19973/Keras-RFCN
|
e62670c2e01ac1e942f513d324642cf8d6aee368
|
[
"MIT"
] | 88
|
2018-05-04T08:04:02.000Z
|
2022-01-05T02:57:28.000Z
|
KerasRFCN/__init__.py
|
denis19973/Keras-RFCN
|
e62670c2e01ac1e942f513d324642cf8d6aee368
|
[
"MIT"
] | 16
|
2018-07-03T11:58:51.000Z
|
2021-07-12T04:49:05.000Z
|
KerasRFCN/__init__.py
|
mitulrm/FaceRFCN
|
5e1fdaf197b3a93c22a82d9476a3f9a1c804e398
|
[
"MIT"
] | 33
|
2018-05-04T08:02:32.000Z
|
2022-01-09T14:39:06.000Z
|
# __init__.py
__all__ = ['Config', 'Data_generator', 'Losses', 'Utils', 'Model.BaseModel', 'Model.Model', 'Model.ResNet_dilated', 'Model.ResNet']
| 70.5
| 127
| 0.723404
|
5406cbce64a8d4e21f8a785d3442fc564d3c09e9
| 1,754
|
py
|
Python
|
util/chplenv/chpl_aux_filesys.py
|
KING-SID/chapel
|
8fe143dff7395a9600794ec0c3921038d8c81784
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2019-03-23T02:57:15.000Z
|
2019-03-23T02:57:22.000Z
|
util/chplenv/chpl_aux_filesys.py
|
KING-SID/chapel
|
8fe143dff7395a9600794ec0c3921038d8c81784
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
util/chplenv/chpl_aux_filesys.py
|
KING-SID/chapel
|
8fe143dff7395a9600794ec0c3921038d8c81784
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import sys
import os
from glob import glob
import overrides
from utils import memoize
@memoize
def get():
aux_fs = overrides.get('CHPL_AUX_FILESYS', 'none')
if aux_fs == 'hdfs':
java_subdir = os.environ.get('JAVA_INSTALL', '')
aux_fs_subdir = os.environ.get('HADOOP_INSTALL', '')
# This will not check that all dependencies are satisfied..
found_java = os.path.isdir(os.path.join(java_subdir, 'include'))
found_hdfs = os.path.exists(os.path.join(aux_fs_subdir,
'include', 'hdfs.h'))
found_hdfs_lib = os.path.exists(os.path.join(aux_fs_subdir, 'lib',
'native', 'libhdfs.a'))
if not found_java:
sys.stderr.write("Warning: Can't find your Java installation\n")
if not found_hdfs or not found_hdfs_lib:
sys.stderr.write("Warning: Can't find your Hadoop installation\n")
elif aux_fs == 'hdfs3':
def fetchInfo(env, envtype, filename, err):
directories = [d[len(envtype):] if d.startswith(envtype) else d  # strip the -I/-L prefix, not a char set
               for d in os.environ.get(env, '').split()]
res = sum([ os.path.exists(os.path.join(d, filename)) for d in directories ])
if res < 1:
sys.stderr.write(err)
return False
return True
fetchInfo('CHPL_AUXIO_INCLUDE', '-I', 'hdfs.h', "Warning: Can't find your HDFS3 header file installation\n")
fetchInfo('CHPL_AUXIO_LIBS', '-L', 'libhdfs3.a', "Warning: Can't find your HDFS3 static library installation\n")
return aux_fs
def _main():
aux_fs_val = get()
sys.stdout.write("{0}\n".format(aux_fs_val))
if __name__ == '__main__':
_main()
| 33.09434
| 120
| 0.596921
|
6d52355d20f755ae2a7a5d7b0acabd4b4ac32b0b
| 2,646
|
py
|
Python
|
superset/superset/sqllab/execution_context_convertor.py
|
mvbvieira/finance_pipeline
|
48b085aeafda61c82f77de4ae67ceb02ac32f683
|
[
"Apache-2.0"
] | 1
|
2020-11-03T06:34:21.000Z
|
2020-11-03T06:34:21.000Z
|
superset/superset/sqllab/execution_context_convertor.py
|
mvbvieira/finance_pipeline
|
48b085aeafda61c82f77de4ae67ceb02ac32f683
|
[
"Apache-2.0"
] | 57
|
2020-04-20T17:41:41.000Z
|
2022-03-16T21:38:05.000Z
|
superset/superset/sqllab/execution_context_convertor.py
|
mvbvieira/finance_pipeline
|
48b085aeafda61c82f77de4ae67ceb02ac32f683
|
[
"Apache-2.0"
] | 2
|
2021-10-12T17:51:34.000Z
|
2021-10-15T18:55:52.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations
from typing import TYPE_CHECKING
import simplejson as json
import superset.utils.core as utils
from superset.sqllab.command import ExecutionContextConvertor
from superset.sqllab.command_status import SqlJsonExecutionStatus
from superset.sqllab.utils import apply_display_max_row_configuration_if_require
if TYPE_CHECKING:
from superset.sqllab.sqllab_execution_context import SqlJsonExecutionContext
from superset.sqllab.sql_json_executer import SqlResults
from superset.models.sql_lab import Query
class ExecutionContextConvertorImpl(ExecutionContextConvertor):
_max_row_in_display_configuration: int # pylint: disable=invalid-name
def set_max_row_in_display(self, value: int) -> None:
self._max_row_in_display_configuration = value # pylint: disable=invalid-name
def to_payload(
self,
execution_context: SqlJsonExecutionContext,
execution_status: SqlJsonExecutionStatus,
) -> str:
if execution_status == SqlJsonExecutionStatus.HAS_RESULTS:
return self._to_payload_results_based(
execution_context.get_execution_result() or {}
)
return self._to_payload_query_based(execution_context.query)
def _to_payload_results_based(self, execution_result: SqlResults) -> str:
return json.dumps(
apply_display_max_row_configuration_if_require(
execution_result, self._max_row_in_display_configuration
),
default=utils.pessimistic_json_iso_dttm_ser,
ignore_nan=True,
encoding=None,
)
def _to_payload_query_based( # pylint: disable=no-self-use
self, query: Query
) -> str:
return json.dumps(
{"query": query.to_dict()}, default=utils.json_int_dttm_ser, ignore_nan=True
)
| 38.911765
| 88
| 0.744142
|
0aa48284fba207e4208529a8585205792cbacfbe
| 2,652
|
py
|
Python
|
utils/fairseq_mod/examples/linformer/src/modules/linformer_sentence_encoder_layer.py
|
saidineshpola/Knowledge-Distillation-Toolkit
|
b05ebc28ae1385c9caa1c4c1c93db2d67356e85f
|
[
"MIT"
] | 69
|
2021-03-27T10:28:27.000Z
|
2022-03-29T07:32:02.000Z
|
utils/fairseq_mod/examples/linformer/src/modules/linformer_sentence_encoder_layer.py
|
saidineshpola/Knowledge-Distillation-Toolkit
|
b05ebc28ae1385c9caa1c4c1c93db2d67356e85f
|
[
"MIT"
] | 5
|
2021-05-24T08:56:59.000Z
|
2021-11-19T09:21:31.000Z
|
utils/fairseq_mod/examples/linformer/src/modules/linformer_sentence_encoder_layer.py
|
saidineshpola/Knowledge-Distillation-Toolkit
|
b05ebc28ae1385c9caa1c4c1c93db2d67356e85f
|
[
"MIT"
] | 20
|
2021-03-27T10:30:32.000Z
|
2022-03-17T17:13:41.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Callable
from fairseq_mod.modules import TransformerSentenceEncoderLayer
from .multihead_linear_attention import MultiheadLinearAttention
class LinformerSentenceEncoderLayer(TransformerSentenceEncoderLayer):
"""
Implements a Linformer Encoder Layer used in BERT/XLM style pre-trained
models.
"""
def __init__(
self,
embedding_dim: int = 768,
ffn_embedding_dim: int = 3072,
num_attention_heads: int = 8,
dropout: float = 0.1,
attention_dropout: float = 0.1,
activation_dropout: float = 0.1,
activation_fn: str = 'relu',
export: bool = False,
q_noise: float = 0.0,
qn_block_size: int = 8,
init_fn: Callable = None,
compressed: int = 1,
max_seq_len: int = 256,
shared_kv_compressed: int = 0,
shared_compress_layer: any = None,
freeze_compress: int = 0,
) -> None:
# Initialize linformer parameters
self.compressed = compressed
self.max_seq_len = max_seq_len
self.shared_kv_compressed = shared_kv_compressed
self.freeze_compress = freeze_compress
def init_fn():
# This needs to be set after nn.Module.__init__ is called
self.shared_compress_layer = shared_compress_layer
super().__init__(
embedding_dim=embedding_dim,
ffn_embedding_dim=ffn_embedding_dim,
num_attention_heads=num_attention_heads,
dropout=dropout,
attention_dropout=attention_dropout,
activation_dropout=activation_dropout,
activation_fn=activation_fn,
export=export,
q_noise=q_noise,
qn_block_size=qn_block_size,
init_fn=init_fn,
)
def build_self_attention(
self,
embed_dim,
num_attention_heads,
dropout,
self_attention,
q_noise,
qn_block_size,
):
return MultiheadLinearAttention(
embed_dim,
num_attention_heads,
dropout=dropout,
self_attention=True,
q_noise=q_noise,
qn_block_size=qn_block_size,
compressed=self.compressed,
max_seq_len=self.max_seq_len,
shared_kv_compressed=self.shared_kv_compressed,
shared_compress_layer=self.shared_compress_layer,
freeze_compress=self.freeze_compress,
)
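# Hedged illustration (not part of this module): the comment inside `init_fn` above notes
# that `shared_compress_layer` can only be assigned after nn.Module.__init__ has run, so
# the assignment is wrapped in a closure that the parent constructor calls at the right
# time. The plain-Python stand-in below sketches that deferred-assignment pattern;
# `_base_init` is a hypothetical stand-in for TransformerSentenceEncoderLayer.__init__.
class _DeferredInitSketch:
    def __init__(self, shared_layer):
        def init_fn():
            # deferred: runs only once the base setup below invokes it
            self.shared_layer = shared_layer

        self._base_init(init_fn)

    def _base_init(self, init_fn):
        # the real base class performs its own setup first, then calls init_fn
        init_fn()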
| 31.571429
| 75
| 0.636501
|
12beb113fed3a3dac5d04d5b551d0734e2b045c1
| 81,613
|
py
|
Python
|
convoy/remotefs.py
|
EricSchles/batch-shipyard
|
bfcfcc1f3706e700055ad1a04829593a882c4595
|
[
"MIT"
] | 1
|
2018-07-30T21:10:10.000Z
|
2018-07-30T21:10:10.000Z
|
convoy/remotefs.py
|
EricSchles/batch-shipyard
|
bfcfcc1f3706e700055ad1a04829593a882c4595
|
[
"MIT"
] | null | null | null |
convoy/remotefs.py
|
EricSchles/batch-shipyard
|
bfcfcc1f3706e700055ad1a04829593a882c4595
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation
#
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# compat imports
from __future__ import (
absolute_import, division, print_function
)
from builtins import ( # noqa
bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import functools
import json
import logging
import os
try:
import pathlib2 as pathlib
except ImportError:
import pathlib
# non-stdlib imports
import msrestazure.azure_exceptions
# local imports
from . import crypto
from . import resource
from . import settings
from . import storage
from . import util
# create logger
logger = logging.getLogger(__name__)
util.setup_logger(logger)
def _create_managed_disk(compute_client, rfs, disk_name):
# type: (azure.mgmt.compute.ComputeManagementClient,
# settings.RemoteFsSettings, str) ->
# msrestazure.azure_operation.AzureOperationPoller
"""Create a managed disk
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param settings.RemoteFsSettings rfs: remote filesystem settings
:param str disk_name: disk name
:rtype: msrestazure.azure_operation.AzureOperationPoller
:return: async operation handle
"""
account_type = (
compute_client.disks.models.StorageAccountTypes.premium_lrs
if rfs.managed_disks.premium else
compute_client.disks.models.StorageAccountTypes.standard_lrs
)
logger.info('creating managed disk: {}'.format(disk_name))
return compute_client.disks.create_or_update(
resource_group_name=rfs.managed_disks.resource_group,
disk_name=disk_name,
disk=compute_client.disks.models.Disk(
location=rfs.managed_disks.location,
creation_data=compute_client.disks.models.CreationData(
create_option=compute_client.disks.models.
DiskCreateOption.empty,
),
sku=compute_client.disks.models.DiskSku(
name=account_type,
),
os_type=compute_client.disks.models.OperatingSystemTypes.linux,
disk_size_gb=rfs.managed_disks.disk_size_gb,
),
)
def create_managed_disks(resource_client, compute_client, config, wait=True):
# type: (azure.mgmt.resource.resources.ResourceManagementClient,
# azure.mgmt.compute.ComputeManagementClient, dict, bool) -> None
"""Create managed disks
:param azure.mgmt.resource.resources.ResourceManagementClient
resource_client: resource client
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param dict config: configuration dict
:param bool wait: wait for operation to complete
"""
# retrieve remotefs settings
rfs = settings.remotefs_settings(config)
# create resource group if it doesn't exist
resource.create_resource_group(
resource_client, rfs.managed_disks.resource_group,
rfs.managed_disks.location)
# iterate disks and create disks if they don't exist
existing_disk_sizes = set()
async_ops = {}
for disk_name in rfs.managed_disks.disk_names:
try:
disk = compute_client.disks.get(
resource_group_name=rfs.managed_disks.resource_group,
disk_name=disk_name)
logger.debug('{} exists [created={} size={} GB]'.format(
disk.id, disk.time_created, disk.disk_size_gb))
existing_disk_sizes.add(disk.disk_size_gb)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
existing_disk_sizes.add(rfs.managed_disks.disk_size_gb)
if len(existing_disk_sizes) != 1:
existing_disk_sizes.discard(rfs.managed_disks.disk_size_gb)
raise RuntimeError(
('Inconsistent disk sizes for newly created disks '
'({} GB) to existing disks ({} GB)').format(
rfs.managed_disks.disk_size_gb,
existing_disk_sizes)
)
async_ops[disk_name] = resource.AsyncOperation(
functools.partial(
_create_managed_disk, compute_client, rfs, disk_name))
else:
raise
# block for all ops to complete if specified
# note that if wait is not specified and there is no delay, the request
# may not get acknowledged...
if wait:
if len(async_ops) > 0:
logger.debug('waiting for all {} disks to provision'.format(
len(async_ops)))
for disk_name in async_ops:
disk = async_ops[disk_name].result()
logger.info('{} created with size of {} GB'.format(
disk.id, disk.disk_size_gb))
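# Hedged illustration (not part of Batch Shipyard): the wait pattern used above. Each
# long-running SDK call is wrapped in resource.AsyncOperation via functools.partial so it
# can be polled uniformly, and .result() blocks until the operation completes. `ops` is a
# hypothetical dict of name -> resource.AsyncOperation, as built in create_managed_disks.
def _example_wait_for_async_ops(ops):
    results = {}
    for op_name in ops:
        # mirrors the wait branch above: block until the underlying poller finishes
        results[op_name] = ops[op_name].result()
    return results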
def delete_managed_disks(
resource_client, compute_client, config, name, resource_group=None,
all=False, delete_resource_group=False, wait=False,
confirm_override=False):
# type: (azure.mgmt.resource.resources.ResourceManagementClient,
# azure.mgmt.compute.ComputeManagementClient, dict, str or list,
# bool, bool, bool, bool) -> dict
"""Delete managed disks
:param azure.mgmt.resource.resources.ResourceManagementClient
resource_client: resource client
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param dict config: configuration dict
:param str or list name: specific disk name or list of names
:param str resource_group: resource group of the disks
:param bool all: delete all disks in resource group
:param bool delete_resource_group: delete resource group
:param bool wait: wait for operation to complete
:param bool confirm_override: override confirmation of delete
:rtype: dict or None
:return: dictionary of disk names -> async ops if wait is False,
otherwise None
"""
# retrieve remotefs settings if necessary
rfs = None
if resource_group is None:
rfs = settings.remotefs_settings(config)
resource_group = rfs.managed_disks.resource_group
# delete rg if specified
if delete_resource_group:
if (not confirm_override and not util.confirm_action(
config, 'delete resource group {}'.format(resource_group))):
return
logger.info('deleting resource group {}'.format(resource_group))
async_delete = resource_client.resource_groups.delete(
resource_group_name=resource_group)
if wait:
logger.debug('waiting for resource group {} to delete'.format(
resource_group))
async_delete.result()
logger.info('resource group {} deleted'.format(
resource_group))
return
# set disks to delete
if all:
disks = [
x[0].split('/')[-1] for x in list_disks(
compute_client, config, resource_group=resource_group,
restrict_scope=False)
]
else:
if util.is_none_or_empty(name):
if rfs is None:
rfs = settings.remotefs_settings(config)
disks = rfs.managed_disks.disk_names
else:
if isinstance(name, list):
disks = name
else:
disks = [name]
# iterate disks and delete them
async_ops = {}
for disk_name in disks:
if (not confirm_override and not util.confirm_action(
config,
'delete managed disk {} from resource group {}'.format(
disk_name, resource_group))):
continue
logger.info('deleting managed disk {} in resource group {}'.format(
disk_name, resource_group))
async_ops[disk_name] = resource.AsyncOperation(functools.partial(
compute_client.disks.delete, resource_group_name=resource_group,
disk_name=disk_name), retry_conflict=True)
# block for all ops to complete if specified
if wait:
if len(async_ops) > 0:
logger.debug('waiting for all {} disks to be deleted'.format(
len(async_ops)))
for disk_name in async_ops:
async_ops[disk_name].result()
logger.info('{} managed disks deleted in resource group {}'.format(
len(async_ops), resource_group))
else:
return async_ops
def list_disks(
compute_client, config, resource_group=None, restrict_scope=False):
# type: (azure.mgmt.compute.ComputeManagementClient, dict, str, bool) ->
# List[str, computemodels.StorageAccountTypes]
"""List managed disks
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param dict config: configuration dict
:param str resource_group: resource group to list from
:param bool restrict_scope: restrict scope to config
:rtype: list
    :return: list of (disk id, disk sku/account type) tuples
"""
# retrieve remotefs settings
rfs = settings.remotefs_settings(config)
confdisks = frozenset(rfs.managed_disks.disk_names)
resource_group = resource_group or rfs.managed_disks.resource_group
# list disks in resource group
logger.debug(
('listing all managed disks in resource group {} '
'[restrict_scope={}]').format(resource_group, restrict_scope))
disks = compute_client.disks.list_by_resource_group(
resource_group_name=resource_group)
ret = []
i = 0
for disk in disks:
if restrict_scope and disk.name not in confdisks:
continue
logger.info(
'{} [provisioning_state={} created={} size={} type={}]'.format(
disk.id, disk.provisioning_state, disk.time_created,
disk.disk_size_gb, disk.sku.name))
ret.append((disk.id, disk.sku.name))
i += 1
if i == 0:
logger.error(
('no managed disks found in resource group {} '
'[restrict_scope={}]').format(resource_group, restrict_scope))
return ret
def _create_virtual_machine_extension(
compute_client, rfs, bootstrap_file, blob_urls, vm_name, disks,
private_ips, offset, verbose=False):
    # type: (azure.mgmt.compute.ComputeManagementClient,
    #        settings.RemoteFsSettings, str, List[str], str, dict, List[str],
    #        int, bool) -> msrestazure.azure_operation.AzureOperationPoller
"""Create a virtual machine extension
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param settings.RemoteFsSettings rfs: remote filesystem settings
:param str bootstrap_file: bootstrap file
:param list blob_urls: blob urls
:param str vm_name: vm name
:param dict disks: data disk map
:param list private_ips: list of static private ips
:param int offset: vm number
:param bool verbose: verbose logging
:rtype: msrestazure.azure_operation.AzureOperationPoller
:return: msrestazure.azure_operation.AzureOperationPoller
"""
# construct vm extensions
vm_ext_name = settings.generate_virtual_machine_extension_name(
rfs.storage_cluster, offset)
# get premium storage settings
premium = False
for diskname in rfs.storage_cluster.vm_disk_map[offset].disk_array:
if (disks[diskname][1] ==
compute_client.disks.models.StorageAccountTypes.premium_lrs):
premium = True
break
# construct server options
server_options = []
st = rfs.storage_cluster.file_server.type
so = rfs.storage_cluster.file_server.server_options
# special processing for gluster
if st == 'glusterfs':
# always create the following options if they don't exist
server_options.append(
settings.get_file_server_glusterfs_volume_name(
rfs.storage_cluster))
server_options.append(
settings.get_file_server_glusterfs_volume_type(
rfs.storage_cluster))
server_options.append(
settings.get_file_server_glusterfs_transport(
rfs.storage_cluster))
# process key pairs
if st in so:
for key in so[st]:
if (key == 'volume_name' or key == 'volume_type' or
key == 'transport'):
continue
server_options.append('{}:{}'.format(key, so[st][key]))
server_options = ','.join(server_options)
elif st == 'nfs':
try:
nfs_hosts = so[st]
except KeyError:
nfs_hosts = None
if util.is_none_or_empty(nfs_hosts):
nfs_hosts = {'*': []}
nfs_exports = []
for host in nfs_hosts:
opt = []
for eo in nfs_hosts[host]:
if (not eo.startswith('mountpath=') and
not eo.startswith('mp=')):
opt.append(eo)
if util.is_none_or_empty(opt):
opt.extend(['rw', 'sync', 'root_squash', 'no_subtree_check'])
nfs_exports.append('{}%{}'.format(host, ','.join(opt)))
server_options = ';'.join(nfs_exports)
del nfs_hosts
del nfs_exports
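        # e.g. with no nfs server options configured, this yields a single
        # default export spec of:
        #   '*%rw,sync,root_squash,no_subtree_check'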
logger.debug('server options: {}'.format(server_options))
# create samba option
if util.is_not_empty(rfs.storage_cluster.file_server.samba.share_name):
samba = rfs.storage_cluster.file_server.samba
smb = '{share}:{user}:{pw}:{uid}:{gid}:{ro}:{cm}:{dm}'.format(
share=samba.share_name,
user=samba.account.username,
pw=samba.account.password,
uid=samba.account.uid,
gid=samba.account.gid,
ro=samba.read_only,
cm=samba.create_mask,
dm=samba.directory_mask,
)
else:
smb = None
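    # when samba is enabled, smb is a colon-delimited option string, e.g.
    # (hypothetical values): 'share:user:pass:1000:1000:False:0700:0700'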
# construct bootstrap command
if rfs.storage_cluster.prometheus.ne_enabled:
if util.is_not_empty(rfs.storage_cluster.prometheus.ne_options):
pneo = ','.join(rfs.storage_cluster.prometheus.ne_options)
else:
pneo = ''
promopt = ' -e \'{},{}\''.format(
rfs.storage_cluster.prometheus.ne_port, pneo)
del pneo
else:
promopt = ''
cmd = './{bsf} {c}{d}{e}{f}{i}{m}{n}{o}{p}{r}{s}{t}'.format(
bsf=bootstrap_file,
c=' -c \'{}\''.format(smb) if util.is_not_empty(smb) else '',
d=' -d {}'.format(rfs.storage_cluster.hostname_prefix),
e=promopt,
f=' -f {}'.format(rfs.storage_cluster.vm_disk_map[offset].filesystem),
i=' -i {}'.format(
','.join(private_ips)) if util.is_not_empty(private_ips) else '',
m=' -m {}'.format(rfs.storage_cluster.file_server.mountpoint),
n=' -n' if settings.can_tune_tcp(rfs.storage_cluster.vm_size) else '',
o=' -o \'{}\''.format(server_options) if util.is_not_empty(
server_options) else '',
p=' -p' if premium else '',
r=' -r {}'.format(rfs.storage_cluster.vm_disk_map[offset].raid_level),
s=' -s {}'.format(rfs.storage_cluster.file_server.type),
t=' -t {}'.format(
','.join(rfs.storage_cluster.file_server.mount_options)
if util.is_not_empty(rfs.storage_cluster.file_server.mount_options)
else ''))
if verbose:
logger.debug('bootstrap command: {}'.format(cmd))
logger.debug('creating virtual machine extension: {}'.format(vm_ext_name))
return compute_client.virtual_machine_extensions.create_or_update(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
vm_extension_name=vm_ext_name,
extension_parameters=compute_client.virtual_machine_extensions.models.
VirtualMachineExtension(
location=rfs.storage_cluster.location,
publisher='Microsoft.Azure.Extensions',
virtual_machine_extension_type='CustomScript',
type_handler_version='2.0',
auto_upgrade_minor_version=True,
settings={
'fileUris': blob_urls,
},
protected_settings={
'commandToExecute': cmd,
'storageAccountName': storage.get_storageaccount(),
'storageAccountKey': storage.get_storageaccount_key(),
},
),
)
def _create_availability_set(compute_client, rfs):
# type: (azure.mgmt.compute.ComputeManagementClient,
# settings.RemoteFsSettings) ->
# msrestazure.azure_operation.AzureOperationPoller
"""Create an availability set
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param settings.RemoteFsSettings rfs: remote filesystem settings
:rtype: msrestazure.azure_operation.AzureOperationPoller or None
:return: msrestazure.azure_operation.AzureOperationPoller
"""
if rfs.storage_cluster.vm_count <= 1:
logger.warning('insufficient vm_count for availability set')
return None
as_name = settings.generate_availability_set_name(rfs.storage_cluster)
# check and fail if as exists
try:
compute_client.availability_sets.get(
resource_group_name=rfs.storage_cluster.resource_group,
availability_set_name=as_name,
)
raise RuntimeError('availability set {} exists'.format(as_name))
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
pass
else:
raise
logger.debug('creating availability set: {}'.format(as_name))
return compute_client.availability_sets.create_or_update(
resource_group_name=rfs.storage_cluster.resource_group,
availability_set_name=as_name,
        # use the maximum update domain count; the fault domain count comes
        # from settings due to regional variability
parameters=compute_client.virtual_machines.models.AvailabilitySet(
location=rfs.storage_cluster.location,
platform_update_domain_count=20,
platform_fault_domain_count=rfs.storage_cluster.fault_domains,
sku=compute_client.virtual_machines.models.Sku(
name='Aligned',
),
)
)
def create_storage_cluster(
resource_client, compute_client, network_client, blob_client, config,
sc_id, bootstrap_file, remotefs_files):
# type: (azure.mgmt.resource.resources.ResourceManagementClient,
# azure.mgmt.compute.ComputeManagementClient,
# azure.mgmt.network.NetworkManagementClient,
# azure.storage.blob.BlockBlobService, dict, str, str,
# List[tuple]) -> None
"""Create a storage cluster
:param azure.mgmt.resource.resources.ResourceManagementClient
resource_client: resource client
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param azure.mgmt.network.NetworkManagementClient network_client:
network client
:param azure.storage.blob.BlockBlobService blob_client: blob client
:param str sc_id: storage cluster id
:param str bootstrap_file: customscript bootstrap file
:param list remotefs_files: remotefs shell scripts
:param dict config: configuration dict
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
# check if cluster already exists
logger.debug('checking if storage cluster {} exists'.format(sc_id))
# construct disk map
disk_map = {}
try:
disk_names = list_disks(compute_client, config, restrict_scope=True)
for disk_id, sat in disk_names:
disk_map[disk_id.split('/')[-1]] = (disk_id, sat)
del disk_names
    except msrestazure.azure_exceptions.CloudError:
logger.error(
'could not enumerate required disks for storage cluster {}'.format(
sc_id))
raise
# check vms
for i in range(rfs.storage_cluster.vm_count):
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, i)
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
)
raise RuntimeError(
'Existing virtual machine {} found, cannot add this '
'storage cluster'.format(vm.id))
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
pass
else:
raise
# check if all referenced managed disks exist and premium sku
# is specified if premium disk
for disk in rfs.storage_cluster.vm_disk_map[i].disk_array:
if disk not in disk_map:
raise RuntimeError(
('Referenced managed disk {} unavailable in set {} for '
'vm offset {}').format(disk, disk_map, i))
if (disk_map[disk][1] ==
compute_client.disks.models.
StorageAccountTypes.premium_lrs and
not settings.is_premium_storage_vm_size(
rfs.storage_cluster.vm_size)):
raise RuntimeError(
('Premium storage requires premium storage capable '
'vm_size instead of {}'.format(
rfs.storage_cluster.vm_size)))
# confirm before proceeding
if not util.confirm_action(
config, 'create storage cluster {}'.format(sc_id)):
return
# create resource group if it doesn't exist
resource.create_resource_group(
resource_client, rfs.storage_cluster.resource_group,
rfs.storage_cluster.location)
# create storage container
storage.create_storage_containers_nonbatch(blob_client, None, 'remotefs')
# upload scripts to blob storage for customscript vm extension
blob_urls = storage.upload_for_nonbatch(
blob_client, remotefs_files, 'remotefs')
# async operation dictionary
async_ops = {}
# create nsg
async_ops['nsg'] = resource.AsyncOperation(functools.partial(
resource.create_network_security_group, network_client,
rfs.storage_cluster))
# create static private ip block
if rfs.storage_cluster.file_server.type == 'nfs':
private_ips = None
logger.debug('using dynamic private ip address allocation')
else:
# follow Azure numbering scheme: start offset at 4
private_ips = [
x for x in util.ip_from_address_prefix(
rfs.storage_cluster.virtual_network.subnet_address_prefix,
start_offset=4,
max=rfs.storage_cluster.vm_count)
]
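        # e.g. assuming a 10.0.0.0/24 subnet prefix and a vm_count of 3,
        # this would presumably yield 10.0.0.4 through 10.0.0.6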
logger.debug('static private ip addresses to assign: {}'.format(
private_ips))
# create virtual network and subnet if specified
vnet, subnet = resource.create_virtual_network_and_subnet(
resource_client, network_client,
rfs.storage_cluster.virtual_network.resource_group,
rfs.storage_cluster.location,
rfs.storage_cluster.virtual_network)
# create public ips
pips = None
if rfs.storage_cluster.public_ip.enabled:
async_ops['pips'] = {}
for i in range(rfs.storage_cluster.vm_count):
async_ops['pips'][i] = resource.AsyncOperation(functools.partial(
resource.create_public_ip, network_client,
rfs.storage_cluster, i))
logger.debug('waiting for public ips to provision')
pips = {}
for offset in async_ops['pips']:
pip = async_ops['pips'][offset].result()
logger.info(
('public ip: {} [provisioning_state={} ip_address={} '
'public_ip_allocation={}]').format(
pip.id, pip.provisioning_state,
pip.ip_address, pip.public_ip_allocation_method))
pips[offset] = pip
else:
logger.info('public ip is disabled for storage cluster: {}'.format(
sc_id))
# get nsg
logger.debug('waiting for network security group to provision')
nsg = async_ops['nsg'].result()
# create nics
async_ops['nics'] = {}
for i in range(rfs.storage_cluster.vm_count):
async_ops['nics'][i] = resource.AsyncOperation(functools.partial(
resource.create_network_interface, network_client,
rfs.storage_cluster, subnet, nsg, private_ips, pips, i))
# create availability set if vm_count > 1, this call is not async
availset = _create_availability_set(compute_client, rfs)
# wait for nics to be created
logger.debug('waiting for network interfaces to provision')
nics = {}
for offset in async_ops['nics']:
nic = async_ops['nics'][offset].result()
logger.info(
('network interface: {} [provisioning_state={} private_ip={} '
'private_ip_allocation_method={} network_security_group={} '
'accelerated_networking={}]').format(
nic.id, nic.provisioning_state,
nic.ip_configurations[0].private_ip_address,
nic.ip_configurations[0].private_ip_allocation_method,
nsg.name if nsg is not None else None,
nic.enable_accelerated_networking))
nics[offset] = nic
# read or generate ssh keys
if util.is_not_empty(rfs.storage_cluster.ssh.ssh_public_key_data):
key_data = rfs.storage_cluster.ssh.ssh_public_key_data
else:
# create universal ssh key for all vms if not specified
ssh_pub_key = rfs.storage_cluster.ssh.ssh_public_key
if ssh_pub_key is None:
_, ssh_pub_key = crypto.generate_ssh_keypair(
rfs.storage_cluster.ssh.generated_file_export_path,
crypto.get_remotefs_ssh_key_prefix())
# read public key data
with ssh_pub_key.open('rb') as fd:
key_data = fd.read().decode('utf8')
ssh_pub_key = compute_client.virtual_machines.models.SshPublicKey(
path='/home/{}/.ssh/authorized_keys'.format(
rfs.storage_cluster.ssh.username),
key_data=key_data,
)
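    # this public key is injected into authorized_keys for the configured
    # ssh username on every vm provisioned below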
# create vms
async_ops['vms'] = {}
for i in range(rfs.storage_cluster.vm_count):
async_ops['vms'][i] = resource.AsyncOperation(functools.partial(
resource.create_virtual_machine, compute_client,
rfs.storage_cluster, availset, nics, disk_map, ssh_pub_key, i))
# wait for vms to be created
logger.info(
'waiting for {} virtual machines to provision'.format(
len(async_ops['vms'])))
vms = {}
for offset in async_ops['vms']:
vms[offset] = async_ops['vms'][offset].result()
logger.debug('{} virtual machines created'.format(len(vms)))
# wait for all vms to be created before installing extensions to prevent
# variability in wait times and timeouts during customscript
async_ops['vmext'] = {}
for i in range(rfs.storage_cluster.vm_count):
# install vm extension
async_ops['vmext'][i] = resource.AsyncOperation(
functools.partial(
_create_virtual_machine_extension, compute_client, rfs,
bootstrap_file, blob_urls, vms[i].name, disk_map,
private_ips, i, settings.verbose(config)),
max_retries=0,
)
logger.debug('waiting for virtual machine extensions to provision')
for offset in async_ops['vmext']:
# get ip info for vm
if util.is_none_or_empty(pips):
ipinfo = 'private_ip_address={}'.format(
nics[offset].ip_configurations[0].private_ip_address)
else:
# refresh public ip for vm
pip = network_client.public_ip_addresses.get(
resource_group_name=rfs.storage_cluster.resource_group,
public_ip_address_name=pips[offset].name,
)
ipinfo = 'fqdn={} public_ip_address={}'.format(
pip.dns_settings.fqdn, pip.ip_address)
# get vm extension result
vm_ext = async_ops['vmext'][offset].result()
vm = vms[offset]
logger.info(
('virtual machine: {} [provisioning_state={}/{} '
'vm_size={} {}]').format(
vm.id, vm.provisioning_state, vm_ext.provisioning_state,
vm.hardware_profile.vm_size, ipinfo))
def resize_storage_cluster(
compute_client, network_client, blob_client, config, sc_id,
bootstrap_file, addbrick_file, remotefs_files):
    # type: (azure.mgmt.compute.ComputeManagementClient,
    #        azure.mgmt.network.NetworkManagementClient,
    #        azure.storage.blob.BlockBlobService, dict, str, str, str,
    #        list) -> bool
"""Resize a storage cluster (increase size only for now)
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param azure.mgmt.network.NetworkManagementClient network_client:
network client
    :param azure.storage.blob.BlockBlobService blob_client: blob client
    :param dict config: configuration dict
:param str sc_id: storage cluster id
:param str bootstrap_file: bootstrap file
:param str addbrick_file: glusterfs addbrick file
:param list remotefs_files: remotefs files to upload
:rtype: bool
:return: if cluster was resized
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
# if storage cluster is not glusterfs, exit
if rfs.storage_cluster.file_server.type != 'glusterfs':
raise ValueError(
'Resize is only supported on glusterfs storage clusters')
# only allow certain types of resizes to proceed
# for now disallow resize on all stripe volumes, can be relaxed in
# the future
voltype = settings.get_file_server_glusterfs_volume_type(
rfs.storage_cluster).lower()
if 'stripe' in voltype:
raise RuntimeError('Cannot resize glusterfs striped volumes')
# construct disk map
disk_map = {}
disk_names = list_disks(compute_client, config, restrict_scope=True)
for disk_id, sat in disk_names:
disk_map[disk_id.split('/')[-1]] = (disk_id, sat)
del disk_names
# get existing vms
new_vms = []
pe_vms = {}
all_pe_disks = set()
vnet_name = None
subnet_name = None
nsg_name = None
for i in range(rfs.storage_cluster.vm_count):
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, i)
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
new_vms.append(i)
continue
else:
raise
entry = {
'vm': vm,
'disks': set(),
}
for dd in vm.storage_profile.data_disks:
entry['disks'].add(dd.name)
all_pe_disks.add(dd.name.lower())
# get vnet, subnet, nsg names
if vnet_name is None or subnet_name is None or nsg_name is None:
_, _, subnet_name, vnet_name, nsg_name = \
resource.get_resource_names_from_virtual_machine(
compute_client, network_client, rfs.storage_cluster, vm)
# add vm to map
pe_vms[i] = entry
# check early return conditions
if len(new_vms) == 0:
logger.warning(
'no new virtual machines to add in storage cluster {}'.format(
sc_id))
return False
# ensure that new disks to add are not already attached and
# are provisioned
for i in new_vms:
for disk in rfs.storage_cluster.vm_disk_map[i].disk_array:
if disk.lower() in all_pe_disks:
raise RuntimeError(
'Disk {} for new VM {} is already attached'.format(
disk, i))
# check disks for new vms are provisioned
if disk not in disk_map:
raise RuntimeError(
('Disk {} for new VM {} is not provisioned in '
'resource group {}').format(
disk, i, rfs.storage_cluster.resource_group))
logger.warning(
('**WARNING** cluster resize is an experimental feature and may lead '
'to data loss, unavailability or an unrecoverable state for '
'the storage cluster {}.'.format(sc_id)))
# confirm before proceeding
if not util.confirm_action(
config, 'resize storage cluster {}'.format(sc_id)):
return False
# re-create storage container in case it got deleted
storage.create_storage_containers_nonbatch(blob_client, None, 'remotefs')
# upload scripts to blob storage for customscript vm extension
blob_urls = storage.upload_for_nonbatch(
blob_client, remotefs_files, 'remotefs')
# create static private ip block, start offset at 4
private_ips = [
x for x in util.ip_from_address_prefix(
rfs.storage_cluster.virtual_network.subnet_address_prefix,
start_offset=4,
max=rfs.storage_cluster.vm_count)
]
logger.debug('static private ip block: {}'.format(private_ips))
async_ops = {}
# create public ips
if rfs.storage_cluster.public_ip.enabled:
async_ops['pips'] = {}
for i in new_vms:
async_ops['pips'][i] = resource.AsyncOperation(functools.partial(
resource.create_public_ip, network_client,
rfs.storage_cluster, i))
else:
logger.info('public ip is disabled for storage cluster: {}'.format(
sc_id))
# get subnet and nsg objects
subnet = network_client.subnets.get(
resource_group_name=rfs.storage_cluster.resource_group,
virtual_network_name=vnet_name,
subnet_name=subnet_name,
)
nsg = network_client.network_security_groups.get(
resource_group_name=rfs.storage_cluster.resource_group,
network_security_group_name=nsg_name,
)
# get ssh login info of prober vm
ssh_info = None
for i in pe_vms:
vm = pe_vms[i]['vm']
ssh_info = _get_ssh_info(
compute_client, network_client, config, sc_id, None, vm.name)
break
if settings.verbose(config):
logger.debug('prober vm: {}'.format(ssh_info))
# wait for public ips
pips = None
if 'pips' in async_ops:
logger.debug('waiting for public ips to provision')
pips = {}
for offset in async_ops['pips']:
pip = async_ops['pips'][offset].result()
logger.info(
('public ip: {} [provisioning_state={} ip_address={} '
'public_ip_allocation={}]').format(
pip.id, pip.provisioning_state,
pip.ip_address, pip.public_ip_allocation_method))
pips[offset] = pip
# create nics
nics = {}
async_ops['nics'] = {}
for i in new_vms:
async_ops['nics'][i] = resource.AsyncOperation(functools.partial(
resource.create_network_interface, network_client,
rfs.storage_cluster, subnet, nsg, private_ips, pips, i))
# get availability set
availset = compute_client.availability_sets.get(
resource_group_name=rfs.storage_cluster.resource_group,
availability_set_name=settings.generate_availability_set_name(
rfs.storage_cluster),
)
# wait for nics to be created
logger.debug('waiting for network interfaces to provision')
for offset in async_ops['nics']:
nic = async_ops['nics'][offset].result()
        logger.info(
            ('network interface: {} [provisioning_state={} private_ip={} '
             'private_ip_allocation_method={} network_security_group={} '
             'accelerated_networking={}]').format(
                nic.id, nic.provisioning_state,
                nic.ip_configurations[0].private_ip_address,
                nic.ip_configurations[0].private_ip_allocation_method,
                nsg.name if nsg is not None else None,
                nic.enable_accelerated_networking))
nics[offset] = nic
# read or generate ssh keys
if util.is_not_empty(rfs.storage_cluster.ssh.ssh_public_key_data):
key_data = rfs.storage_cluster.ssh.ssh_public_key_data
else:
# create universal ssh key for all vms if not specified
ssh_pub_key = rfs.storage_cluster.ssh.ssh_public_key
if ssh_pub_key is None:
# check if ssh key exists first in default location
ssh_pub_key = pathlib.Path(
rfs.storage_cluster.ssh.generated_file_export_path,
crypto.get_remotefs_ssh_key_prefix() + '.pub')
if not ssh_pub_key.exists():
_, ssh_pub_key = crypto.generate_ssh_keypair(
rfs.storage_cluster.ssh.generated_file_export_path,
crypto.get_remotefs_ssh_key_prefix())
# read public key data
with ssh_pub_key.open('rb') as fd:
key_data = fd.read().decode('utf8')
ssh_pub_key = compute_client.virtual_machines.models.SshPublicKey(
path='/home/{}/.ssh/authorized_keys'.format(
rfs.storage_cluster.ssh.username),
key_data=key_data,
)
# create vms
async_ops['vms'] = {}
for i in new_vms:
async_ops['vms'][i] = resource.AsyncOperation(functools.partial(
resource.create_virtual_machine, compute_client,
rfs.storage_cluster, availset, nics, disk_map, ssh_pub_key, i))
# gather all new private ips
new_private_ips = {}
for offset in nics:
new_private_ips[offset] = nics[
offset].ip_configurations[0].private_ip_address
if settings.verbose(config):
logger.debug('new private ips: {}'.format(new_private_ips))
# wait for vms to be created
logger.info(
'waiting for {} virtual machines to provision'.format(
len(async_ops['vms'])))
vm_hostnames = []
vms = {}
for offset in async_ops['vms']:
vms[offset] = async_ops['vms'][offset].result()
# generate vm names in list
vm_hostnames.append(settings.generate_virtual_machine_name(
rfs.storage_cluster, offset))
logger.debug('{} virtual machines created: {}'.format(
len(vms), vm_hostnames))
# wait for all vms to be created before installing extensions to prevent
# variability in wait times and timeouts during customscript
async_ops['vmext'] = {}
for i in new_vms:
# install vm extension
async_ops['vmext'][i] = resource.AsyncOperation(
functools.partial(
_create_virtual_machine_extension, compute_client, rfs,
bootstrap_file, blob_urls, vms[i].name, disk_map, private_ips,
i, settings.verbose(config)),
max_retries=0,
)
logger.debug(
'adding {} bricks to gluster volume, this may take a while'.format(
len(async_ops['vmext'])))
# execute special add brick script
script_cmd = '/opt/batch-shipyard/{asf} {c}{d}{i}{n}{v}'.format(
asf=addbrick_file,
c=' -c {}'.format(rfs.storage_cluster.vm_count),
d=' -d {}'.format(','.join(vm_hostnames)),
i=' -i {}'.format(','.join(list(new_private_ips.values()))),
n=' -n {}'.format(
settings.get_file_server_glusterfs_volume_name(
rfs.storage_cluster)),
v=' -v \'{}\''.format(voltype),
)
if settings.verbose(config):
logger.debug('add brick command: {}'.format(script_cmd))
ssh_priv_key, port, username, ip = ssh_info
proc = crypto.connect_or_exec_ssh_command(
ip, port, ssh_priv_key, username, sync=False,
command=['sudo', script_cmd])
stdout, stderr = proc.communicate()
logline = 'add brick script completed with ec={}'.format(proc.returncode)
if util.is_not_empty(stdout):
if util.on_python2():
stdout = stdout.decode('utf8')
if util.on_windows():
stdout = stdout.replace('\n', os.linesep)
if util.is_not_empty(stderr):
if util.on_python2():
stderr = stderr.decode('utf8')
if util.on_windows():
stderr = stderr.replace('\n', os.linesep)
if proc.returncode != 0:
logger.error(logline)
logger.error('add brick stdout:{}{}'.format(os.linesep, stdout))
logger.error('add brick stderr:{}{}'.format(os.linesep, stderr))
else:
logger.info(logline)
logger.debug('add brick stdout:{}{}'.format(os.linesep, stdout))
del logline
del stdout
del stderr
# wait for new vms to finish custom script extension processing
logger.debug('waiting for virtual machine extensions to provision')
for offset in async_ops['vmext']:
# get ip info for vm
if util.is_none_or_empty(pips):
ipinfo = 'private_ip_address={}'.format(
nics[offset].ip_configurations[0].private_ip_address)
else:
# refresh public ip for vm
pip = network_client.public_ip_addresses.get(
resource_group_name=rfs.storage_cluster.resource_group,
public_ip_address_name=pips[offset].name,
)
ipinfo = 'fqdn={} public_ip_address={}'.format(
pip.dns_settings.fqdn, pip.ip_address)
# get vm extension result
vm_ext = async_ops['vmext'][offset].result()
vm = vms[offset]
logger.info(
('virtual machine: {} [provisioning_state={}/{} '
'vm_size={} {}]').format(
vm.id, vm.provisioning_state, vm_ext.provisioning_state,
vm.hardware_profile.vm_size, ipinfo))
if proc.returncode == 0:
logger.info('storage cluster {} resized'.format(sc_id))
else:
logger.critical('failed to resize cluster {}'.format(sc_id))
def expand_storage_cluster(
compute_client, network_client, config, sc_id, bootstrap_file,
rebalance=False):
# type: (azure.mgmt.compute.ComputeManagementClient,
# azure.mgmt.network.NetworkManagementClient, dict, str, str,
# bool) -> bool
"""Expand a storage cluster
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param azure.mgmt.network.NetworkManagementClient network_client:
network client
:param dict config: configuration dict
:param str sc_id: storage cluster id
:param str bootstrap_file: bootstrap file
:param bool rebalance: rebalance filesystem
:rtype: bool
:return: if cluster was expanded
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
# check if cluster exists
logger.debug('checking if storage cluster {} exists'.format(sc_id))
# construct disk map
disk_map = {}
disk_names = list_disks(compute_client, config, restrict_scope=True)
for disk_id, sat in disk_names:
disk_map[disk_id.split('/')[-1]] = (disk_id, sat)
del disk_names
# check vms
vms = {}
new_disk_count = 0
mdadm_expand = False
for i in range(rfs.storage_cluster.vm_count):
# check if this vm filesystem supports expanding
if (rfs.storage_cluster.vm_disk_map[i].filesystem != 'btrfs' and
rfs.storage_cluster.vm_disk_map[i].raid_level == 0):
mdadm_expand = True
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, i)
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
raise RuntimeError(
'Virtual machine {} not found, cannot expand this '
'storage cluster'.format(vm_name))
else:
raise
# create entry
entry = {
'vm': vm,
'pe_disks': {
'names': set(),
'luns': [],
},
'new_disks': [],
}
# get attached disks
for dd in vm.storage_profile.data_disks:
entry['pe_disks']['names'].add(dd.name)
entry['pe_disks']['luns'].append(dd.lun)
# check if all referenced managed disks exist
for disk in rfs.storage_cluster.vm_disk_map[i].disk_array:
if disk not in disk_map:
raise RuntimeError(
('Referenced managed disk {} unavailable in set {} for '
'vm offset {}. Ensure that this disk has been '
'provisioned first.').format(disk, disk_map, i))
if disk not in entry['pe_disks']['names']:
entry['new_disks'].append(disk)
new_disk_count += 1
# check for proper raid setting and number of disks
pe_len = len(entry['pe_disks']['names'])
if pe_len <= 1 or rfs.storage_cluster.vm_disk_map[i].raid_level != 0:
raise RuntimeError(
'Cannot expand array from {} disk(s) or RAID level {}'.format(
pe_len, rfs.storage_cluster.vm_disk_map[i].raid_level))
# add vm to map
vms[i] = entry
# check early return conditions
if len(vms) == 0:
logger.warning(
'no virtual machines to expand in storage cluster {}'.format(
sc_id))
return False
if settings.verbose(config):
logger.debug('expand settings:{}{}'.format(os.linesep, vms))
if new_disk_count == 0:
logger.error(
'no new disks detected for storage cluster {}'.format(sc_id))
return False
if mdadm_expand:
logger.warning(
'**WARNING** cluster expansion is being performed on mdadm-based '
'RAID arrays. This feature is experimental and can take an '
'extremely long time. Any interruption or unrecoverable '
'failure can result in data loss.')
del mdadm_expand
# confirm before proceeding
if not util.confirm_action(
config, 'expand storage cluster {}'.format(sc_id)):
return False
# attach new data disks to each vm
async_ops = {}
    for key in vms:
        entry = vms[key]
        vm = entry['vm']
        if len(entry['new_disks']) == 0:
            logger.debug(
                'no new disks to attach to virtual machine {}'.format(vm.id))
            continue
premium = False
# sort lun array and get last element
lun = sorted(entry['pe_disks']['luns'])[-1] + 1
for diskname in entry['new_disks']:
if (disk_map[diskname][1] ==
compute_client.disks.models.
StorageAccountTypes.premium_lrs):
premium = True
vm.storage_profile.data_disks.append(
compute_client.disks.models.DataDisk(
lun=lun,
name=diskname,
create_option=compute_client.disks.models.
DiskCreateOptionTypes.attach,
managed_disk=compute_client.disks.models.
ManagedDiskParameters(
id=disk_map[diskname][0],
),
)
)
lun += 1
logger.info(
('attaching {} additional data disks {} to virtual '
'machine {}').format(
len(entry['new_disks']), entry['new_disks'], vm.name))
# update vm
async_ops[key] = (
premium,
resource.AsyncOperation(functools.partial(
compute_client.virtual_machines.create_or_update,
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm.name, parameters=vm))
)
# wait for async ops to complete
if len(async_ops) == 0:
logger.error('no operations started for expansion')
return False
logger.debug(
'waiting for disks to attach to virtual machines and expanding '
'the volume; please be patient as this can take a very long time')
for offset in async_ops:
premium, op = async_ops[offset]
vm = op.result()
vms[offset]['vm'] = vm
# execute bootstrap script via ssh
script_cmd = \
'/opt/batch-shipyard/{bsf} {a}{b}{d}{f}{m}{p}{r}{s}'.format(
bsf=bootstrap_file,
a=' -a',
b=' -b' if rebalance else '',
d=' -d {}'.format(rfs.storage_cluster.hostname_prefix),
f=' -f {}'.format(
rfs.storage_cluster.vm_disk_map[offset].filesystem),
m=' -m {}'.format(
rfs.storage_cluster.file_server.mountpoint),
p=' -p' if premium else '',
r=' -r {}'.format(
rfs.storage_cluster.vm_disk_map[offset].raid_level),
s=' -s {}'.format(rfs.storage_cluster.file_server.type),
)
ssh_priv_key, port, username, ip = _get_ssh_info(
compute_client, network_client, config, sc_id, None, vm.name)
if settings.verbose(config):
logger.debug('bootstrap command: {}'.format(script_cmd))
proc = crypto.connect_or_exec_ssh_command(
ip, port, ssh_priv_key, username, sync=False,
command=['sudo', script_cmd])
stdout, stderr = proc.communicate()
if util.is_not_empty(stdout):
if util.on_python2():
stdout = stdout.decode('utf8')
if util.on_windows():
stdout = stdout.replace('\n', os.linesep)
if util.is_not_empty(stderr):
if util.on_python2():
stderr = stderr.decode('utf8')
if util.on_windows():
stderr = stderr.replace('\n', os.linesep)
vms[offset]['status'] = proc.returncode
vms[offset]['stdout'] = '>>stdout>> {}:{}{}'.format(
vm.name, os.linesep, stdout)
vms[offset]['stderr'] = '>>stderr>> {}:{}{}'.format(
vm.name, os.linesep, stderr)
logger.info('disk attach operations completed')
succeeded = True
for key in vms:
entry = vms[key]
vm = entry['vm']
log = 'bootstrap exit code for virtual machine {}: {}'.format(
vm.name, entry['status'])
if entry['status'] == 0:
logger.info(log)
logger.debug(entry['stdout'])
else:
logger.error(log)
logger.error(entry['stdout'])
logger.error(entry['stderr'])
succeeded = False
if succeeded:
logger.info('storage cluster {} expanded'.format(sc_id))
else:
logger.critical('failed to expand cluster {}'.format(sc_id))
return succeeded
def _delete_availability_set(compute_client, rg_name, as_name):
# type: (azure.mgmt.compute.ComputeManagementClient, str, str) ->
# msrestazure.azure_operation.AzureOperationPoller
"""Delete an availability set
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param str rg_name: resource group name
:param str as_name: availability set name
:rtype: msrestazure.azure_operation.AzureOperationPoller
:return: async op poller
"""
logger.debug('deleting availability set {}'.format(as_name))
return compute_client.availability_sets.delete(
resource_group_name=rg_name,
availability_set_name=as_name,
)
def delete_storage_cluster(
resource_client, compute_client, network_client, blob_client, config,
sc_id, delete_data_disks=False, delete_virtual_network=False,
delete_resource_group=False, generate_from_prefix=False, wait=False):
# type: (azure.mgmt.resource.resources.ResourceManagementClient,
# azure.mgmt.compute.ComputeManagementClient,
# azure.mgmt.network.NetworkManagementClient,
# azure.storage.blob.BlockBlobService, dict, str, bool,
# bool, bool, bool, bool) -> None
"""Delete a storage cluster
:param azure.mgmt.resource.resources.ResourceManagementClient
resource_client: resource client
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param azure.mgmt.network.NetworkManagementClient network_client:
network client
:param azure.storage.blob.BlockBlobService blob_client: blob client
:param dict config: configuration dict
:param str sc_id: storage cluster id
:param bool delete_data_disks: delete managed data disks
:param bool delete_virtual_network: delete vnet
:param bool delete_resource_group: delete resource group
:param bool generate_from_prefix: generate resources from hostname prefix
:param bool wait: wait for completion
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
# delete rg if specified
if delete_resource_group:
if util.confirm_action(
config, 'delete resource group {}'.format(
rfs.storage_cluster.resource_group)):
logger.info('deleting resource group {}'.format(
rfs.storage_cluster.resource_group))
async_delete = resource_client.resource_groups.delete(
resource_group_name=rfs.storage_cluster.resource_group)
if wait:
logger.debug('waiting for resource group {} to delete'.format(
rfs.storage_cluster.resource_group))
async_delete.result()
logger.info('resource group {} deleted'.format(
rfs.storage_cluster.resource_group))
return
if not util.confirm_action(
config, 'delete storage cluster {}'.format(sc_id)):
return
    # get vms and cache for concurrent async ops
resources = {}
for i in range(rfs.storage_cluster.vm_count):
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, i)
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
logger.warning('virtual machine {} not found'.format(vm_name))
if generate_from_prefix:
logger.warning(
'OS and data disks for this virtual machine will not '
'be deleted, please use "fs disks del" to delete '
'those resources if desired')
resources[i] = {
'vm': settings.generate_virtual_machine_name(
rfs.storage_cluster, i),
'as': None,
'nic': settings.generate_network_interface_name(
rfs.storage_cluster, i),
'pip': settings.generate_public_ip_name(
rfs.storage_cluster, i),
'subnet': None,
'nsg': settings.generate_network_security_group_name(
rfs.storage_cluster),
'vnet': None,
'os_disk': None,
'data_disks': [],
}
if rfs.storage_cluster.vm_count > 1:
resources[i]['as'] = \
settings.generate_availability_set_name(
rfs.storage_cluster)
continue
else:
raise
else:
# get resources connected to vm
nic, pip, subnet, vnet, nsg = \
resource.get_resource_names_from_virtual_machine(
compute_client, network_client, rfs.storage_cluster, vm)
resources[i] = {
'vm': vm.name,
'arm_id': vm.id,
'id': vm.vm_id,
'as': None,
'nic': nic,
'pip': pip,
'subnet': subnet,
'nsg': nsg,
'vnet': vnet,
'os_disk': vm.storage_profile.os_disk.name,
'data_disks': [],
}
# populate availability set
if vm.availability_set is not None:
resources[i]['as'] = vm.availability_set.id.split('/')[-1]
# populate data disks
if delete_data_disks:
for disk in vm.storage_profile.data_disks:
resources[i]['data_disks'].append(disk.name)
# unset virtual network if not specified to delete
if not delete_virtual_network:
resources[i]['subnet'] = None
resources[i]['vnet'] = None
if len(resources) == 0:
logger.warning('no resources deleted')
return
if settings.verbose(config):
logger.debug('deleting the following resources:{}{}'.format(
os.linesep, json.dumps(resources, sort_keys=True, indent=4)))
# delete storage container
storage.delete_storage_containers_nonbatch(blob_client, None, 'remotefs')
# create async op holder
async_ops = {}
# delete vms
async_ops['vms'] = {}
for key in resources:
vm_name = resources[key]['vm']
async_ops['vms'][vm_name] = resource.AsyncOperation(functools.partial(
resource.delete_virtual_machine, compute_client,
rfs.storage_cluster.resource_group, vm_name), retry_conflict=True)
logger.info(
'waiting for {} virtual machines to delete'.format(
len(async_ops['vms'])))
for vm_name in async_ops['vms']:
async_ops['vms'][vm_name].result()
logger.info('{} virtual machines deleted'.format(len(async_ops['vms'])))
# delete nics
async_ops['nics'] = {}
for key in resources:
nic_name = resources[key]['nic']
async_ops['nics'][nic_name] = resource.AsyncOperation(
functools.partial(
resource.delete_network_interface, network_client,
rfs.storage_cluster.resource_group, nic_name),
retry_conflict=True
)
# wait for nics to delete
logger.debug('waiting for {} network interfaces to delete'.format(
len(async_ops['nics'])))
for nic_name in async_ops['nics']:
async_ops['nics'][nic_name].result()
logger.info('{} network interfaces deleted'.format(len(async_ops['nics'])))
# delete data disks if specified
async_ops['data_disks'] = []
for key in resources:
data_disks = resources[key]['data_disks']
if util.is_none_or_empty(data_disks):
continue
if len(data_disks) > 0:
async_ops['data_disks'].append(delete_managed_disks(
resource_client, compute_client, config, data_disks,
resource_group=rfs.managed_disks.resource_group, wait=False))
# delete os disks
async_ops['os_disk'] = []
for key in resources:
os_disk = resources[key]['os_disk']
if util.is_none_or_empty(os_disk):
continue
async_ops['os_disk'].append(delete_managed_disks(
resource_client, compute_client, config, os_disk,
resource_group=rfs.storage_cluster.resource_group, wait=False,
confirm_override=True))
# delete nsg
deleted = set()
async_ops['nsg'] = {}
for key in resources:
nsg_name = resources[key]['nsg']
if nsg_name in deleted:
continue
deleted.add(nsg_name)
async_ops['nsg'][nsg_name] = resource.AsyncOperation(functools.partial(
resource.delete_network_security_group, network_client,
rfs.storage_cluster.resource_group, nsg_name), retry_conflict=True)
deleted.clear()
# delete public ips
async_ops['pips'] = {}
for key in resources:
pip_name = resources[key]['pip']
if util.is_none_or_empty(pip_name):
continue
async_ops['pips'][pip_name] = resource.AsyncOperation(
functools.partial(
resource.delete_public_ip, network_client,
rfs.storage_cluster.resource_group, pip_name),
retry_conflict=True
)
logger.debug('waiting for {} public ips to delete'.format(
len(async_ops['pips'])))
for pip_name in async_ops['pips']:
async_ops['pips'][pip_name].result()
logger.info('{} public ips deleted'.format(len(async_ops['pips'])))
# delete subnets
async_ops['subnets'] = {}
for key in resources:
subnet_name = resources[key]['subnet']
vnet_name = resources[key]['vnet']
if util.is_none_or_empty(subnet_name) or subnet_name in deleted:
continue
deleted.add(subnet_name)
async_ops['subnets'][subnet_name] = resource.AsyncOperation(
functools.partial(
resource.delete_subnet, network_client,
rfs.storage_cluster.virtual_network.resource_group,
vnet_name, subnet_name),
retry_conflict=True
)
logger.debug('waiting for {} subnets to delete'.format(
len(async_ops['subnets'])))
for subnet_name in async_ops['subnets']:
async_ops['subnets'][subnet_name].result()
logger.info('{} subnets deleted'.format(len(async_ops['subnets'])))
deleted.clear()
# delete vnet
async_ops['vnets'] = {}
for key in resources:
vnet_name = resources[key]['vnet']
if util.is_none_or_empty(vnet_name) or vnet_name in deleted:
continue
deleted.add(vnet_name)
async_ops['vnets'][vnet_name] = resource.AsyncOperation(
functools.partial(
resource.delete_virtual_network, network_client,
rfs.storage_cluster.virtual_network.resource_group, vnet_name),
retry_conflict=True
)
deleted.clear()
# delete availability set, this is synchronous
for key in resources:
as_name = resources[key]['as']
if util.is_none_or_empty(as_name) or as_name in deleted:
continue
deleted.add(as_name)
_delete_availability_set(
compute_client, rfs.storage_cluster.resource_group, as_name)
logger.info('availability set {} deleted'.format(as_name))
deleted.clear()
# delete boot diagnostics storage containers
for key in resources:
try:
vm_name = resources[key]['vm']
vm_id = resources[key]['id']
except KeyError:
pass
else:
storage.delete_storage_containers_boot_diagnostics(
blob_client, vm_name, vm_id)
# wait for all async ops to complete
if wait:
logger.debug('waiting for network security groups to delete')
for nsg_name in async_ops['nsg']:
async_ops['nsg'][nsg_name].result()
logger.info('{} network security groups deleted'.format(
len(async_ops['nsg'])))
logger.debug('waiting for virtual networks to delete')
for vnet_name in async_ops['vnets']:
async_ops['vnets'][vnet_name].result()
logger.info('{} virtual networks deleted'.format(
len(async_ops['vnets'])))
logger.debug('waiting for managed os disks to delete')
count = 0
for os_disk_set in async_ops['os_disk']:
for os_disk in os_disk_set:
os_disk_set[os_disk].result()
count += 1
logger.info('{} managed os disks deleted'.format(count))
if len(async_ops['data_disks']) > 0:
logger.debug('waiting for managed data disks to delete')
count = 0
for data_disk_set in async_ops['data_disks']:
for data_disk in data_disk_set:
data_disk_set[data_disk].result()
count += 1
logger.info('{} managed data disks deleted'.format(count))
def suspend_storage_cluster(compute_client, config, sc_id, wait=False):
# type: (azure.mgmt.compute.ComputeManagementClient, dict, str,
# bool) -> None
"""Suspend a storage cluster
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param dict config: configuration dict
:param str sc_id: storage cluster id
:param bool wait: wait for suspension to complete
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
vms = []
for i in range(rfs.storage_cluster.vm_count):
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, i)
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
logger.error('virtual machine {} not found'.format(vm_name))
continue
else:
raise
else:
vms.append(vm)
if len(vms) == 0:
logger.warning('no virtual machines to suspend')
return
# check if glusterfs and warn
if rfs.storage_cluster.file_server.type == 'glusterfs':
logger.warning(
'**WARNING** Suspending a glusterfs cluster is risky. Depending '
'upon the volume type and state of the bricks at the time of '
'suspension, a variety of issues can occur such as: unsuccessful '
'restart of the cluster, split-brain states, or even data loss.')
if not util.confirm_action(
config, 'suspend storage cluster {}'.format(sc_id)):
return
# deallocate each vm
async_ops = {}
for vm in vms:
async_ops[vm.name] = resource.AsyncOperation(functools.partial(
resource.deallocate_virtual_machine, compute_client,
rfs.storage_cluster.resource_group, vm.name), retry_conflict=True)
if wait:
logger.info(
'waiting for {} virtual machines to deallocate'.format(
len(async_ops)))
for vm_name in async_ops:
async_ops[vm_name].result()
logger.info('{} virtual machines deallocated'.format(len(async_ops)))
def start_storage_cluster(compute_client, config, sc_id, wait=False):
# type: (azure.mgmt.compute.ComputeManagementClient, dict, str,
# bool) -> None
"""Starts a suspended storage cluster
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param dict config: configuration dict
:param str sc_id: storage cluster id
:param bool wait: wait for restart to complete
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
vms = []
for i in range(rfs.storage_cluster.vm_count):
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, i)
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
raise RuntimeError(
'virtual machine {} not found'.format(vm_name))
else:
raise
else:
vms.append(vm)
if len(vms) == 0:
logger.error('no virtual machines to restart')
return
if not util.confirm_action(
config, 'start suspended storage cluster {}'.format(sc_id)):
return
# start each vm
async_ops = {}
for vm in vms:
async_ops[vm.name] = resource.AsyncOperation(functools.partial(
resource.start_virtual_machine, compute_client,
rfs.storage_cluster.resource_group, vm.name))
if wait:
logger.info(
'waiting for {} virtual machines to start'.format(len(async_ops)))
for vm_name in async_ops:
async_ops[vm_name].result()
logger.info('{} virtual machines started'.format(len(async_ops)))
def stat_storage_cluster(
compute_client, network_client, config, sc_id, status_script,
detail=False, hosts=False):
# type: (azure.mgmt.compute.ComputeManagementClient,
# azure.mgmt.network.NetworkManagementClient, dict, str, str,
# bool, bool) -> None
"""Retrieve status of a storage cluster
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param azure.mgmt.network.NetworkManagementClient network_client:
network client
:param dict config: configuration dict
:param str sc_id: storage cluster id
:param str status_script: status script
:param bool detail: detailed status
:param bool hosts: dump info for /etc/hosts
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
# retrieve all vms
vms = []
for i in range(rfs.storage_cluster.vm_count):
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, i)
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
expand=compute_client.virtual_machines.models.
InstanceViewTypes.instance_view,
)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
logger.error('virtual machine {} not found'.format(vm_name))
else:
raise
else:
vms.append((vm, i))
if len(vms) == 0:
logger.error(
'no virtual machines to query for storage cluster {}'.format(
sc_id))
return
# fetch vm status
fsstatus = []
vmstatus = {}
for vm, offset in vms:
powerstate = None
for status in vm.instance_view.statuses:
if status.code.startswith('PowerState'):
powerstate = status.code
diskstates = []
if util.is_not_empty(vm.instance_view.disks):
for disk in vm.instance_view.disks:
for status in disk.statuses:
diskstates.append(status.code)
# get nic/pip connected to vm
nic, pip = resource.get_nic_and_pip_from_virtual_machine(
network_client, rfs.storage_cluster.resource_group, vm)
# get resource names (pass cached data to prevent another lookup)
_, _, subnet, vnet, nsg = \
resource.get_resource_names_from_virtual_machine(
compute_client, network_client, rfs.storage_cluster, vm,
nic=nic, pip=pip)
# stat data disks
disks = {}
total_size_gb = 0
for dd in vm.storage_profile.data_disks:
total_size_gb += dd.disk_size_gb
disks[dd.name] = {
'lun': dd.lun,
'caching': str(dd.caching),
'disk_size_gb': dd.disk_size_gb,
'type': str(dd.managed_disk.storage_account_type),
}
disks['disk_array_size_gb'] = total_size_gb
# detailed settings: run stat script via ssh
if detail:
ssh_priv_key, port, username, ip = _get_ssh_info(
compute_client, network_client, config, sc_id, None, vm.name,
nic=nic, pip=pip)
offset = settings.get_offset_from_virtual_machine_name(vm.name)
script_cmd = '/opt/batch-shipyard/{sf} {c}{f}{m}{n}{r}{s}'.format(
sf=status_script,
c=' -c' if util.is_not_empty(
rfs.storage_cluster.file_server.samba.share_name) else '',
f=' -f {}'.format(
rfs.storage_cluster.vm_disk_map[offset].filesystem),
m=' -m {}'.format(
rfs.storage_cluster.file_server.mountpoint),
n=' -n {}'.format(
settings.get_file_server_glusterfs_volume_name(
rfs.storage_cluster)),
r=' -r {}'.format(
rfs.storage_cluster.vm_disk_map[offset].raid_level),
s=' -s {}'.format(rfs.storage_cluster.file_server.type),
)
proc = crypto.connect_or_exec_ssh_command(
ip, port, ssh_priv_key, username, sync=False,
command=['sudo', script_cmd])
stdout = proc.communicate()[0]
if util.is_not_empty(stdout):
if util.on_python2():
stdout = stdout.decode('utf8')
if util.on_windows():
stdout = stdout.replace('\n', os.linesep)
fsstatfmt = '>> File Server Status for {} ec={}:{}{}'
if util.on_python2():
fsstatfmt = unicode(fsstatfmt) # noqa
fsstatus.append(
fsstatfmt.format(vm.name, proc.returncode, os.linesep, stdout))
vmstatus[vm.name] = {
'vm_size': vm.hardware_profile.vm_size,
'powerstate': powerstate,
'provisioning_state': vm.provisioning_state,
'availability_set':
vm.availability_set.id.split('/')[-1]
if vm.availability_set is not None else None,
'update_domain/fault_domain': '{}/{}'.format(
vm.instance_view.platform_update_domain,
vm.instance_view.platform_fault_domain),
'fqdn': pip.dns_settings.fqdn if pip is not None else None,
'public_ip_address': pip.ip_address if pip is not None else None,
'public_ip_allocation':
pip.public_ip_allocation_method if pip is not None else None,
'private_ip_address': nic.ip_configurations[0].private_ip_address,
'private_ip_allocation':
nic.ip_configurations[0].private_ip_allocation_method,
'admin_username': vm.os_profile.admin_username,
'accelerated_networking': nic.enable_accelerated_networking,
'virtual_network': vnet,
'subnet': subnet,
'network_security_group': nsg,
'data_disks': disks,
}
if detail:
log = '{}{}{}{}'.format(
json.dumps(vmstatus, sort_keys=True, indent=4),
os.linesep, os.linesep,
'{}{}'.format(os.linesep, os.linesep).join(
fsstatus) if detail else '')
else:
log = '{}'.format(json.dumps(vmstatus, sort_keys=True, indent=4))
logger.info('storage cluster {} virtual machine status:{}{}'.format(
sc_id, os.linesep, log))
if hosts:
if rfs.storage_cluster.file_server.type != 'glusterfs':
raise ValueError('hosts option not compatible with glusterfs')
print(('{}>> Ensure that you have enabled the "glusterfs" network '
'security rule.{}>> Add the following entries to your '
'/etc/hosts to mount the gluster volume.{}>> Mount the '
'source as -t glusterfs from {}:/{}{}'.format(
os.linesep, os.linesep, os.linesep, next(iter(vmstatus)),
settings.get_file_server_glusterfs_volume_name(
rfs.storage_cluster), os.linesep)))
for vmname in vmstatus:
print('{} {}'.format(
vmstatus[vmname]['public_ip_address'], vmname))
def _get_ssh_info(
compute_client, network_client, config, sc_id, cardinal, hostname,
nic=None, pip=None):
    # type: (azure.mgmt.compute.ComputeManagementClient,
    #        azure.mgmt.network.NetworkManagementClient, dict, str, int,
    #        str, networkmodels.NetworkInterface,
    #        networkmodels.PublicIPAddress) ->
    #        Tuple[pathlib.Path, int, str, str]
"""SSH to a node in storage cluster
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param azure.mgmt.network.NetworkManagementClient network_client:
network client
:param dict config: configuration dict
:param str sc_id: storage cluster id
:param int cardinal: cardinal number
:param str hostname: hostname
:param networkmodels.NetworkInterface nic: network interface
:param networkmodels.PublicIPAddress pip: public ip
:rtype: tuple
    :return: (ssh private key, port, username, ip)
"""
# retrieve remotefs settings
if util.is_none_or_empty(sc_id):
raise ValueError('storage cluster id not specified')
rfs = settings.remotefs_settings(config, sc_id)
# retrieve specific vm
if cardinal is not None:
vm_name = settings.generate_virtual_machine_name(
rfs.storage_cluster, cardinal)
else:
vm_name = hostname
try:
vm = compute_client.virtual_machines.get(
resource_group_name=rfs.storage_cluster.resource_group,
vm_name=vm_name,
)
except msrestazure.azure_exceptions.CloudError as e:
if e.status_code == 404:
raise RuntimeError('virtual machine {} not found'.format(vm_name))
else:
raise
# get connection ip
if rfs.storage_cluster.public_ip.enabled:
# get pip connected to vm
if pip is None:
_, pip = resource.get_nic_and_pip_from_virtual_machine(
network_client, rfs.storage_cluster.resource_group, vm)
ip_address = pip.ip_address
else:
if nic is None:
nic, _ = resource.get_nic_and_pip_from_virtual_machine(
network_client, rfs.storage_cluster.resource_group, vm)
ip_address = nic.ip_configurations[0].private_ip_address
# return connection info for vm
if rfs.storage_cluster.ssh.ssh_private_key is not None:
ssh_priv_key = rfs.storage_cluster.ssh.ssh_private_key
else:
ssh_priv_key = pathlib.Path(
rfs.storage_cluster.ssh.generated_file_export_path,
crypto.get_remotefs_ssh_key_prefix())
if not ssh_priv_key.exists():
raise RuntimeError('SSH private key file not found at: {}'.format(
ssh_priv_key))
return ssh_priv_key, 22, vm.os_profile.admin_username, ip_address
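# Example (hypothetical storage cluster id): resolve connection info for the
# first vm of a cluster and use it to drive an ssh invocation:
#   ssh_priv_key, port, username, ip = _get_ssh_info(
#       compute_client, network_client, config, 'mycluster', 0, None)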
def ssh_storage_cluster(
compute_client, network_client, config, sc_id, cardinal, hostname,
tty, command):
# type: (azure.mgmt.compute.ComputeManagementClient,
# azure.mgmt.network.NetworkManagementClient, dict, str, int,
# str, bool, tuple) -> None
"""SSH to a node in storage cluster
:param azure.mgmt.compute.ComputeManagementClient compute_client:
compute client
:param azure.mgmt.network.NetworkManagementClient network_client:
network client
:param dict config: configuration dict
:param str sc_id: storage cluster id
:param int cardinal: cardinal number
:param str hostname: hostname
:param bool tty: allocate pseudo-tty
:param tuple command: command to execute
"""
ssh_priv_key, port, username, ip = _get_ssh_info(
compute_client, network_client, config, sc_id, cardinal, hostname)
crypto.connect_or_exec_ssh_command(
ip, port, ssh_priv_key, username, tty=tty, command=command)
avg_line_length: 43.02214 | max_line_length: 79 | alphanum_fraction: 0.62207

hexsha: 3a1fe1d9d3e28dc7897c0c9bdc47321a3c182cd7 | size: 13,130 | ext: py | lang: Python
max_stars_repo_path: tests/test_config_entries.py | max_stars_repo_name: mfrueh/home-assistant | max_stars_repo_head_hexsha: 5d64628b5bf4713016883282fd54de9c7d5089d0 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: tests/test_config_entries.py | max_issues_repo_name: mfrueh/home-assistant | max_issues_repo_head_hexsha: 5d64628b5bf4713016883282fd54de9c7d5089d0 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: tests/test_config_entries.py | max_forks_repo_name: mfrueh/home-assistant | max_forks_repo_head_hexsha: 5d64628b5bf4713016883282fd54de9c7d5089d0 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2021-03-13T18:15:31.000Z | max_forks_repo_forks_event_max_datetime: 2021-03-13T18:15:31.000Z
content:
"""Test the config manager."""
import asyncio
from unittest.mock import MagicMock, patch, mock_open
import pytest
import voluptuous as vol
from homeassistant import config_entries, loader
from homeassistant.setup import async_setup_component
from tests.common import MockModule, mock_coro, MockConfigEntry
@pytest.fixture
def manager(hass):
"""Fixture of a loaded config manager."""
manager = config_entries.ConfigEntries(hass, {})
manager._entries = []
hass.config_entries = manager
return manager
@asyncio.coroutine
def test_call_setup_entry(hass):
"""Test we call <component>.setup_entry."""
MockConfigEntry(domain='comp').add_to_hass(hass)
mock_setup_entry = MagicMock(return_value=mock_coro(True))
loader.set_component(
'comp',
MockModule('comp', async_setup_entry=mock_setup_entry))
result = yield from async_setup_component(hass, 'comp', {})
assert result
assert len(mock_setup_entry.mock_calls) == 1
@asyncio.coroutine
def test_remove_entry(manager):
"""Test that we can remove an entry."""
mock_unload_entry = MagicMock(return_value=mock_coro(True))
loader.set_component(
'test',
MockModule('comp', async_unload_entry=mock_unload_entry))
MockConfigEntry(domain='test', entry_id='test1').add_to_manager(manager)
MockConfigEntry(domain='test', entry_id='test2').add_to_manager(manager)
MockConfigEntry(domain='test', entry_id='test3').add_to_manager(manager)
assert [item.entry_id for item in manager.async_entries()] == \
['test1', 'test2', 'test3']
result = yield from manager.async_remove('test2')
assert result == {
'require_restart': False
}
assert [item.entry_id for item in manager.async_entries()] == \
['test1', 'test3']
assert len(mock_unload_entry.mock_calls) == 1
@asyncio.coroutine
def test_remove_entry_raises(manager):
"""Test if a component raises while removing entry."""
@asyncio.coroutine
def mock_unload_entry(hass, entry):
"""Mock unload entry function."""
raise Exception("BROKEN")
loader.set_component(
'test',
MockModule('comp', async_unload_entry=mock_unload_entry))
MockConfigEntry(domain='test', entry_id='test1').add_to_manager(manager)
MockConfigEntry(domain='test', entry_id='test2').add_to_manager(manager)
MockConfigEntry(domain='test', entry_id='test3').add_to_manager(manager)
assert [item.entry_id for item in manager.async_entries()] == \
['test1', 'test2', 'test3']
result = yield from manager.async_remove('test2')
assert result == {
'require_restart': True
}
assert [item.entry_id for item in manager.async_entries()] == \
['test1', 'test3']
@asyncio.coroutine
def test_add_entry_calls_setup_entry(hass, manager):
"""Test we call setup_config_entry."""
mock_setup_entry = MagicMock(return_value=mock_coro(True))
loader.set_component(
'comp',
MockModule('comp', async_setup_entry=mock_setup_entry))
class TestFlow(config_entries.ConfigFlowHandler):
VERSION = 1
@asyncio.coroutine
def async_step_init(self, user_input=None):
return self.async_create_entry(
title='title',
data={
'token': 'supersecret'
})
with patch.dict(config_entries.HANDLERS, {'comp': TestFlow}):
yield from manager.flow.async_init('comp')
yield from hass.async_block_till_done()
assert len(mock_setup_entry.mock_calls) == 1
p_hass, p_entry = mock_setup_entry.mock_calls[0][1]
assert p_hass is hass
assert p_entry.data == {
'token': 'supersecret'
}
@asyncio.coroutine
def test_entries_gets_entries(manager):
"""Test entries are filtered by domain."""
MockConfigEntry(domain='test').add_to_manager(manager)
entry1 = MockConfigEntry(domain='test2')
entry1.add_to_manager(manager)
entry2 = MockConfigEntry(domain='test2')
entry2.add_to_manager(manager)
assert manager.async_entries('test2') == [entry1, entry2]
@asyncio.coroutine
def test_domains_gets_uniques(manager):
"""Test we only return each domain once."""
MockConfigEntry(domain='test').add_to_manager(manager)
MockConfigEntry(domain='test2').add_to_manager(manager)
MockConfigEntry(domain='test2').add_to_manager(manager)
MockConfigEntry(domain='test').add_to_manager(manager)
MockConfigEntry(domain='test3').add_to_manager(manager)
assert manager.async_domains() == ['test', 'test2', 'test3']
@asyncio.coroutine
def test_saving_and_loading(hass):
"""Test that we're saving and loading correctly."""
class TestFlow(config_entries.ConfigFlowHandler):
VERSION = 5
@asyncio.coroutine
def async_step_init(self, user_input=None):
return self.async_create_entry(
title='Test Title',
data={
'token': 'abcd'
}
)
with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
yield from hass.config_entries.flow.async_init('test')
class Test2Flow(config_entries.ConfigFlowHandler):
VERSION = 3
@asyncio.coroutine
def async_step_init(self, user_input=None):
return self.async_create_entry(
title='Test 2 Title',
data={
'username': 'bla'
}
)
json_path = 'homeassistant.util.json.open'
with patch('homeassistant.config_entries.HANDLERS.get',
return_value=Test2Flow), \
patch.object(config_entries, 'SAVE_DELAY', 0):
yield from hass.config_entries.flow.async_init('test')
with patch(json_path, mock_open(), create=True) as mock_write:
# To trigger the call_later
yield from asyncio.sleep(0, loop=hass.loop)
# To execute the save
yield from hass.async_block_till_done()
# Mock open calls are: open file, context enter, write, context leave
written = mock_write.mock_calls[2][1][0]
# Now load written data in new config manager
manager = config_entries.ConfigEntries(hass, {})
with patch('os.path.isfile', return_value=True), \
patch(json_path, mock_open(read_data=written), create=True):
yield from manager.async_load()
# Ensure same order
for orig, loaded in zip(hass.config_entries.async_entries(),
manager.async_entries()):
assert orig.version == loaded.version
assert orig.domain == loaded.domain
assert orig.title == loaded.title
assert orig.data == loaded.data
assert orig.source == loaded.source
#######################
# FLOW MANAGER TESTS #
#######################
@asyncio.coroutine
def test_configure_reuses_handler_instance(manager):
"""Test that we reuse instances."""
class TestFlow(config_entries.ConfigFlowHandler):
handle_count = 0
@asyncio.coroutine
def async_step_init(self, user_input=None):
self.handle_count += 1
return self.async_show_form(
title=str(self.handle_count),
step_id='init')
with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
form = yield from manager.flow.async_init('test')
assert form['title'] == '1'
form = yield from manager.flow.async_configure(form['flow_id'])
assert form['title'] == '2'
assert len(manager.flow.async_progress()) == 1
assert len(manager.async_entries()) == 0
@asyncio.coroutine
def test_configure_two_steps(manager):
"""Test that we reuse instances."""
class TestFlow(config_entries.ConfigFlowHandler):
VERSION = 1
@asyncio.coroutine
def async_step_init(self, user_input=None):
if user_input is not None:
self.init_data = user_input
return self.async_step_second()
return self.async_show_form(
title='title',
step_id='init',
data_schema=vol.Schema([str])
)
@asyncio.coroutine
def async_step_second(self, user_input=None):
if user_input is not None:
return self.async_create_entry(
title='Test Entry',
data=self.init_data + user_input
)
return self.async_show_form(
title='title',
step_id='second',
data_schema=vol.Schema([str])
)
with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
form = yield from manager.flow.async_init('test')
with pytest.raises(vol.Invalid):
form = yield from manager.flow.async_configure(
form['flow_id'], 'INCORRECT-DATA')
form = yield from manager.flow.async_configure(
form['flow_id'], ['INIT-DATA'])
form = yield from manager.flow.async_configure(
form['flow_id'], ['SECOND-DATA'])
assert form['type'] == config_entries.RESULT_TYPE_CREATE_ENTRY
assert len(manager.flow.async_progress()) == 0
assert len(manager.async_entries()) == 1
entry = manager.async_entries()[0]
assert entry.domain == 'test'
assert entry.data == ['INIT-DATA', 'SECOND-DATA']
@asyncio.coroutine
def test_show_form(manager):
"""Test that abort removes the flow from progress."""
schema = vol.Schema({
vol.Required('username'): str,
vol.Required('password'): str
})
class TestFlow(config_entries.ConfigFlowHandler):
@asyncio.coroutine
def async_step_init(self, user_input=None):
return self.async_show_form(
title='Hello form',
step_id='init',
description='test-description',
data_schema=schema,
errors={
'username': 'Should be unique.'
}
)
with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
form = yield from manager.flow.async_init('test')
assert form['type'] == 'form'
assert form['title'] == 'Hello form'
assert form['description'] == 'test-description'
assert form['data_schema'] is schema
assert form['errors'] == {
'username': 'Should be unique.'
}
@asyncio.coroutine
def test_abort_removes_instance(manager):
"""Test that abort removes the flow from progress."""
class TestFlow(config_entries.ConfigFlowHandler):
is_new = True
@asyncio.coroutine
def async_step_init(self, user_input=None):
old = self.is_new
self.is_new = False
return self.async_abort(reason=str(old))
with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
form = yield from manager.flow.async_init('test')
assert form['reason'] == 'True'
assert len(manager.flow.async_progress()) == 0
assert len(manager.async_entries()) == 0
form = yield from manager.flow.async_init('test')
assert form['reason'] == 'True'
assert len(manager.flow.async_progress()) == 0
assert len(manager.async_entries()) == 0
@asyncio.coroutine
def test_create_saves_data(manager):
"""Test creating a config entry."""
class TestFlow(config_entries.ConfigFlowHandler):
VERSION = 5
@asyncio.coroutine
def async_step_init(self, user_input=None):
return self.async_create_entry(
title='Test Title',
data='Test Data'
)
with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
yield from manager.flow.async_init('test')
assert len(manager.flow.async_progress()) == 0
assert len(manager.async_entries()) == 1
entry = manager.async_entries()[0]
assert entry.version == 5
assert entry.domain == 'test'
assert entry.title == 'Test Title'
assert entry.data == 'Test Data'
assert entry.source == config_entries.SOURCE_USER
@asyncio.coroutine
def test_discovery_init_flow(manager):
"""Test a flow initialized by discovery."""
class TestFlow(config_entries.ConfigFlowHandler):
VERSION = 5
@asyncio.coroutine
def async_step_discovery(self, info):
return self.async_create_entry(title=info['id'], data=info)
data = {
'id': 'hello',
'token': 'secret'
}
with patch.dict(config_entries.HANDLERS, {'test': TestFlow}):
yield from manager.flow.async_init(
'test', source=config_entries.SOURCE_DISCOVERY, data=data)
assert len(manager.flow.async_progress()) == 0
assert len(manager.async_entries()) == 1
entry = manager.async_entries()[0]
assert entry.version == 5
assert entry.domain == 'test'
assert entry.title == 'hello'
assert entry.data == data
assert entry.source == config_entries.SOURCE_DISCOVERY
| 32.98995
| 76
| 0.634349
|
80f3830515a6e320c9b5072f3f17dc83336400b8
| 515
|
py
|
Python
|
tests/test_reticulate.py
|
joshuaulrich/rchitect
|
7e56f507d5ef83e7ef54450668f2a31eb698ea5d
|
[
"MIT"
] | 32
|
2018-12-29T08:44:18.000Z
|
2021-12-08T04:08:21.000Z
|
tests/test_reticulate.py
|
joshuaulrich/rchitect
|
7e56f507d5ef83e7ef54450668f2a31eb698ea5d
|
[
"MIT"
] | 14
|
2018-12-18T16:41:09.000Z
|
2022-02-06T05:03:44.000Z
|
tests/test_reticulate.py
|
joshuaulrich/rchitect
|
7e56f507d5ef83e7ef54450668f2a31eb698ea5d
|
[
"MIT"
] | 4
|
2019-04-06T20:06:50.000Z
|
2021-02-04T20:30:16.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rchitect import reval, rcopy, rcall, robject
import string
def test_rcopy_reticulate_object():
reval("library(reticulate)")
py_object = reval("r_to_py(LETTERS)")
assert rcopy(py_object) == list(string.ascii_uppercase)
class Foo():
pass
def test_r_to_py_rchitect_object():
reval("library(reticulate)")
foo = Foo()
x = rcall("r_to_py", robject(foo))
assert "python.builtin.object" in rcopy(rcall("class", x))
| 22.391304
| 62
| 0.700971
|
ca4bb53f33ec49368717929259a4be90cc431bf0
| 1,545
|
py
|
Python
|
trees/count_leaves_in_bt.py
|
rjsnh1522/geeks-4-geeks-python
|
9bea0ce4f3fae9b5f9e5952fb5b4b3a8c6186cf4
|
[
"MIT"
] | null | null | null |
trees/count_leaves_in_bt.py
|
rjsnh1522/geeks-4-geeks-python
|
9bea0ce4f3fae9b5f9e5952fb5b4b3a8c6186cf4
|
[
"MIT"
] | 5
|
2021-03-10T11:49:39.000Z
|
2022-02-27T01:35:59.000Z
|
trees/count_leaves_in_bt.py
|
rjsnh1522/geeks-4-geeks-python
|
9bea0ce4f3fae9b5f9e5952fb5b4b3a8c6186cf4
|
[
"MIT"
] | null | null | null |
class Node:
def __init__(self, val):
self.right = None
self.data = val
self.left = None
# Driver Program
if __name__ == '__main__':
root = None
t = int(input())
for i in range(t):
n = int(input())
arr = input().strip().split()
if n == 0:
print(0)
continue
dictTree = dict()
for j in range(n):
if arr[3 * j] not in dictTree:
dictTree[arr[3 * j]] = Node(arr[3 * j])
parent = dictTree[arr[3 * j]]
            if j == 0:
root = parent
else:
parent = dictTree[arr[3 * j]]
child = Node(arr[3 * j + 1])
if (arr[3 * j + 2] == 'L'):
parent.left = child
else:
parent.right = child
dictTree[arr[3 * j + 1]] = child
print(countLeaves(root))
''' This is a function problem. You only need to complete the function given below '''
# User function Template for python3
'''
class Node:
def __init__(self, val):
self.right = None
self.data = val
self.left = None
'''
# your task is to complete this function
# function should return the count of leaf nodes
# Note: You are required to print a new line after every test case
def countLeaves(root):
# Code here
if root is None:
        return 0
if (root.left is None) and (root.right is None):
return 1
else:
return countLeaves(root.left) + countLeaves(root.right)
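# A minimal hypothetical check (not part of the original template): build a
# three-node tree by hand and confirm countLeaves() reports both children as leaves.
def _demo_count_leaves():
    demo_root = Node('1')
    demo_root.left = Node('2')
    demo_root.right = Node('3')
    assert countLeaves(demo_root) == 2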
| 24.140625
| 85
| 0.517799
|
80ad51af2350fdf069287324df0370d614cbca56
| 832
|
py
|
Python
|
WEEKS/CD_Sata-Structures/_MISC/algorithms/maths/decimal_to_binary_ip.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/algorithms/maths/decimal_to_binary_ip.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
WEEKS/CD_Sata-Structures/_MISC/algorithms/maths/decimal_to_binary_ip.py
|
webdevhub42/Lambda
|
b04b84fb5b82fe7c8b12680149e25ae0d27a0960
|
[
"MIT"
] | null | null | null |
##-------------------------------------------------------------------
"""
Given an ip address in dotted-decimal representation, determine the
binary representation. For example,
decimal_to_binary_ip("255.0.0.5") returns "11111111.00000000.00000000.00000101"
accepts string
returns string
##-------------------------------------------------------------------
"""
def decimal_to_binary_util(val):
bits = [128, 64, 32, 16, 8, 4, 2, 1]
val = int(val)
binary_rep = ""
for bit in bits:
if val >= bit:
binary_rep += str(1)
val -= bit
else:
binary_rep += str(0)
return binary_rep
def decimal_to_binary_ip(ip):
values = ip.split(".")
binary_list = []
for val in values:
binary_list.append(decimal_to_binary_util(val))
return ".".join(binary_list)
| 26
| 72
| 0.527644
|
4f9f7434dc02c073926dcedf08c7d8e4fdcb1595
| 1,265
|
py
|
Python
|
tlkit/data/datasets/fashion_mnist_dataset.py
|
jozhang97/Side-tuning
|
dea345691fb7ee0230150fe56ddd644efdffa6ac
|
[
"MIT"
] | 56
|
2020-01-12T05:45:59.000Z
|
2022-03-17T15:04:15.000Z
|
tlkit/data/datasets/fashion_mnist_dataset.py
|
jozhang97/Side-tuning
|
dea345691fb7ee0230150fe56ddd644efdffa6ac
|
[
"MIT"
] | 7
|
2020-01-28T23:14:45.000Z
|
2022-02-10T01:56:48.000Z
|
tlkit/data/datasets/fashion_mnist_dataset.py
|
jozhang97/Side-tuning
|
dea345691fb7ee0230150fe56ddd644efdffa6ac
|
[
"MIT"
] | 2
|
2020-02-29T14:51:23.000Z
|
2020-03-07T03:23:27.000Z
|
import torchvision
from torch.utils.data import DataLoader
def get_dataloaders(data_path,
inputs_and_outputs,
batch_size=64,
batch_size_val=4,
transform=None,
num_workers=0,
load_to_mem=False,
pin_memory=False):
dataloaders = {}
    dataset = torchvision.datasets.FashionMNIST(data_path, train=True, transform=transform, target_transform=None, download=True)
dataloader = DataLoader(dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers, pin_memory=pin_memory)
dataloaders['train'] = dataloader
    dataset = torchvision.datasets.FashionMNIST(data_path, train=False, transform=transform, target_transform=None, download=False)
dataloader = DataLoader(dataset, batch_size=batch_size_val, shuffle=False, num_workers=num_workers, pin_memory=pin_memory)
dataloaders['val'] = dataloader
    dataset = torchvision.datasets.FashionMNIST(data_path, train=False, transform=transform, target_transform=None, download=False)
dataloader = DataLoader(dataset, batch_size=batch_size_val, shuffle=False, num_workers=num_workers, pin_memory=pin_memory)
dataloaders['test'] = dataloader
return dataloaders
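# Hypothetical usage sketch (not part of the original file); the data path,
# transform and unpacking below are illustrative assumptions only.
def _example_usage():
    from torchvision import transforms
    loaders = get_dataloaders(data_path='./data',
                              inputs_and_outputs=None,
                              transform=transforms.ToTensor())
    images, labels = next(iter(loaders['train']))
    return images.shape, labels.shape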
| 50.6
| 126
| 0.710672
|
f61c0047f9aa774f2cb9062c62d01c0bd5a089d1
| 2,709
|
py
|
Python
|
openGaussBase/testcase/TOOLS/SERVER_TOOLS/gs_ssh/Opengauss_Function_Tools_gs_ssh_Case0017.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/TOOLS/SERVER_TOOLS/gs_ssh/Opengauss_Function_Tools_gs_ssh_Case0017.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
openGaussBase/testcase/TOOLS/SERVER_TOOLS/gs_ssh/Opengauss_Function_Tools_gs_ssh_Case0017.py
|
opengauss-mirror/Yat
|
aef107a8304b94e5d99b4f1f36eb46755eb8919e
|
[
"MulanPSL-1.0"
] | null | null | null |
"""
Copyright (c) 2022 Huawei Technologies Co.,Ltd.
openGauss is licensed under Mulan PSL v2.
You can use this software according to the terms and conditions of the Mulan PSL v2.
You may obtain a copy of Mulan PSL v2 at:
http://license.coscl.org.cn/MulanPSL2
THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
See the Mulan PSL v2 for more details.
"""
"""
Case Type   : Server-side tools
Case Name   : Linux shell commands executed on each openGauss host:
              whereis, find, locate, which
Description :
1. Locate files related to the locate program
2. Find files ending in .log in the current directory
3. Check whether the ls command exists
4. Check which
Expect      :
1. Output is correct
2. Output is correct
3. Output is correct
4. Output is correct
History :
"""
import unittest
from yat.test import Node
from yat.test import macro
from testcase.utils.Constant import Constant
from testcase.utils.Logger import Logger
Log = Logger()
class Tools(unittest.TestCase):
def setUp(self):
        Log.info('----Opengauss_Function_Tools_gs_ssh_Case0017 started-----')
self.dbuser_node = Node('dbuser')
self.constant = Constant()
def test_server_tools1(self):
        Log.info('-------Step 1. Locate files related to the locate program---------')
check_cmd = f'source {macro.DB_ENV_PATH};' \
f'gs_ssh -c "whereis locate"'
Log.info(check_cmd)
msg = self.dbuser_node.sh(check_cmd).result()
Log.info(msg)
self.assertIn(self.constant.gs_ssh_success_msg, msg)
self.assertIn('locate:', msg)
        Log.info('-------Step 2. Find files ending in .log in the current directory-------')
find_cmd = f'source {macro.DB_ENV_PATH};' \
f'gs_ssh -c "find ./ -name \'*.log\'"'
Log.info(find_cmd)
find_msg = self.dbuser_node.sh(find_cmd).result()
Log.info(find_msg)
self.assertIn(self.constant.gs_ssh_success_msg, find_msg)
        Log.info('-------Step 3. Check whether the ls command exists-------')
which_cmd = f'source {macro.DB_ENV_PATH};' \
f'gs_ssh -c "which ls"'
Log.info(which_cmd)
which_msg = self.dbuser_node.sh(which_cmd).result()
Log.info(which_msg)
self.assertIn(self.constant.gs_ssh_success_msg, which_msg)
        Log.info('-------Step 4. Check which-------')
which_cmd = f'source {macro.DB_ENV_PATH};' \
f'gs_ssh -c "which which"'
Log.info(which_cmd)
which_msg = self.dbuser_node.sh(which_cmd).result()
Log.info(which_msg)
self.assertIn(self.constant.gs_ssh_success_msg, which_msg)
def tearDown(self):
        Log.info('----------------No environment cleanup needed-----------------------')
        Log.info('----Opengauss_Function_Tools_gs_ssh_Case0017 finished----')
| 31.870588
| 84
| 0.635659
|
518ac140097168a4f025917e5832bce6114604e3
| 198
|
py
|
Python
|
benchmarks/Gaussian/config_gen.py
|
DependableSystemsLab/GPU-Trident
|
c734cd8a18146869fc915af73a6ca13ceca35c0b
|
[
"MIT"
] | 1
|
2021-01-17T10:36:21.000Z
|
2021-01-17T10:36:21.000Z
|
benchmarks/Gaussian/config_gen.py
|
DependableSystemsLab/GPU-Trident
|
c734cd8a18146869fc915af73a6ca13ceca35c0b
|
[
"MIT"
] | null | null | null |
benchmarks/Gaussian/config_gen.py
|
DependableSystemsLab/GPU-Trident
|
c734cd8a18146869fc915af73a6ca13ceca35c0b
|
[
"MIT"
] | null | null | null |
X_threads = 16
Y_threads = 16
Invoc_count = 15
start_index = 0
end_index = 0
src_list = []
SHARED_MEM_USE = False
total_shared_mem_size = 1024
domi_list = [44, 47, 71]
domi_val = [0.4, 0.4, 0.0]
| 14.142857
| 28
| 0.69697
|
73d9d9e793f273ecb1bcae87af181c27f4bcbad3
| 8,918
|
py
|
Python
|
Ensemble_Deep_RVFL.py
|
Xuyang-Huang/Ensemble-Deep-RVFL-python
|
c443ebd67fac986f18d14cee9eecd1109e09cffc
|
[
"MIT"
] | 2
|
2021-03-31T06:15:52.000Z
|
2021-09-27T22:31:34.000Z
|
Ensemble_Deep_RVFL.py
|
Xuyang-Huang/Ensemble-Deep-RVFL-python
|
c443ebd67fac986f18d14cee9eecd1109e09cffc
|
[
"MIT"
] | null | null | null |
Ensemble_Deep_RVFL.py
|
Xuyang-Huang/Ensemble-Deep-RVFL-python
|
c443ebd67fac986f18d14cee9eecd1109e09cffc
|
[
"MIT"
] | null | null | null |
#-- coding: utf-8 --
#@Time : 2021/3/27 20:40
#@Author : HUANG XUYANG
#@Email : xhuang032@e.ntu.edu.sg
#@File : Ensemble_Deep_RVFL.py
#@Software: PyCharm
import numpy as np
import sklearn.datasets as sk_dataset
num_nodes = 2 # Number of enhancement nodes.
regular_para = 1 # Regularization parameter.
weight_random_range = [-1, 1] # Range of random weights.
bias_random_range = [0, 1] # Range of random weights.
num_layer = 2 # Number of hidden layers
class EnsembleDeepRVFL:
"""A ensemble deep RVFL classifier.
Attributes:
n_nodes: An integer of enhancement node number.
lam: A floating number of regularization parameter.
w_random_vec_range: A list, [min, max], the range of generating random weights.
b_random_vec_range: A list, [min, max], the range of generating random bias.
random_weights: A Numpy array shape is [n_feature, n_nodes], weights of neuron.
random_bias: A Numpy array shape is [n_nodes], bias of neuron.
beta: A Numpy array shape is [n_feature + n_nodes, n_class], the projection matrix.
activation: A string of activation name.
        n_layer: An integer, the number of hidden layers.
data_std: A list, store normalization parameters for each layer.
data_mean: A list, store normalization parameters for each layer.
        same_feature: A bool; True means all features share the same meaning and value range (for example, image pixels).
"""
def __init__(self, n_nodes, lam, w_random_vec_range, b_random_vec_range, activation, n_layer, same_feature=False):
self.n_nodes = n_nodes
self.lam = lam
self.w_random_range = w_random_vec_range
self.b_random_range = b_random_vec_range
self.random_weights = []
self.random_bias = []
self.beta = []
a = Activation()
self.activation_function = getattr(a, activation)
self.n_layer = n_layer
self.data_std = [None] * self.n_layer
self.data_mean = [None] * self.n_layer
self.same_feature = same_feature
def train(self, data, label, n_class):
"""
:param data: Training data.
:param label: Training label.
:param n_class: An integer of number of class.
:return: No return
"""
assert len(data.shape) > 1, 'Data shape should be [n, dim].'
assert len(data) == len(label), 'Label number does not match data number.'
assert len(label.shape) == 1, 'Label should be 1-D array.'
n_sample = len(data)
n_feature = len(data[0])
h = data.copy()
data = self.standardize(data, 0)
y = self.one_hot(label, n_class)
for i in range(self.n_layer):
h = self.standardize(h, i) # Normalization data
self.random_weights.append(self.get_random_vectors(len(h[0]), self.n_nodes, self.w_random_range))
self.random_bias.append(self.get_random_vectors(1, self.n_nodes, self.b_random_range))
h = self.activation_function(np.dot(h, self.random_weights[i]) + np.dot(np.ones([n_sample, 1]),
self.random_bias[i]))
d = np.concatenate([h, data], axis=1)
h = d
d = np.concatenate([d, np.ones_like(d[:, 0:1])], axis=1)
if n_sample > (self.n_nodes + n_feature):
self.beta.append(np.linalg.inv((self.lam * np.identity(d.shape[1]) + np.dot(d.T, d))).dot(d.T).dot(y))
else:
self.beta.append(d.T.dot(np.linalg.inv(self.lam * np.identity(n_sample) + np.dot(d, d.T))).dot(y))
def predict(self, data, output_prob=False):
"""
:param data: Predict data.
        :param output_prob: A bool; if True, return the raw predicted probabilities, otherwise return the predicted classes.
:return: Prediction result.
"""
n_sample = len(data)
h = data.copy()
data = self.standardize(data, 0) # Normalization data
results = []
for i in range(self.n_layer):
h = self.standardize(h, i) # Normalization data
h = self.activation_function(np.dot(h, self.random_weights[i]) + np.dot(np.ones([n_sample, 1]),
self.random_bias[i]))
d = np.concatenate([h, data], axis=1)
h = d
d = np.concatenate([d, np.ones_like(d[:, 0:1])], axis=1)
if not output_prob:
results.append(np.argmax(np.dot(d, self.beta[i]), axis=1))
else:
results.append(self.softmax(np.dot(d, self.beta[i])))
if not output_prob:
results = list(map(np.bincount, list(np.array(results).transpose())))
results = np.array(list(map(np.argmax, results)))
return results
def eval(self, data, label):
"""
:param data: Evaluation data.
:param label: Evaluation label.
:return: Accuracy.
"""
assert len(data.shape) > 1, 'Data shape should be [n, dim].'
assert len(data) == len(label), 'Label number does not match data number.'
assert len(label.shape) == 1, 'Label should be 1-D array.'
n_sample = len(data)
h = data.copy()
data = self.standardize(data, 0)
results = []
for i in range(self.n_layer):
h = self.standardize(h, i) # Normalization data
h = self.activation_function(np.dot(h, self.random_weights[i]) + np.dot(np.ones([n_sample, 1]),
self.random_bias[i]))
d = np.concatenate([h, data], axis=1)
h = d
d = np.concatenate([d, np.ones_like(d[:, 0:1])], axis=1)
results.append(np.argmax(np.dot(d, self.beta[i]), axis=1))
results = list(map(np.bincount, list(np.array(results).transpose())))
results = np.array(list(map(np.argmax, results)))
acc = np.sum(np.equal(results, label))/len(label)
return acc
def get_random_vectors(self, m, n, scale_range):
x = (scale_range[1] - scale_range[0]) * np.random.random([m, n]) + scale_range[0]
return x
def one_hot(self, x, n_class):
y = np.zeros([len(x), n_class])
for i in range(len(x)):
y[i, x[i]] = 1
return y
def standardize(self, x, index):
if self.same_feature is True:
if self.data_std[index] is None:
self.data_std[index] = np.maximum(np.std(x), 1/np.sqrt(len(x)))
if self.data_mean[index] is None:
self.data_mean[index] = np.mean(x)
return (x - self.data_mean[index]) / self.data_std[index]
else:
if self.data_std[index] is None:
self.data_std[index] = np.maximum(np.std(x, axis=0), 1/np.sqrt(len(x)))
if self.data_mean[index] is None:
self.data_mean[index] = np.mean(x, axis=0)
return (x - self.data_mean[index]) / self.data_std[index]
def softmax(self, x):
return np.exp(x) / np.repeat((np.sum(np.exp(x), axis=1))[:, np.newaxis], len(x[0]), axis=1)
class Activation:
def sigmoid(self, x):
return 1 / (1 + np.e ** (-x))
def sine(self, x):
return np.sin(x)
def hardlim(self, x):
return (np.sign(x) + 1) / 2
def tribas(self, x):
return np.maximum(1 - np.abs(x), 0)
def radbas(self, x):
return np.exp(-(x**2))
def sign(self, x):
return np.sign(x)
def relu(self, x):
return np.maximum(0, x)
def leaky_relu(self, x):
x[x >= 0] = x[x >= 0]
x[x < 0] = x[x < 0] / 10.0
return x
def prepare_data(proportion):
dataset = sk_dataset.load_breast_cancer()
label = dataset['target']
data = dataset['data']
n_class = len(dataset['target_names'])
shuffle_index = np.arange(len(label))
np.random.shuffle(shuffle_index)
train_number = int(proportion * len(label))
train_index = shuffle_index[:train_number]
val_index = shuffle_index[train_number:]
data_train = data[train_index]
label_train = label[train_index]
data_val = data[val_index]
label_val = label[val_index]
return (data_train, label_train), (data_val, label_val), n_class
if __name__ == '__main__':
train, val, num_class = prepare_data(0.8)
ensemble_deep_rvfl = EnsembleDeepRVFL(num_nodes, regular_para, weight_random_range, bias_random_range, 'relu', num_layer, False)
ensemble_deep_rvfl.train(train[0], train[1], num_class)
prediction = ensemble_deep_rvfl.predict(val[0], output_prob=True)
train_acc = ensemble_deep_rvfl.eval(train[0], train[1])
val_acc = ensemble_deep_rvfl.eval(val[0], val[1])
print('train acc:', train_acc)
print('val acc:', val_acc)
| 38.111111
| 132
| 0.590155
|
061c27afa925f34274d2edbf41cd8bb82ec7c7ec
| 12,957
|
py
|
Python
|
ptsemseg/models/MV3_1_res101.py
|
Spritea/pytorch-semseg-89f4-two-titan
|
656f99ad75a59c25ace2888ea98e93dc209c9afd
|
[
"MIT"
] | null | null | null |
ptsemseg/models/MV3_1_res101.py
|
Spritea/pytorch-semseg-89f4-two-titan
|
656f99ad75a59c25ace2888ea98e93dc209c9afd
|
[
"MIT"
] | null | null | null |
ptsemseg/models/MV3_1_res101.py
|
Spritea/pytorch-semseg-89f4-two-titan
|
656f99ad75a59c25ace2888ea98e93dc209c9afd
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
import torch
from torch.nn import functional as F
models_urls = {
'101_voc': 'https://cloudstor.aarnet.edu.au/plus/s/Owmttk9bdPROwc6/download',
'18_imagenet': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'34_imagenet': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'50_imagenet': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'152_imagenet': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
'101_imagenet': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
}
def maybe_download(model_name, model_url, model_dir=None, map_location=None):
import os, sys
from six.moves import urllib
if model_dir is None:
torch_home = os.path.expanduser(os.getenv('TORCH_HOME', '~/.torch'))
model_dir = os.getenv('TORCH_MODEL_ZOO', os.path.join(torch_home, 'models'))
if not os.path.exists(model_dir):
os.makedirs(model_dir)
filename = '{}.pth.tar'.format(model_name)
cached_file = os.path.join(model_dir, filename)
if not os.path.exists(cached_file):
url = model_url
sys.stderr.write('Downloading: "{}" to {}\n'.format(url, cached_file))
urllib.request.urlretrieve(url, cached_file)
return torch.load(cached_file, map_location=map_location)
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
def conv1x1(in_planes, out_planes, stride=1):
"""1x1 convolution"""
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
## wrong!! change ReLU to BN
def conv3x3_bn(in_channel, out_channel):
return nn.Sequential(nn.Conv2d(in_channel, out_channel, kernel_size=3, stride=1, padding=1, bias=False),
nn.ReLU(inplace=True))
class GAU(nn.Module):
def __init__(self, in_size, out_size):
super(GAU, self).__init__()
self.in_size = in_size
self.out_size = out_size
self.conv = nn.Conv2d(in_size*2, out_size, kernel_size=1, stride=1, bias=False)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.bn=nn.BatchNorm2d(in_size)
self.relu=nn.ReLU(inplace=True)
def forward(self, input_low, input_high):
high_size = input_high.size()[2:]
# low channel usually > high channel
# if self.in_size != self.out_size:
# input_low = self.conv(input_low)
upsample_low = F.upsample(input_low, high_size, mode='bilinear')
input_cat = torch.cat([upsample_low, input_high], dim=1)
input_cat=self.conv(input_cat)
input_cat=self.bn(input_cat)
input_cat=self.relu(input_cat)
gp = self.avg_pool(input_cat)
multiply=gp*input_cat
out=multiply+input_cat
return out
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm2d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm2d(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
identity = self.downsample(x)
out += identity
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = conv1x1(inplanes, planes)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = conv3x3(planes, planes, stride)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = conv1x1(planes, planes * self.expansion)
self.bn3 = nn.BatchNorm2d(planes * self.expansion)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
identity = self.downsample(x)
out += identity
out = self.relu(out)
return out
class RefineBlock(nn.Module):
def __init__(self, in_channel):
super(RefineBlock, self).__init__()
self.c1 = nn.Conv2d(in_channel, 512, kernel_size=1, stride=1, padding=0, bias=False)
self.c3_1 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1, bias=False)
self.bn = nn.BatchNorm2d(512)
self.relu = nn.ReLU(inplace=True)
self.c3_2 = nn.Conv2d(512, 512, kernel_size=3, stride=1, padding=1, bias=False)
def forward(self, x):
x1 = self.c1(x)
x = self.c3_1(x1)
x = self.bn(x)
x = self.relu(x)
x = self.c3_2(x)
out = x1 + x
return out
class FPA(nn.Module):
def __init__(self, in_channel, out_channel):
super(FPA, self).__init__()
self.c15_1 = nn.Conv2d(in_channel, out_channel, kernel_size=15, stride=1, padding=7, bias=False)
self.c11_1 = nn.Conv2d(in_channel, out_channel, kernel_size=11, stride=1, padding=5, bias=False)
self.c7_1 = nn.Conv2d(in_channel, out_channel, kernel_size=7, stride=1, padding=3, bias=False)
self.c3_1 = nn.Conv2d(in_channel, out_channel, kernel_size=3, stride=1, padding=1, bias=False)
self.c15_2 = nn.Conv2d(in_channel, out_channel, kernel_size=15, stride=1, padding=7, bias=False)
self.c11_2 = nn.Conv2d(in_channel, out_channel, kernel_size=11, stride=1, padding=5, bias=False)
self.c7_2 = nn.Conv2d(in_channel, out_channel, kernel_size=7, stride=1, padding=3, bias=False)
self.c3_2 = nn.Conv2d(in_channel, out_channel, kernel_size=3, stride=1, padding=1, bias=False)
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.c1_gpb = nn.Conv2d(in_channel, out_channel, kernel_size=1, bias=False)
self.bn = nn.BatchNorm2d(out_channel)
self.relu = nn.ReLU(inplace=True)
def forward(self, x):
input_size = x.size()[2:]
x15_1 = self.c15_1(x)
x15_1 = self.bn(x15_1)
x15_1 = self.relu(x15_1)
x15_2 = self.c15_2(x15_1)
x15_2 = self.bn(x15_2)
x11_1 = self.c11_1(x)
x11_1 = self.bn(x11_1)
x11_1 = self.relu(x11_1)
x11_2 = self.c11_2(x11_1)
x11_2 = self.bn(x11_2)
x7_1 = self.c7_1(x)
x7_1 = self.bn(x7_1)
x7_1 = self.relu(x7_1)
x7_2 = self.c7_2(x7_1)
x7_2 = self.bn(x7_2)
x3_1 = self.c3_1(x)
x3_1 = self.bn(x3_1)
x3_1 = self.relu(x3_1)
x3_2 = self.c3_2(x3_1)
x3_2 = self.bn(x3_2)
x_gp = self.avg_pool(x)
x_gp = self.c1_gpb(x_gp)
x_gp = self.bn(x_gp)
x_gp = F.upsample(x_gp, size=input_size, mode='bilinear')
out = torch.cat([x_gp, x15_2, x11_2, x7_2, x3_2], dim=1)
return out
# MV2_9+ GAU
class MV3_1_ResNet(nn.Module):
def __init__(self, block, layers, num_classes=1000):
super(MV3_1_ResNet, self).__init__()
# self.do = nn.Dropout(p=0.5)
self.inplanes = 64
self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3,
bias=False)
self.bn1 = nn.BatchNorm2d(64)
self.relu = nn.ReLU(inplace=True)
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.rb1_1 = RefineBlock(256)
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.rb2_1 = RefineBlock(512)
self.layer3 = self._make_layer(block, 256, layers[2], stride=2)
self.rb3_1 = RefineBlock(1024)
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
self.rb4_1 = RefineBlock(2048)
# self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
# self.fc = nn.Linear(512 * block.expansion, num_classes)
# only for >=res50
# self.fpa=FPA(2048,512)
self.fpa = FPA(512, 512)
self.rb4_2 = RefineBlock(512 * 5)
self.fuse43 = GAU(512, 512)
# self.post_proc43 = conv3x3_bn(512*2,512)
self.rb3_2 = RefineBlock(512)
self.fuse32 = GAU(512, 512)
self.rb2_2 = RefineBlock(512)
# self.post_proc32 = conv3x3_bn(512)
self.fuse21 = GAU(512, 512)
self.rb1_2 = RefineBlock(512)
# self.post_proc21 = conv3x3_bn(512)
self.class_conv = nn.Conv2d(512, num_classes, kernel_size=3, stride=1,
padding=1, bias=True)
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
conv1x1(self.inplanes, planes * block.expansion, stride),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
ori_size = x.size()[2:]
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.maxpool(x)
l1 = self.layer1(x)
l2 = self.layer2(l1)
l3 = self.layer3(l2)
l4 = self.layer4(l3)
l1 = self.rb1_1(l1)
l2 = self.rb2_1(l2)
l3 = self.rb3_1(l3)
l4 = self.rb4_1(l4)
l4 = self.fpa(l4)
l4 = self.rb4_2(l4)
x_fuse43 = self.fuse43(l4, l3)
x_fuse43 = self.rb3_2(x_fuse43)
x_fuse32 = self.fuse32(x_fuse43, l2)
x_fuse32 = self.rb2_2(x_fuse32)
x_fuse21 = self.fuse21(x_fuse32, l1)
x_fuse21 = self.rb1_2(x_fuse21)
# x_fuse21=self.do(x_fuse21)
x = self.class_conv(x_fuse21)
x = F.upsample(x, ori_size, mode='bilinear')
return x
def MV3_1_ResNet18(num_classes, pretrained=False, **kwargs):
"""Constructs a MV1_ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = MV3_1_ResNet(BasicBlock, [2, 2, 2, 2], **kwargs, num_classes=num_classes)
if pretrained:
key = '18_imagenet'
url = models_urls[key]
model.load_state_dict(maybe_download(key, url), strict=False)
return model
def MV3_1_ResNet34(num_classes, pretrained=False, **kwargs):
"""Constructs a MV1_ResNet-34 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = MV3_1_ResNet(BasicBlock, [3, 4, 6, 3], **kwargs, num_classes=num_classes)
if pretrained:
key = '34_imagenet'
url = models_urls[key]
model.load_state_dict(maybe_download(key, url), strict=False)
return model
def MV3_1_ResNet50(num_classes, pretrained=True, **kwargs):
"""Constructs a MV1_ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = MV3_1_ResNet(Bottleneck, [3, 4, 6, 3], **kwargs, num_classes=num_classes)
if pretrained:
key = '50_imagenet'
url = models_urls[key]
model.load_state_dict(maybe_download(key, url), strict=False)
print("load imagenet res50")
return model
def MV3_1_ResNet101(num_classes, pretrained=True, **kwargs):
"""Constructs a MV1_ResNet-101 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = MV3_1_ResNet(Bottleneck, [3, 4, 23, 3], **kwargs, num_classes=num_classes)
if pretrained:
key = '101_imagenet'
url = models_urls[key]
model.load_state_dict(maybe_download(key, url), strict=False)
print("load imagenet res101")
return model
def MV3_1_ResNet152(num_classes, pretrained=False, **kwargs):
"""Constructs a MV1_ResNet-152 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = MV3_1_ResNet(Bottleneck, [3, 8, 36, 3], **kwargs, num_classes=num_classes)
if pretrained:
key = '152_imagenet'
url = models_urls[key]
model.load_state_dict(maybe_download(key, url), strict=False)
return model
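# Hypothetical usage sketch (not part of the original file); the class count and
# input size are illustrative assumptions only:
#   net = MV3_1_ResNet101(num_classes=6, pretrained=False).eval()
#   out = net(torch.randn(1, 3, 256, 256))  # expected shape: [1, 6, 256, 256]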
| 33.830287
| 108
| 0.622521
|
b6be075145242767308ca73288e86b4d2c3e849e
| 9,395
|
py
|
Python
|
python/Lib/site-packages/tectle/packaging.py
|
ksritharan/tectle
|
ca76424d85e66b041b40997838a3ceb79266efab
|
[
"MIT"
] | 1
|
2021-03-04T14:58:05.000Z
|
2021-03-04T14:58:05.000Z
|
python/Lib/site-packages/tectle/packaging.py
|
ksritharan/tectle
|
ca76424d85e66b041b40997838a3ceb79266efab
|
[
"MIT"
] | 8
|
2021-02-26T02:32:59.000Z
|
2021-05-28T02:22:07.000Z
|
python/Lib/site-packages/tectle/packaging.py
|
ksritharan/business-automation
|
ca76424d85e66b041b40997838a3ceb79266efab
|
[
"MIT"
] | null | null | null |
from .db import get_connection, get_data_dict
from flask import render_template, url_for, make_response
import requests
import traceback
import math
import logging
logger = logging.getLogger()
def do_packaging():
context = {'packaging_page': True}
conn = get_connection()
cur = conn.cursor()
query = """
SELECT *
FROM package_classes
"""
package_classes = get_data_dict(cur, query)
query = """
SELECT *
FROM boxes
"""
boxes = get_data_dict(cur, query)
select_template = 'COALESCE("%(class)s".quantity, 0) %(class)s_quantity'
join_template = """
LEFT JOIN packaging "%(class)s"
ON "%(class)s".package_class_id = %(id)s
AND "%(class)s".package_id = up.package_id
"""
selects = ",\n".join([select_template % package_class for package_class in package_classes])
joins = "\n".join([join_template % package_class for package_class in package_classes])
query = """
WITH uniq_packaging as (
SELECT package_id, box_id
FROM packaging
GROUP BY package_id, box_id
)
SELECT up.package_id,
b.type,
%s
FROM uniq_packaging up
JOIN boxes b
ON b.id = up.box_id
%s
""" % (selects, joins)
keys = [("%(class)s_quantity" % package_class, package_class['id']) for package_class in package_classes]
packaging = get_data_dict(cur, query)
for row in packaging:
row['quantities'] = []
for package_class in package_classes:
info = dict(package_class)
info['qty'] = row['%(class)s_quantity' % package_class]
row['quantities'].append(info)
context['packaging'] = packaging
context['package_classes'] = package_classes
context['boxes_list'] = boxes
return render_template('packaging.html', **context)
def do_add_packaging(form_data):
response = None
conn = get_connection()
cur = conn.cursor()
rows = []
try:
query = "SELECT COALESCE(MAX(package_id)+1, 1) FROM packaging"
cur.execute(query)
package_id = cur.fetchone()[0]
for key in form_data:
if key.startswith('package-class-'):
package_class_id = key.split('package-class-')[1]
row = {'box_id': form_data['box_id'],
'package_class_id': package_class_id,
'quantity': form_data[key],
'package_id': package_id
}
rows.append(row)
query = """
INSERT INTO packaging (package_id, box_id, package_class_id, quantity)
VALUES %s
""" % ",\n".join(["(%(package_id)s, %(box_id)s, %(package_class_id)s, %(quantity)s)" % row for row in rows])
cur.execute(query)
update_package_configs(cur)
conn.commit()
response = make_response('success', 200)
except Exception as e:
logger.exception(e)
tb = traceback.format_exc()
response = make_response(tb, 400)
return response
def do_edit_packaging(form_data):
response = None
conn = get_connection()
cur = conn.cursor()
try:
for key in form_data:
if key.startswith('package-class-'):
package_class_id = key.split('package-class-')[1]
row = {'box_id': form_data['box_id'],
'package_class_id': package_class_id,
'quantity': form_data[key],
'package_id': form_data['package_id']
}
query = """
SELECT COUNT(1)
FROM packaging
WHERE package_id = %(package_id)s
AND package_class_id = %(package_class_id)s
""" % row
cur.execute(query)
exists = cur.fetchone()[0] == 1
if exists:
query = """
UPDATE packaging
SET quantity = %(quantity)s,
box_id = %(box_id)s
WHERE package_id = %(package_id)s
AND package_class_id = %(package_class_id)s
""" % row
else:
query = """
INSERT INTO packaging (package_id, box_id, package_class_id, quantity)
VALUES (%(package_id)s, %(box_id)s, %(package_class_id)s, %(quantity)s)
""" % row
cur.execute(query)
update_package_configs(cur)
conn.commit()
response = make_response('success', 200)
except Exception as e:
logger.exception(e)
tb = traceback.format_exc()
response = make_response(tb, 400)
return response
def do_remove_packaging(package_id):
response = None
conn = get_connection()
cur = conn.cursor()
try:
query = """
DELETE FROM packaging
WHERE package_id = %s
""" % package_id
cur.execute(query)
update_package_configs(cur)
conn.commit()
response = make_response('success', 200)
except Exception as e:
logger.exception(e)
tb = traceback.format_exc()
response = make_response(tb, 400)
return response
def update_package_configs(cur):
cur.execute("DELETE FROM package_configs")
rows = get_data_dict(cur, "SELECT * FROM shipping_costs")
shipping_costs = {}
for row in rows:
box_id = row['box_id']
weight_str = row['weight_kg']
if box_id not in shipping_costs:
shipping_costs[box_id] = {}
shipping_costs[box_id][weight_str] = row
rows = get_data_dict(cur, "SELECT * FROM package_classes")
package_class_list = [row for row in rows]
package_ids = ["%s" % row['id'] for row in rows]
package_classes = {str(row["id"]): row for row in rows}
rows = get_data_dict(cur, "SELECT p.*, b.cost box_cost, b.weight_kg, (b.length_in*b.width_in*b.height_in) volume FROM packaging p JOIN boxes b ON b.id = p.box_id")
packaging = {}
for row in rows:
package_id = row['package_id']
if package_id not in packaging:
packaging[package_id] = {'box_id': row['box_id'], 'weight_kg': row['weight_kg'], 'box_volume': row['volume'], 'box_cost': row['box_cost']}
package_class_id = str(row['package_class_id'])
packaging[package_id][package_class_id] = row['quantity']
packaging[package_id]['weight_kg'] += package_classes[package_class_id]['weight_kg']*row['quantity']
keys = ['box_id']
keys.extend(package_ids)
MAX_WEIGHT = 2.0
combinations = {}
for package_id in packaging:
package = packaging[package_id]
combinations.update(add_combinations(MAX_WEIGHT, package_class_list, package, combinations, keys))
min_volume_combinations = {}
for c in combinations:
combination = combinations[c]
weight = max(0.1, round(math.ceil(round(combination['weight_kg'],2)/0.1)*0.1, 1))
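        # e.g. a 1.23 kg combination rounds up to the next 0.1 kg step -> 1.3 kg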
if weight <= MAX_WEIGHT:
weight_str = "%.1f" % weight
shipping_cost = shipping_costs[combination['box_id']][weight_str]
key = get_hashable(combination, package_ids)
if (key not in min_volume_combinations or combination['box_volume'] < min_volume_combinations[key]['box_volume']):
min_volume_combinations[key] = dict(combination)
min_volume_combinations[key]['cost_ca'] = shipping_cost['cost_ca'] + combination['box_cost']
min_volume_combinations[key]['cost_us'] = shipping_cost['cost_us'] + combination['box_cost']
for key in min_volume_combinations:
row = min_volume_combinations[key]
query = "SELECT COALESCE(MAX(package_id)+1, 1) FROM package_configs"
cur.execute(query)
package_id = cur.fetchone()[0]
template = "(%%s, %(box_id)s, %%s, %%s, %(cost_ca)s, %(cost_us)s)" % row
values = ",\n".join([template % (package_id, x, row[x]) for x in package_ids])
query = """
INSERT INTO package_configs (package_id, box_id, package_class_id, quantity, cost_ca, cost_us)
VALUES %s
""" % values
cur.execute(query)
def add_combinations(max_weight, package_class_list, current_combination, combinations, keys):
key = get_hashable(current_combination, keys)
if key in combinations:
return combinations
if current_combination['weight_kg'] <= max_weight:
combinations[key] = current_combination
for package_class in package_class_list:
weight_kg = package_class['weight_kg']
package_class_id = str(package_class['id'])
if (current_combination[package_class_id] > 1):
new_combination = dict(current_combination)
new_combination[package_class_id] -= 1
new_combination['weight_kg'] -= weight_kg
combinations.update(add_combinations(max_weight, package_class_list, new_combination, combinations, keys))
return combinations
def get_hashable(data, keys):
return ",".join([str(data[key]) for key in keys])
| 40.149573
| 167
| 0.585737
|
f458b60c91f2d6dc7a346f8a895e33fc7ca69010
| 7,664
|
py
|
Python
|
models/backbones/skip/downsampler.py
|
Khanhnn00/blind-image-sr
|
42bba4894ac9ee6595d2fff9b25a7678f323ad87
|
[
"Apache-2.0"
] | 93
|
2021-05-11T08:35:24.000Z
|
2022-03-30T10:41:14.000Z
|
models/backbones/skip/downsampler.py
|
Khanhnn00/blind-image-sr
|
42bba4894ac9ee6595d2fff9b25a7678f323ad87
|
[
"Apache-2.0"
] | 14
|
2021-05-20T05:05:19.000Z
|
2022-01-22T22:09:36.000Z
|
models/backbones/skip/downsampler.py
|
Khanhnn00/blind-image-sr
|
42bba4894ac9ee6595d2fff9b25a7678f323ad87
|
[
"Apache-2.0"
] | 29
|
2021-05-13T04:16:56.000Z
|
2022-03-03T02:07:24.000Z
|
import numpy as np
import torch
import torch.nn as nn
class Downsampler(nn.Module):
"""
http://www.realitypixels.com/turk/computergraphics/ResamplingFilters.pdf
"""
def __init__(
self, n_planes, factor, kernel_type, phase=0, kernel_width=None, support=None, sigma=None, preserve_size=False
):
super(Downsampler, self).__init__()
assert phase in [0, 0.5], "phase should be 0 or 0.5"
if kernel_type == "lanczos2":
support = 2
kernel_width = 4 * factor + 1
kernel_type_ = "lanczos"
elif kernel_type == "lanczos3":
support = 3
kernel_width = 6 * factor + 1
kernel_type_ = "lanczos"
elif kernel_type == "gauss12":
kernel_width = 7
sigma = 1 / 2
kernel_type_ = "gauss"
elif kernel_type == "gauss1sq2":
kernel_width = 9
sigma = 1.0 / np.sqrt(2)
kernel_type_ = "gauss"
elif kernel_type in ["lanczos", "gauss", "box"]:
kernel_type_ = kernel_type
else:
assert False, "wrong name kernel"
# note that `kernel width` will be different to actual size for phase = 1/2
self.kernel = get_kernel(factor, kernel_type_, phase, kernel_width, support=support, sigma=sigma)
downsampler = nn.Conv2d(n_planes, n_planes, kernel_size=self.kernel.shape, stride=factor, padding=0)
downsampler.weight.data[:] = 0
downsampler.bias.data[:] = 0
kernel_torch = torch.from_numpy(self.kernel)
for i in range(n_planes):
downsampler.weight.data[i, i] = kernel_torch
self.downsampler_ = downsampler
if preserve_size:
if self.kernel.shape[0] % 2 == 1:
pad = int((self.kernel.shape[0] - 1) / 2.0)
else:
pad = int((self.kernel.shape[0] - factor) / 2.0)
self.padding = nn.ReplicationPad2d(pad)
self.preserve_size = preserve_size
def forward(self, input):
if self.preserve_size:
x = self.padding(input)
else:
x = input
self.x = x
return self.downsampler_(x)
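# Hypothetical quick check (not in the original file): downsample a random
# 3-channel image by a factor of 2 with a lanczos2 kernel; the tensor sizes
# below are illustrative assumptions.
def _demo_downsampler():
    down = Downsampler(n_planes=3, factor=2, kernel_type='lanczos2',
                       phase=0.5, preserve_size=True)
    y = down(torch.randn(1, 3, 64, 64))
    assert y.shape == (1, 3, 32, 32)
    return y.shape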
class Blurconv(nn.Module):
"""
http://www.realitypixels.com/turk/computergraphics/ResamplingFilters.pdf
"""
def __init__(self, n_planes=1, preserve_size=False):
super(Blurconv, self).__init__()
# self.kernel = kernel
# blurconv = nn.Conv2d(n_planes, n_planes, kernel_size=self.kernel.shape, stride=1, padding=0)
# blurconvr.weight.data = self.kernel
# blurconv.bias.data[:] = 0
self.n_planes = n_planes
self.preserve_size = preserve_size
# kernel_torch = torch.from_numpy(self.kernel)
# for i in range(n_planes):
# blurconv.weight.data[i, i] = kernel_torch
# self.blurconv_ = blurconv
#
# if preserve_size:
#
# if self.kernel.shape[0] % 2 == 1:
# pad = int((self.kernel.shape[0] - 1) / 2.)
# else:
# pad = int((self.kernel.shape[0] - factor) / 2.)
#
# self.padding = nn.ReplicationPad2d(pad)
#
# self.preserve_size = preserve_size
def forward(self, input, kernel):
if self.preserve_size:
if kernel.shape[0] % 2 == 1:
pad = int((kernel.shape[3] - 1) / 2.0)
else:
pad = int((kernel.shape[3] - 1.0) / 2.0)
padding = nn.ReplicationPad2d(pad)
x = padding(input)
else:
x = input
blurconv = nn.Conv2d(
self.n_planes, self.n_planes, kernel_size=kernel.size(3), stride=1, padding=0, bias=False
).cuda()
blurconv.weight.data[:] = kernel
return blurconv(x)
class Blurconv2(nn.Module):
"""
http://www.realitypixels.com/turk/computergraphics/ResamplingFilters.pdf
"""
def __init__(self, n_planes=1, preserve_size=False, k_size=21):
super(Blurconv2, self).__init__()
self.n_planes = n_planes
self.k_size = k_size
self.preserve_size = preserve_size
self.blurconv = nn.Conv2d(self.n_planes, self.n_planes, kernel_size=k_size, stride=1, padding=0, bias=False)
# self.blurconv.weight.data[:] /= self.blurconv.weight.data.sum()
def forward(self, input):
if self.preserve_size:
pad = int((self.k_size - 1.0) / 2.0)
padding = nn.ReplicationPad2d(pad)
x = padding(input)
else:
x = input
# self.blurconv.weight.data[:] /= self.blurconv.weight.data.sum()
return self.blurconv(x)
def get_kernel(factor, kernel_type, phase, kernel_width, support=None, sigma=None):
assert kernel_type in ["lanczos", "gauss", "box"]
# factor = float(factor)
if phase == 0.5 and kernel_type != "box":
kernel = np.zeros([kernel_width - 1, kernel_width - 1])
else:
kernel = np.zeros([kernel_width, kernel_width])
if kernel_type == "box":
assert phase == 0.5, "Box filter is always half-phased"
kernel[:] = 1.0 / (kernel_width * kernel_width)
elif kernel_type == "gauss":
assert sigma, "sigma is not specified"
assert phase != 0.5, "phase 1/2 for gauss not implemented"
center = (kernel_width + 1.0) / 2.0
print(center, kernel_width)
sigma_sq = sigma * sigma
for i in range(1, kernel.shape[0] + 1):
for j in range(1, kernel.shape[1] + 1):
di = (i - center) / 2.0
dj = (j - center) / 2.0
kernel[i - 1][j - 1] = np.exp(-(di * di + dj * dj) / (2 * sigma_sq))
kernel[i - 1][j - 1] = kernel[i - 1][j - 1] / (2.0 * np.pi * sigma_sq)
elif kernel_type == "lanczos":
assert support, "support is not specified"
center = (kernel_width + 1) / 2.0
for i in range(1, kernel.shape[0] + 1):
for j in range(1, kernel.shape[1] + 1):
if phase == 0.5:
di = abs(i + 0.5 - center) / factor
dj = abs(j + 0.5 - center) / factor
else:
di = abs(i - center) / factor
dj = abs(j - center) / factor
val = 1
if di != 0:
val = val * support * np.sin(np.pi * di) * np.sin(np.pi * di / support)
val = val / (np.pi * np.pi * di * di)
if dj != 0:
val = val * support * np.sin(np.pi * dj) * np.sin(np.pi * dj / support)
val = val / (np.pi * np.pi * dj * dj)
kernel[i - 1][j - 1] = val
else:
assert False, "wrong method name"
kernel /= kernel.sum()
return kernel
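# Hypothetical sanity check (not in the original file): every generated kernel
# is normalized, so its entries sum to 1; the parameters below are assumptions.
def _demo_get_kernel_sum():
    k = get_kernel(factor=2, kernel_type='gauss', phase=0, kernel_width=7, sigma=0.5)
    assert abs(k.sum() - 1.0) < 1e-6
    return k.shape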
# a = Downsampler(n_planes=3, factor=2, kernel_type='lanczos2', phase='1', preserve_size=True)
#################
# Learnable downsampler
# KS = 32
# dow = nn.Sequential(nn.ReplicationPad2d(int((KS - factor) / 2.)), nn.Conv2d(1,1,KS,factor))
# class Apply(nn.Module):
# def __init__(self, what, dim, *args):
# super(Apply, self).__init__()
# self.dim = dim
# self.what = what
# def forward(self, input):
# inputs = []
# for i in range(input.size(self.dim)):
# inputs.append(self.what(input.narrow(self.dim, i, 1)))
# return torch.cat(inputs, dim=self.dim)
# def __len__(self):
# return len(self._modules)
# downs = Apply(dow, 1)
# downs.type(dtype)(net_input.type(dtype)).size()
| 31.669421
| 118
| 0.54345
|
bbbf44f14206ade2a822d3727974970a676d5751
| 3,990
|
py
|
Python
|
census/tf-keras/trainer/task.py
|
agodi/cloudml-samples
|
14c6e15979a3f62c87bb32f8cc730b402dfaaeee
|
[
"Apache-2.0"
] | null | null | null |
census/tf-keras/trainer/task.py
|
agodi/cloudml-samples
|
14c6e15979a3f62c87bb32f8cc730b402dfaaeee
|
[
"Apache-2.0"
] | null | null | null |
census/tf-keras/trainer/task.py
|
agodi/cloudml-samples
|
14c6e15979a3f62c87bb32f8cc730b402dfaaeee
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trains a Keras model to predict income bracket from other Census data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
from . import model
from . import util
import tensorflow as tf
def get_args():
"""Argument parser.
Returns:
Dictionary of arguments.
"""
parser = argparse.ArgumentParser()
parser.add_argument(
'--job-dir',
type=str,
required=True,
help='local or GCS location for writing checkpoints and exporting '
'models')
parser.add_argument(
'--num-epochs',
type=int,
default=20,
help='number of times to go through the data, default=20')
parser.add_argument(
'--batch-size',
default=128,
type=int,
help='number of records to read during each training step, default=128')
parser.add_argument(
'--learning-rate',
default=.01,
type=float,
help='learning rate for gradient descent, default=.01')
parser.add_argument(
'--verbosity',
choices=['DEBUG', 'ERROR', 'FATAL', 'INFO', 'WARN'],
default='INFO')
args, _ = parser.parse_known_args()
return args
def train_and_evaluate(args):
"""Trains and evaluates the Keras model.
Uses the Keras model defined in model.py and trains on data loaded and
preprocessed in util.py. Saves the trained model in TensorFlow SavedModel
format to the path defined in part by the --job-dir argument.
Args:
args: dictionary of arguments - see get_args() for details
"""
train_x, train_y, eval_x, eval_y = util.load_data()
# dimensions
num_train_examples, input_dim = train_x.shape
num_eval_examples = eval_x.shape[0]
# Create the Keras Model
keras_model = model.create_keras_model(
input_dim=input_dim, learning_rate=args.learning_rate)
# Pass a numpy array by passing DataFrame.values
training_dataset = model.input_fn(
features=train_x.values,
labels=train_y,
shuffle=True,
num_epochs=args.num_epochs,
batch_size=args.batch_size)
# Pass a numpy array by passing DataFrame.values
validation_dataset = model.input_fn(
features=eval_x.values,
labels=eval_y,
shuffle=False,
num_epochs=args.num_epochs,
batch_size=num_eval_examples)
# Setup Learning Rate decay.
lr_decay_cb = tf.keras.callbacks.LearningRateScheduler(
lambda epoch: args.learning_rate + 0.02 * (0.5 ** (1 + epoch)),
verbose=True)
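    # Worked example (added for clarity): with the default learning rate of 0.01,
    # the scheduled rate is lr + 0.02 * 0.5 ** (1 + epoch), i.e.
    #   epoch 0: 0.01 + 0.02 * 0.5   = 0.02
    #   epoch 1: 0.01 + 0.02 * 0.25  = 0.015
    #   epoch 2: 0.01 + 0.02 * 0.125 = 0.0125
    # so the rate decays geometrically towards the configured base rate.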
# Setup TensorBoard callback.
tensorboard_cb = tf.keras.callbacks.TensorBoard(
os.path.join(args.job_dir, 'keras_tensorboard'),
histogram_freq=1)
# Train model
keras_model.fit(
training_dataset,
steps_per_epoch=int(num_train_examples / args.batch_size),
epochs=args.num_epochs,
validation_data=validation_dataset,
validation_steps=1,
verbose=1,
callbacks=[lr_decay_cb, tensorboard_cb])
export_path = os.path.join(args.job_dir, 'keras_export')
tf.keras.models.save_model(keras_model, export_path)
print('Model exported to: {}'.format(export_path))
if __name__ == '__main__':
args = get_args()
tf.compat.v1.logging.set_verbosity(args.verbosity)
train_and_evaluate(args)
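# Usage sketch (illustrative, not part of the original sample): because this file
# uses relative imports (from . import model), it is meant to be run as a module
# from the package root, e.g.
#   python -m trainer.task --job-dir /tmp/census_keras --num-epochs 5
# The --job-dir value here is a hypothetical local path; on the training service it
# would typically be a GCS location.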
| 30.458015
| 80
| 0.679449
|
159bb4957bfb24489c5ae9ccbc41cc2f4c9db444
| 1,220
|
py
|
Python
|
getGuess.py
|
icyflame/cows-and-bulls
|
06a4e5456ae97cf1d46ff9743b3642813d5cffce
|
[
"MIT"
] | null | null | null |
getGuess.py
|
icyflame/cows-and-bulls
|
06a4e5456ae97cf1d46ff9743b3642813d5cffce
|
[
"MIT"
] | null | null | null |
getGuess.py
|
icyflame/cows-and-bulls
|
06a4e5456ae97cf1d46ff9743b3642813d5cffce
|
[
"MIT"
] | null | null | null |
from Tkinter import *
import tkFont
import tkMessageBox
alert = tkMessageBox.showinfo
SHOW_TOPBAR = True
def getGuess():
flag = True
while flag:
flag = False
root = Toplevel()
root.title('Enter your guess')
font2 = tkFont.Font(family='Helvetica',size=20)
f = Frame(root)
f.grid()
f.grid_propagate(1) ##size can change
Label(f,text='Enter your guess:').grid(row=1,column=0)
a = Entry(f,width=3)
a.focus()
a.grid(row=1,column=1)
c = Button(f,text='Confirm',command=root.quit)
c.grid(row=2,column=0)
root.mainloop()
guess = a.get()
if not (len(guess) == 3):
alert('INSTRUCTIONS','YOU DID NOT ENTER A THREE DIGIT NUMBER')
root.destroy()
flag = True
continue
try:
int(guess)
except:
alert('INSTRUCTIONS','A NUMBER SHOULD HAVE DIGITS ONLY')
root.destroy()
flag = True
continue
root.destroy()
return (guess)
##Script level testing code.
##
##root = Tk()
##
##print getGuess()
##
##mainloop()
| 14.698795
| 74
| 0.518033
|
dba227c05becb8d7edde59d391e48c06ad890ec3
| 2,561
|
py
|
Python
|
examples/ad_manager/v201905/product_template_service/activate_product_templates.py
|
ale180192/googleads-python-lib
|
783a2d40a49956fb16ed73280708f6f9e322aa09
|
[
"Apache-2.0"
] | 1
|
2020-05-27T15:48:47.000Z
|
2020-05-27T15:48:47.000Z
|
examples/ad_manager/v201905/product_template_service/activate_product_templates.py
|
ale180192/googleads-python-lib
|
783a2d40a49956fb16ed73280708f6f9e322aa09
|
[
"Apache-2.0"
] | null | null | null |
examples/ad_manager/v201905/product_template_service/activate_product_templates.py
|
ale180192/googleads-python-lib
|
783a2d40a49956fb16ed73280708f6f9e322aa09
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example activates a product template.
To determine which product templates exist, run get_all_product_templates.py.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set the id of the product template to activate.
PRODUCT_TEMPLATE_ID = 'INSERT_PRODUCT_TEMPLATE_ID_HERE'
def main(client, product_template_id):
# Initialize appropriate service.
product_template_service = client.GetService(
'ProductTemplateService', version='v201905')
# Create query.
statement = (ad_manager.StatementBuilder(version='v201905')
.Where('id = :id')
.WithBindVariable('id', int(product_template_id))
.Limit(1))
product_templates_activated = 0
# Get product_templates by statement.
while True:
response = product_template_service.getProductTemplatesByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
for product_template in response['results']:
print('Product template with id "%s" and name "%s" will be '
'activated.' % (product_template['id'],
product_template['name']))
# Perform action.
result = product_template_service.performProductTemplateAction(
{'xsi_type': 'ActivateProductTemplates'}, statement.ToStatement())
if result and int(result['numChanges']) > 0:
product_templates_activated += int(result['numChanges'])
statement.offset += statement.limit
else:
break
# Display results.
if product_templates_activated > 0:
print('Number of product templates '
'activated: %s' % product_templates_activated)
else:
print('No product templates were activated.')
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, PRODUCT_TEMPLATE_ID)
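# Illustrative note (added for clarity, not part of the original example): the
# StatementBuilder above produces a PQL statement along the lines of
#   WHERE id = :id LIMIT 1
# with the bind variable :id set to the product template id and an offset that the
# paging loop advances. To run this against a real network, replace the placeholder
# above with an actual id, e.g.
#   main(ad_manager_client, '123456789')   # hypothetical id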
| 34.146667
| 77
| 0.713784
|
c36123ae62dcecd3d2b8eaab2a05c54c6b66128a
| 1,454
|
py
|
Python
|
src/main/jython/sfdc/metadata_package/undeploy.py
|
xebialabs-external/xld-salesforce-plugin
|
041bb789baf038191b19bf91862d3a54683087ae
|
[
"MIT"
] | 3
|
2016-10-13T20:51:35.000Z
|
2018-11-29T15:42:42.000Z
|
src/main/jython/sfdc/metadata_package/undeploy.py
|
xebialabs-external/xld-salesforce-plugin
|
041bb789baf038191b19bf91862d3a54683087ae
|
[
"MIT"
] | 1
|
2019-07-01T18:15:56.000Z
|
2019-07-01T18:15:56.000Z
|
src/main/jython/sfdc/metadata_package/undeploy.py
|
xebialabs-community/xld-salesforce-plugin
|
69842abf80cc9dacb7556c5e56ad168f8604607c
|
[
"MIT"
] | null | null | null |
#
# Copyright 2019 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
from sfdc.metadata_package import SalesForceClient
def deploy_package(org_ci, deployed):
print "Starting undeployment"
client = SalesForceClient.new_instance(org_ci)
client.undeploy_package(deployed.file.path)
print "Done"
if __name__ == '__main__' or __name__ == '__builtin__':
container = previousDeployed.container
deploy_package(container, previousDeployed)
| 63.217391
| 462
| 0.790234
|
7542d8516f9fd1afddd6a04d390fa18a913c77bf
| 5,037
|
py
|
Python
|
azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py
|
dominicbetts/azure-iot-sdk-python
|
ea70d2a319df2d602f8102e70a4e88635febf1b8
|
[
"MIT"
] | null | null | null |
azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py
|
dominicbetts/azure-iot-sdk-python
|
ea70d2a319df2d602f8102e70a4e88635febf1b8
|
[
"MIT"
] | null | null | null |
azure-iot-device/azure/iot/device/provisioning/provisioning_device_client.py
|
dominicbetts/azure-iot-sdk-python
|
ea70d2a319df2d602f8102e70a4e88635febf1b8
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
This module contains user-facing synchronous Provisioning Device Client for Azure Provisioning
Device SDK. This client uses Symmetric Key and X509 authentication to register devices with an
IoT Hub via the Device Provisioning Service.
"""
import logging
from azure.iot.device.common.evented_callback import EventedCallback
from .abstract_provisioning_device_client import AbstractProvisioningDeviceClient
from .abstract_provisioning_device_client import log_on_register_complete
from azure.iot.device.provisioning.pipeline import constant as dps_constant
from .pipeline import exceptions as pipeline_exceptions
from azure.iot.device import exceptions
logger = logging.getLogger(__name__)
def handle_result(callback):
try:
return callback.wait_for_completion()
except pipeline_exceptions.ConnectionDroppedError as e:
raise exceptions.ConnectionDroppedError(
message="Lost connection to the provisioning server", cause=e
)
except pipeline_exceptions.ConnectionFailedError as e:
raise exceptions.ConnectionFailedError(
message="Could not connect to the provisioning server", cause=e
)
except pipeline_exceptions.UnauthorizedError as e:
raise exceptions.CredentialError(message="Credentials invalid, could not connect", cause=e)
except pipeline_exceptions.ProtocolClientError as e:
raise exceptions.ClientError(message="Error in the provisioning client", cause=e)
except pipeline_exceptions.PipelineNotRunning as e:
raise exceptions.ClientError(message="Client has already been shut down", cause=e)
except Exception as e:
raise exceptions.ClientError(message="Unexpected failure", cause=e)
class ProvisioningDeviceClient(AbstractProvisioningDeviceClient):
"""
Client which can be used to run the registration of a device with provisioning service
using Symmetric Key or X509 authentication.
"""
def register(self):
"""
Register the device with the provisioning service
This is a synchronous call, meaning that this function will not return until the
registration process has completed successfully or the attempt has resulted in a failure.
Before returning, the client will also disconnect from the provisioning service.
If a registration attempt is made while a previous registration is in progress it may
throw an error.
Once the device is successfully registered, the client will no longer be operable.
:returns: RegistrationResult indicating the result of the registration.
:rtype: :class:`azure.iot.device.RegistrationResult`
:raises: :class:`azure.iot.device.exceptions.CredentialError` if credentials are invalid
and a connection cannot be established.
:raises: :class:`azure.iot.device.exceptions.ConnectionFailedError` if establishing a
connection results in failure.
:raises: :class:`azure.iot.device.exceptions.ConnectionDroppedError` if connection is lost
during execution.
:raises: :class:`azure.iot.device.exceptions.ClientError` if there is an unexpected failure
during execution.
"""
logger.info("Registering with Provisioning Service...")
if not self._pipeline.responses_enabled[dps_constant.REGISTER]:
self._enable_responses()
# Register
register_complete = EventedCallback(return_arg_name="result")
self._pipeline.register(payload=self._provisioning_payload, callback=register_complete)
result = handle_result(register_complete)
log_on_register_complete(result)
# Implicitly shut down the pipeline upon successful completion
if result is not None and result.status == "assigned":
logger.debug("Beginning pipeline shutdown operation")
shutdown_complete = EventedCallback()
self._pipeline.shutdown(callback=shutdown_complete)
handle_result(shutdown_complete)
logger.debug("Completed pipeline shutdown operation")
return result
def _enable_responses(self):
"""Enable to receive responses from Device Provisioning Service.
This is a synchronous call, meaning that this function will not return until the feature
has been enabled.
"""
logger.info("Enabling reception of response from Device Provisioning Service...")
subscription_complete = EventedCallback()
self._pipeline.enable_responses(callback=subscription_complete)
handle_result(subscription_complete)
logger.info("Successfully subscribed to Device Provisioning Service to receive responses")
| 45.378378
| 99
| 0.717292
|
028db41bdb0b55df2f81a66a6ca74c7577a1f95b
| 970
|
py
|
Python
|
cal_scores/wodeutil/constant/SysToken.py
|
bzhao2718/ReliableSummEvalReg
|
ea3281855fb4b922a514cb610fc2b70063534bf5
|
[
"MIT"
] | null | null | null |
cal_scores/wodeutil/constant/SysToken.py
|
bzhao2718/ReliableSummEvalReg
|
ea3281855fb4b922a514cb610fc2b70063534bf5
|
[
"MIT"
] | null | null | null |
cal_scores/wodeutil/constant/SysToken.py
|
bzhao2718/ReliableSummEvalReg
|
ea3281855fb4b922a514cb610fc2b70063534bf5
|
[
"MIT"
] | null | null | null |
import argparse
SysToken = argparse.Namespace(
SUCCESS_IND=1,
FAIL_IND=0,
EXT_JSON=".json",
EXT_TXT=".txt",
EXT_PT=".pt",
EXT_INI=".ini",
backup="backup",
workspace_init="wordspace.ini",
WORKSPACE_CONFIG_PATH="WORKSAPCE_INIT_PATH",
TASK_CONFIG_PATH="TASK_CONFIG_PATH",
str_config="config",
str_config_data="config_data",
str_experiment="experiment",
str_data="data",
str_model="task",
str_workspace="workspace",
str_src="src",
data_dirs=['raw', 'processed', 'interim', 'sample'],
prj_dirs=['data_loader', 'prj_config', 'experiment', 'data', 'model', 'trainers', 'tasks', 'wodeutil'],
src_dirs=['utils', 'task', 'trainers', 'data_loader'],
exp_dirs=['chkpoint'],
# str_NEPTUNE_API_TOKEN="NEPTUNE_API_TOKEN",
# str_COMET_API_TOKEN="COMET_API_TOKEN",
API_TOKENS=["NEPTUNE_API_TOKEN", "COMET_API_TOKEN"],
TASK_SAMPLE="ReverseSenTask",
WORKSPACE_INI="workspace.ini",
)
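# Usage sketch (illustrative): argparse.Namespace gives plain attribute access to
# the constants defined above, e.g.
#   SysToken.EXT_JSON      -> ".json"
#   SysToken.data_dirs[0]  -> "raw"
#   SysToken.SUCCESS_IND   -> 1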
| 30.3125
| 107
| 0.672165
|
443be64e49d0def08f8863366cd02d0afd7abbf8
| 39,564
|
py
|
Python
|
code/python/process_emails.py
|
nsob1c12/tetepy
|
2defb61dea95c69747b07d7c6b3e106b604455ee
|
[
"BSD-3-Clause"
] | null | null | null |
code/python/process_emails.py
|
nsob1c12/tetepy
|
2defb61dea95c69747b07d7c6b3e106b604455ee
|
[
"BSD-3-Clause"
] | null | null | null |
code/python/process_emails.py
|
nsob1c12/tetepy
|
2defb61dea95c69747b07d7c6b3e106b604455ee
|
[
"BSD-3-Clause"
] | 1
|
2018-11-21T09:53:44.000Z
|
2018-11-21T09:53:44.000Z
|
# This file is part of the TeTePy software
# Copyright (c) 2017, 2018, University of Southampton
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import email, email.Utils, types, os, os.path, mimetypes, string, time, smtplib
import logging, exceptions, fcntl, sys, shutil, email.MIMEBase, email.MIMEText
import re, random, pprint, shelve, errno, textwrap
import mylogger
import enqueue_outgoing_mails
try:
import pwd
except:
raise ImportError,"Couldn't import pwd -- only available on unix systems"
Modulecode = pwd.getpwuid(os.getuid()).pw_name.upper()
print "Module code is", Modulecode
conf = __import__('config_' + Modulecode.lower())
from lab_helpers import *
import lab_helpers
log_global=None
log_level = conf.log_level
debug = 0
if 'subtest_tests' in dir(conf):
pass #expect tests
else:
#create empty fake entries
conf.subtest_tests = {} #no tests to be done
subtest_queue = '/non-existent'
class myException(exceptions.Exception):
pass
# Regex for mail daemon
_rx_email_daemon=r"daemon|deamon|fetchmail-daemon|FETCHMAIL-DAEMON|cron|root|postmaster"
# Regex for From-escaping in emails
_rx_from_escape=">(>*From (.|\n)*)"
# User cannot upload files with such names:
blacklisted_filenames=["log.txt","s.py"]
def is_true(x):
if x:
return True
else:
return False
def bundle_files_in_directory_in_email( directory,to,From,Subject ):
"""bundle all files in directory and return as email object
This is taken from Matthew Cowles
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/86674 """
import cStringIO
import base64
import email.Generator
import email.Message
import os
import quopri
mainMsg=email.Message.Message()
mainMsg["To"]=to
mainMsg["From"]=From
mainMsg["Subject"]=Subject
mainMsg["Mime-version"]="1.0"
mainMsg["Content-type"]="Multipart/mixed"
mainMsg.preamble="Mime message\n"
mainMsg.epilogue="" # To ensure that message ends with newline
# Get names of plain files
filenames = []
for f in os.listdir(directory):
if os.path.isfile(os.path.join(directory,f)):
filenames.append(f)
for fileName in filenames:
# print "working on",fileName
contentType,ignored=mimetypes.guess_type(fileName)
if contentType==None: # If no guess, use generic opaque type
contentType="application/octet-stream"
contentsEncoded=cStringIO.StringIO()
f=open(os.path.join(directory,fileName),"rb")
mainType=contentType[:contentType.find("/")]
if mainType=="text":
cte="quoted-printable"
quopri.encode(f,contentsEncoded,1) # 1 for encode tabs
else:
cte="base64"
base64.encode(f,contentsEncoded)
f.close()
subMsg=email.Message.Message()
subMsg.add_header("Content-type",contentType,name=fileName)
subMsg.add_header("Content-transfer-encoding",cte)
subMsg.set_payload(contentsEncoded.getvalue())
contentsEncoded.close()
mainMsg.attach(subMsg)
return mainMsg
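# Usage sketch for bundle_files_in_directory_in_email (added for clarity; the
# directory and addresses are hypothetical):
#   msg = bundle_files_in_directory_in_email('/tmp/submissions/jd1g14/lab1',
#                                            to='student@example.org',
#                                            From='module@example.org',
#                                            Subject='retrieved files')
#   # msg is an email.Message.Message with one MIME part per plain file found
#   # directly inside the directory.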
def retrieve_assignment(assignment,message,student_dir,real_name,email_addr,logger):
logger.info("Retrieving files for %s" % repr(assignment))
(username,domain)=email_address_username_and_domain(email_addr)
#get list of files in student_dir
submission_dir = os.path.join(student_dir,assignment)
if os.path.exists ( submission_dir ):
submitted_files = os.listdir( submission_dir )
else:
errormail = replymail_error( message, "It seems that you have not yet submitted any files." )
append_mail_to_mailbox( errormail, username, logger, "(outgoing error mail: no files submitted->retrieval is impossible)" )
return None
files_by_type = analyze_filenames(assignment_file_map(assignment), submitted_files, logger)
report = submitted_files_report(assignment, files_by_type)
body = ["Dear %s (%s),\n\n" % (real_name,email_addr)]
body.append("Here is a list of your files found on the server for assignment '%s':\n\n" % assignment)
body.append(report)
body.append("\n\nPlease find attached to the _next_ email these files\n")
body.append("that you submitted for '%s'.\n\n" %assignment)
body.append("(In addition, there may be one (or more) files named\n")
body.append("'part00?.bin' which contain the body of your email and can \n")
body.append("be ignored.)\n\n")
subject = "[%s] summary of submitted files for '%s' (%s)" % ( conf.ModulecodeSubjectLine, assignment, time.asctime())
mail = enqueue_outgoing_mails.send_text_message( email_addr, conf.ModuleEmailAddress, string.join(body,""), subject)
append_mail_to_mailbox( mail, username, logger, "(outgoing retrieval report mail)" )
#now do retrieve the files and mail those
subject = "[%s] retrieved files for '%s' (%s)" % ( conf.ModulecodeSubjectLine, assignment, time.asctime())
From = conf.ModuleEmailAddress
to = email_addr
retrieval_return_mail = bundle_files_in_directory_in_email( submission_dir,to,From,subject)
text = enqueue_outgoing_mails.send_message(retrieval_return_mail)
append_mail_to_mailbox( text, 'test', logger, "(outgoing retrieval mail)" )
logger.info("Sent retrieval mail for %s" % repr(assignment))
def is_retrieval (subject):
"""check whether the first part of the subject is 'retrieve'"""
#catch empty subject
if subject == None:
return False
return is_true(re.match(r"\s*retrieve\s+",subject,re.IGNORECASE))
def extract_attachments(msg):
"""Extracts attachments from msg (which is a Message Object from
module 'email') and returns them in a dictionary:
key is name of file and value is content.
"""
log_local = ""
result = {}
counter = 0
for part in msg.walk():
# multipart/* are just containers
if part.get_content_type() == 'multipart':
continue
filename = part.get_filename()
if not(filename) or (listindex(blacklisted_filenames,filename)):
counter += 1
log_local += "Could not get file_name of attachment. "
filename = 'part-%03d%s' % (counter, ".bin")
log_local+="Assigned filename=%s to attachment. " % repr(filename)
counter += 1
result[filename]= part.get_payload(decode=1)
log_local += "Extracting attachment %i with name %s. " % (counter,repr(filename))
return (result, log_local)
def save_attachments( msg, dir ):
#connect to log file for user
logger = mylogger.attach_to_logfile( os.path.join(dir,'log.txt' ), level = log_level )
def keep_older_versions_of_this_file( filename ):
def change_name_version(newfilename,filename,changes=0):
#use just the filename (without path) for logging:
log_nfn = os.path.split(newfilename)[1]
log_fn = os.path.split(filename)[1]
logger.debug("Entering change_name_version %s <- %s " % (repr(log_nfn),repr(log_fn)))
try:
version = int(newfilename.split('.')[-1])
root = ".".join(newfilename.split('.')[0:-1])
except ValueError,msg:
logger.error( "problem with filename %s in increase_counter" % repr(filename))
raise ValueError,msg
if os.path.exists(newfilename):
logger.debug("file: %s exists -- recursive retry!" % repr(log_nfn))
changes = change_name_version(root+'.'+str(version+1),root+'.'+str(version),changes)
else:
logger.debug("Found last file: %s" % repr(log_nfn))
logger.debug( "About to rename %s to %s" % (repr(log_fn),repr(log_nfn)))
os.rename(filename,newfilename)
return changes+1
changes = None
if os.path.exists(filename):
changes = change_name_version(filename+'.1',filename)
return changes
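    # Worked example of the versioning above (added for clarity): if 'lab1.py' is
    # submitted while 'lab1.py' and 'lab1.py.1' already exist on disk, then
    #   'lab1.py.1' is renamed to 'lab1.py.2', and
    #   'lab1.py'   is renamed to 'lab1.py.1',
    # so the newly extracted attachment can be written to 'lab1.py' without losing
    # any earlier submission.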
    # save_attachments starts here
(att, logstr) = extract_attachments( msg )
logger.info("============ save_attachments =======(%s)" % repr(dir))
logger.debug( logstr )
counter = 0
for filename in att.keys():
counter += 1
logger.debug("Need to extract attachment %i (named %s)" % (counter,repr(filename)))
if att[filename] == None:
logger.warn("Found empty attachement %i (att[%s]==None), Skipping" % (counter,repr(filename)))
continue
changes = keep_older_versions_of_this_file(os.path.join(dir, filename))
if changes:
logger.info("Extracting attachment %i (named %s, keeping %d old copies)" % (counter,repr(filename),changes) )
else:
logger.info("Extracting attachment %i (named %s)" % (counter,repr(filename)) )
fp = open(os.path.join(dir, filename), 'wb')
fp.write( att[filename] )
fp.close()
return att
def append_mail_to_mailbox( mail, student_login, logger, logcomment = "" ):
username = student_login
mailboxdir = os.path.join(conf.Maildir, username)
logger.info("Appending Email to %s %s" % (repr(mailboxdir),logcomment))
f_out = open ( mailboxdir , 'a' )
f_out.write( mail )
f_out.close()
def split_mailbox_into_strings( inbox ):
"""Takes filename of inbox containing one or more email, and
returns list of strings, each containing one email
"""
fin = open(inbox,"r")
mailstrings = []
mailtext = []
linecounter = 0
while 1:
line = fin.readline()
linecounter += 1
if (linecounter%10000)==0:
log_global.debug("read line %d: %s" % (linecounter,line[0:30]))
if not line:
if debug:
print "reached end of file"
#if we have found any data
if mailtext != []:
# append last data (as string) to list of emails
mailstrings.append( string.join(mailtext,'') )
else:
#this indicates an empty inbox file
pass
break #reached end of file
if line[0:5] == "From ": #found new email, start new file
log_global.debug("Found new 'From' in mailbox file")
#this will make the first list entry 'None'. Remove that before we return
mailstrings.append( string.join(mailtext,'') )
mailtext = [line]
if debug:
print "Starting new mailf file"
# Emails that contain "From " on a single line get that escaped to ">From",
# and ">From" will be escaped to ">>From" etc.
fmatch=re.match(_rx_from_escape,line)
if fmatch:
line=fmatch.group(1)
#write line to currently active mailfile
try:
mailtext.append(line)
except IndexError:
log_global.exception("Error: file %s didn't start with 'From'" % repr(inbox))
raise IndexError
except:
print "Came across some other error while reading inbox"
sys.exit(1)
fin.close()
return mailstrings[1:]
def email_address_username_and_domain(addr):
"""Maps an email address such as user@example.org to username
and domain part.
Note: this routine splits at the last '@' sign, having checked
only that the given address contains at least one '@' sign and at
least one '.' character.
It is possible to have a valid email address with multiple '@'
signs, (see e.g. the informal RFC3696), and this routine should
work in these cases.
"""
if (addr.count('@') < 1 or addr.count('.') < 1):
subject = "WARNING: Invalid address on incoming email from %s" % repr(addr)
text = ("Email from %s, address either has fewer than one '.' character,\n"
"or fewer than one '@' character." % repr(addr))
enqueue_outgoing_mails.send_text_message(conf.SysadminEmail, conf.ModuleEmailAddress, text, subject)
log_global.info("Emailed sysadmin about regex failure of splitting address %s. " % addr)
raise StandardError,"Unusual email address : '%s" % repr(addr)
try:
parts = addr.split('@')
domain = parts[-1]
username = ''
for p in parts[0:-2]:
username = username + p + '@'
username = username + parts[-2]
except:
# otherwise send message to admin
subject = "WARNING: Address split failed on incoming email from %s" % repr(addr)
text = "Email from %s. We split to find name='%s' and domain='%s'" % (addr,username,domain)
enqueue_outgoing_mails.send_text_message(conf.SysadminEmail, conf.ModuleEmailAddress, text, subject)
log_global.info("Emailed sysadmin about failure of splitting address %s. " % addr)
raise StandardError,"Unusual email address : '%s" % repr(addr)
return username, domain
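# Worked examples for email_address_username_and_domain (added for clarity):
#   'user@example.org'  -> ('user', 'example.org')
#   'a@b@example.org'   -> ('a@b', 'example.org')   # the split happens at the last '@'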
def get_email_metadata( the_email, debug = 0 ):
"""expects email as msg object from email.Message.Message()
Returns sender, login, domain, number of attachments and subject line.
"""
#identify student
(real_name, email_addr) = email.Utils.parseaddr( the_email["From"] )
(username,domain) = email_address_username_and_domain(email_addr)
subject = str(the_email["Subject"])
payload = the_email.get_payload()
#compute number of attachments
if type( payload ) == types.ListType:
n_attach = len( payload )
else:
n_attach = 1
if real_name=='':
log_global.info("Incoming email from username='%s', subject='%s' has no realname. Will look up in student list file" % (username,subject))
real_name = user_realname(username)
result= (real_name, email_addr, username, domain, n_attach, subject)
return result
def sending_domain_okay(domain,known_domains=["example.org","example.org.uk"]):
pattern = "(%s)$" % reduce(lambda x,y:(x+"|"+y),["\\b"+re.escape(d) for d in known_domains])
    # This turns known_domains into word-boundary-delimited matches, i.e.
    # soton.ac.uk -> \bsoton\.ac\.uk, then builds an end-anchored or-pattern from that.
return is_true(re.search(pattern,domain,re.IGNORECASE))
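# Worked example for sending_domain_okay (added for clarity): with the default
# known_domains the generated pattern is
#   (\bexample\.org|\bexample\.org\.uk)$
# so 'mail.example.org' matches (word boundary before 'example', anchored at the
# end of the string), while 'evilexample.org' does not.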
def replymail_confirm_submission(real_name, email_addr, text, subject, assignment, valid_attachments, q_id=None):
""" Sends an email to the student named real_name at address
email_addr, which consists of a confirmation of receipt of their
email whose subject should be in subject, with a q_id (if
assigned) followed by the contents of the traing text.
Returns the sent message as string if there are no attachments, or
None."""
intro = "Dear "+real_name+" ("+email_addr+"),\n\n" + \
textwrap.fill("this email confirms the receipt of your email\n" + \
"with subject %s at %s." %( repr(subject), time.asctime()))+"\n\n"
if q_id:
if valid_attachments:
intro += textwrap.fill("Your submitted files have been added to the "+\
"testing queue (id=%s).\n" % (q_id) + \
"You will receive a separate email with the "+\
"testing results.")+"\n\n"
newsubject = "["+conf.ModulecodeSubjectLine+"] Submission Confirmation "\
+str(assignment)+" ("+time.asctime()+")"
else:
intro += textwrap.fill("Your files will be archived.")+"\n\n"
newsubject = "["+conf.ModulecodeSubjectLine+"] Archive confirmation "+str(assignment)+" ("+time.asctime()+")"
return enqueue_outgoing_mails.send_text_message( email_addr, conf.ModuleEmailAddress, intro+text, newsubject)
def replymail_error( msg, text, CC_to_admin=False, maxsend=None ):
"""Takes message 'msg' and composes a reply to the sender with body 'text'.
If CC_to_admin is true, the message will be sent to the sysadmin as well.
The idea of maxsend is that if maxsend is given, we will only attempt maxsend time
to deliver email to the same email address. This could be useful if we engage in
infinite loops with external spam. The code isn't written for this yet.
"""
real_name, email_addr = email.Utils.parseaddr( msg["From"] )
log_global.debug("in replymail_error (to %s, subj: %s)" % (real_name,msg["Subject"]))
text = "Dear "+str(real_name)+" ("+str(email_addr)+"),\n\n" + \
"An error occured while parsing your email with subject\n" + \
repr(msg["Subject"])+" received at "+time.asctime()+":\n\n"+ \
text
if CC_to_admin:
subject = "["+conf.ModulecodeSubjectLine+"-admin] submission error, "+time.ctime(time.time())+", "+str(msg["Subject"])
enqueue_outgoing_mails.send_text_message( conf.SysadminEmail, conf.ModuleEmailAddress, text, subject)
subject = "["+conf.ModulecodeSubjectLine+"] submission error, "+time.ctime(time.time())+", "+str(msg["Subject"])
return enqueue_outgoing_mails.send_text_message( email_addr, conf.ModuleEmailAddress, text, subject)
def check_required_files(file_map, file_names):
""" Given a file_map for the assignment, the names of all
attachments taken from the current email in file_names, and a
logger, this function checks that each file marked mandatory was
extracted from an attachment.
    Returns (all_mandatory_files_present, missing_required_files),
    where all_mandatory_files_present is a boolean and
    missing_required_files is a list of names of missing required files."""
all_mandatory_files_present = True
missing_required_files=[]
# Loop over all filenames known for this assignment.
for assignment_file_name in file_map.keys():
(ftype, fpriority) = file_map[assignment_file_name]
# File is mandatory but was not attached.
if ((ftype == 'mandatory') and (assignment_file_name not in file_names)):
all_mandatory_files_present = False
missing_required_files.append(assignment_file_name)
return (all_mandatory_files_present, missing_required_files)
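# Worked example for check_required_files (added for clarity; the file names and
# priority values are hypothetical):
#   file_map   = {'lab1.py': ('mandatory', 1), 'notes.txt': ('optional', 2)}
#   file_names = ['notes.txt']
#   -> returns (False, ['lab1.py']) because the mandatory file was not attached.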
def submission_reply_report(student_dir, attachments, lab_name):
""" Returns a tuple (valid_attachments, reply) where:
valid_attachments = True if all the files marked 'mandatory' are
present in the attachments passed in, False otherwise.
reply is a string containing a report that gives details of which
files were saved from the student submission and which files were
already stored in student_dir."""
valid_attachments = False
report=[]
log_local = mylogger.attach_to_logfile( os.path.join( student_dir,'log.txt' ), level = logging.DEBUG )
log_global.debug("Attachment keys: %s" % repr(attachments.keys()))
files_by_type = analyze_filenames(assignment_file_map(lab_name), attachments.keys(), log_global)
log_global.debug("files_by_type: %s" % repr(files_by_type))
(valid_attachments, missing_required_files) = check_required_files(assignment_file_map(lab_name), attachments.keys())
nr_files=reduce(lambda sf,x:sf+len(files_by_type[x]),files_by_type.keys(),0)
log_global.info("attachments: %s files_by_type: %s"%(repr(attachments.keys()),repr(files_by_type)))
# All required files were extracted.
if (nr_files > 0 and valid_attachments == True):
report.append("### Files found in this submission for assessment '%s' ###\n\n" % lab_name)
ftypes=files_by_type.keys()
ftypes.sort()
for ftype in ftypes:
files=files_by_type[ftype]
fstr=string.join(files,"\n ")
report.append(" %-12s:\n %s\n" % (ftype,fstr))
report.append("\n")
# Some files were extracted but not all the required ones (or
# files not correctly named, ...)
elif (len(missing_required_files) > 0):
report.append("### WARNING: this submission will not be tested.\n\n")
report.append("### Not all the required files were extracted from your email.\n")
report.append("### Please check that you have attached all the required files.\n\n")
report.append("### Files found in this submission for assessment '%s':\n\n" % lab_name)
ftypes=files_by_type.keys()
ftypes.sort()
for ftype in ftypes:
files=files_by_type[ftype]
fstr=string.join(files,"\n ")
report.append(" %-12s:\n %s\n" % (ftype,fstr))
report.append("\n")
report.append("### Required files not found in this submission:\n\n")
for filename in missing_required_files:
report.append(" %-12s\n" % filename)
report.append("\n\n")
# No files extracted.
else:
report.append("WARNING: no files have been extracted from your email.\n")
report.append(" (Maybe you have forgotten to attach them?)\n\n")
#get list of files in student_dir
submitted_files = os.listdir(os.path.join(student_dir,lab_name))
#remove log files from these and separate into known and unknown files
submitted_by_type = analyze_filenames(assignment_file_map(lab_name), submitted_files, log_global)
report.append("-----------------------------------------------------\n\n")
report.append("In summary, you have submitted these files for assessment '%s'.\n" % lab_name)
report.append("Please note that the files listed below may be from previous submission\n")
report.append("attempts, where these are allowed:\n\n")
report.append(submitted_files_report(lab_name, submitted_by_type))
report.append("\n\n == IMPORTANT: Keep this email as a receipt of your submission ==\n")
return (valid_attachments, string.join(report,""))
def check_maildaemon( msg, from_addr, domain ):
if re.search(_rx_email_daemon,from_addr,re.IGNORECASE):
log_global.critical("Received a msg from a mailing daemon (X)! ("+msg["From"]+")")
log_global.critical("Forwarding it to administrator (%s)." % (conf.SysadminEmail) )
# We need to delete the "To" key entry first:
del msg["To"]
# Then write new value
msg["To"] = conf.SysadminEmail
original_subject = msg["Subject"]
del msg["Subject"]
msg["Subject"] = "Urgent: [%s] email from daemon: %s" % (conf.ModulecodeSubjectLine,repr(original_subject))
del msg["From"]
msg["From"] = conf.ModuleEmailAddress
enqueue_outgoing_mails.mailqueue_push(msg)
return 1
#check that the email is not from ourselves
if string.count( from_addr.lower(), conf.Modulecode.lower()):
log_global.critical("Received a msg from myself! ("+msg["From"]+")")
log_global.critical("Forwarding it to administrator (%s)." % repr(conf.SysadminEmail) )
subject = "Urgent: [%s] email from system (danger of loop): %s" % (conf.ModulecodeSubjectLine,repr(msg["Subject"]))
sendmail = enqueue_outgoing_mails.send_text_message( conf.SysadminEmail, conf.ModuleEmailAddress, msg.as_string(), subject)
append_mail_to_mailbox( sendmail, conf.sysadminmailfolder, log_global, "(outgoing mail to SysadminEmail (myself-loop))" )
return 1
if string.count(domain.lower(), "twitter"):
log_global.info("Received a msg from twitter: ("+msg["From"]+")")
log_global.info("Forwarding it to administrator (%s)." % (conf.SysadminEmail) )
# We need to delete the "To" key entry first:
del msg["To"]
#Then write new value
msg["To"] = conf.SysadminEmail
original_subject = msg["Subject"]
del msg["Subject"]
msg["Subject"] = "[%s] Twitter writes: %s" % (\
conf.ModulecodeSubjectLine,repr(original_subject))
del msg["From"]
msg["From"] = conf.ModuleEmailAddress
enqueue_outgoing_mails.mailqueue_push(msg)
return 1
return 0
def subject_identification( assignments, submission, log_global):
"""Only sensible submissions are labX (with X integer number) and
coursework.
submission is a string that contains the subject line.
Assignments is coming through from configuration file.
return None if could not identify
"""
debug = 0
if debug:
print 'trying to identify %20s\t: ' % submission,
if submission == None:
log_global.warn("unparseable subject: '%s' " % submission)
return None
if submission == '':
log_global.warn("unparseable empty string in subject: '%s' " % submission)
return None
assert len(assignments.keys()) > 0, "Internal error"
    match_spam=re.match(r"\{Spam\?\}(.*)",submission)  # escape '?' so the literal "{Spam?}" tag is matched
if match_spam:
submission=match_spam.group(1)
log_global.warn("stripping off '{Spam?}' from subject line: %s " % repr(submission))
forward_reply_keywords = ['Fwd:', 'Forward:', 'Re:', 'Reply:']
for kwd in forward_reply_keywords:
if kwd in submission:
log_global.warn("stripping off '{}' from subject line: {} ".format(
kwd, submission))
submission = submission.replace(kwd, '')
continue
#The list of relevant keywords we are after is
keys = [x.lower() for x in assignments.keys()] #this is ['lab1','lab2',...,'lab6','cw']
    #check whether subject line is preceded by "{Spam?} ". If so,
#then the mailing system thinks it is spam. This could be wrong, however.
#we therefore get rid of this, and log it.
canonicalized_submission=submission.replace(' ','').lower()
#do trivial test (i.e. input is 'lab1' or 'cw')
if canonicalized_submission in keys:
return canonicalized_submission
log_global.warn("unparseable string: %s / %s" % (submission, canonicalized_submission))
return None
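# Worked examples for subject_identification (added for clarity), assuming 'lab1'
# is one of the configured assignment keys:
#   'Re: Lab 1' -> 'Re:' is stripped, spaces removed, lowercased -> 'lab1'
#   'lab 1'     -> 'lab1'
#   'holiday'   -> no match, logged as unparseable, None is returned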
def subtestqueue_push(metadata):
counterfile = os.path.join(conf.subtest_queue,'.maxid')
if not os.path.exists(counterfile):
#make sure directory exists
os.system('mkdir -p %s' % conf.subtest_queue)
open(counterfile,'w').write("0")
f = open(counterfile, 'r+')
fcntl.flock(f.fileno(), fcntl.LOCK_EX)
q_id = int(f.read())+1
f.seek(0)
f.truncate(0)
f.write("%d" % q_id)
fcntl.flock(f.fileno(), fcntl.LOCK_UN)
f.close()
filename = "s%05d-%s-%s" % (q_id,metadata['assignment'],metadata['login'])
log_global.info("Injecting job (id=%d) to testing-queue entry '%s'" % (q_id,filename))
metadata['id']=q_id
metadata['qfilename']=filename
metadata['qfilepath']=os.path.join(conf.subtest_queue,filename)
assert os.path.exists(os.path.join(conf.subtest_queue,filename))==False,"Internal error"
f=open(os.path.join(conf.subtest_queue,filename),'w')
f.write(pprint.pformat(metadata))
f.close()
return q_id
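# Worked example for subtestqueue_push (added for clarity; the values are
# hypothetical): with metadata {'assignment': 'lab1', 'login': 'jd1g14', ...} and a
# counter file currently holding 6, the queue id becomes 7 and the queue entry is
# written to a file named 's00007-lab1-jd1g14' inside conf.subtest_queue.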
def process_inbox():
log_global.debug("process_inbox: lockdire=%s" % conf.Lockdir)
semaphore=lock_semaphore(conf.Lockdir)
if(not semaphore):
os.system('echo "\n---------------\n`date`"')
print "It seems that another version of this script is running already... Leaving cowardly."
print "Remove LOCKFILE in %s to overrride this" % repr(conf.Lockdir)
log_global.warn("Found LOCK, exiting now!")
return None
# now check whether there is anything in the mailbox file. If
# not, there is no point carrying on
#test whether mailbox file exists
if not os.path.exists(conf.inbox):
raise StandardError, "Inbox file %s does not exist" % repr(conf.inbox)
print("Trying to read from inbox {}".format(conf.inbox))
if int(os.popen("wc -c "+conf.inbox).read().split()[0]) == 0:
log_global.info("Inbox is empty. Quitting." )
unlock_semaphore(semaphore)
return None
#lock mailbox file
finbox = open(conf.inbox, 'r+')
fcntl.flock(finbox.fileno(), fcntl.LOCK_EX)
# copy file to tmp-file before we start analysing
tempname = conf.tmpname
log_global.debug("Copying %s to %s" % (repr(conf.inbox),repr(tempname)))
shutil.copyfile( conf.inbox, tempname )
shutil.copymode( conf.inbox, tempname ) # also copy permissions
#now delete mailbox
finbox.seek(0)
finbox.truncate(0)
#unlock
fcntl.flock(finbox.fileno(), fcntl.LOCK_UN)
finbox.close()
mails = split_mailbox_into_strings( tempname )
log_global.info("=====> found %d emails in inbox %s" % (len(mails),repr(conf.inbox)))
counter = 0
for mail in mails:
counter += 1
log_global.debug("(1) processing mail %d" % counter)
msg = email.message_from_string( mail )
(real_name, email_addr, email_login, domain, n_attach, subject) = get_email_metadata( msg )
#keep copy of email in folder with ALL incoming email (just in case)
append_mail_to_mailbox( mail, '_allincomingemail', log_global, "(keep copy of all incoming email in _allincomingemail)" )
log_global.info("%i: from %s (%s), ATT: %d, SUB: %s" % (counter,repr(real_name),repr(email_addr),n_attach,repr(subject)))
#check for special events (are we getting mail from a daemon?)
if check_maildaemon( msg, email_login, domain ):
log_global.info("(2a) sent email to administrator. Skipping to next student")
continue
#Check whether we need to check for particular users
if conf.allow_only_emails_given_in_list:
log_global.debug("(3a) only users given in list are allowed")
if email_addr.lower() in map(string.lower,lab_helpers.allowed_email_addresses()):
log_global.debug("Incoming email from %s is in list of acceptable emails" % email_addr)
else:
log_global.warn("rejecting email from addresss %s (not in allowed list)" % (email_addr))
error_msg = replymail_error(msg, conf.TXT_address,CC_to_admin=True,maxsend=None)
append_mail_to_mailbox( error_msg, '_errors', log_global, "(outgoing error mail: sending email_address (%s) unknown)" % (email_addr) )
continue
else: #do not check for address in list. Instead, check domain
log_global.debug("(3b) Allow all users from right domain")
if True: #set to False to allow emails from any domain (initially for St Poelten Summer School July 2011)
if not sending_domain_okay( domain ):
log_global.warn("rejecting email from %s (wrong domain)" % (email_addr))
error_msg = replymail_error(msg, conf.TXT_Domain,CC_to_admin=True)
append_mail_to_mailbox( error_msg, '_errors', log_global, "(outgoing error mail: wrong domain (%s))" % (email_addr) )
continue
#now we know the student
log_global.debug("(2) domain okay, student is %s (%s)" % (repr(email_login),repr(real_name)))
#check that the directory exists:
student_dirpart = email_login
student_dir = os.path.join( conf.Submissiondir, student_dirpart)
if not os.path.exists( student_dir ):
log_global.debug("Creating directory %s" % (repr(student_dir)))
os.mkdir(student_dir)
else:
log_global.debug(" Student directory exists (%s)" % (repr(student_dir)))
#connect to log file for user
logger = mylogger.attach_to_logfile( os.path.join( conf.Submissiondir, email_login,'log.txt' ), level = log_level )
logger.info(20*"-"+"studentdata:"+repr(email_login)+":"+repr(real_name))
#keep copy of mail in Maildir
append_mail_to_mailbox( mail, email_login, logger, "(incoming mail from {})".format(email_login) )
retrieval = False
#check whether this is a retrieval attempt
if is_retrieval(subject):
logger.info("Identified retrieval attempt (%s)" % (repr(subject)) )
retrieval = True
#chop off 'retrieve' from subject line
remaining_subject=re.match(r"\s*retrieve\s+(.*)",subject,re.IGNORECASE).group(1)
assignment = subject_identification( conf.assignments, remaining_subject, log_global)
log_global.warn("RETR: S=%s rems=%s a=%s" % (subject,remaining_subject,assignment))
else:
#check lab makes sense
assignment = subject_identification( conf.assignments, subject, log_global)
if assignment == None:
log_global.warn("rejecting email from %s (unknown submission: %s)" % (repr(email_addr),repr(subject)))
logger.warn("rejecting email (unknown submission: %s)" % (repr(subject)))
errormail = replymail_error(msg, conf.TXT_Submission)
append_mail_to_mailbox( errormail, email_login, logger, "(outgoing error mail: couldn't parse assignment)" )
continue #no need to carry on further
else:
if retrieval:
retrieve_assignment(assignment,msg,student_dir,real_name,email_addr,logger)
continue # no need to carry on further
#normal submission continues here
logger.info("found submission for %s (%s)" % (assignment,repr(subject)))
log_global.info("found submission for %s from %s" % (assignment,repr(email_login)))
#check that the directory exists:
student_lab_dir = os.path.join( conf.Submissiondir, student_dirpart, assignment )
if not os.path.exists( student_lab_dir ):
log_global.debug("Creating directory %s" % repr(student_lab_dir))
os.mkdir( student_lab_dir )
#check that files make sense
attachments = save_attachments( msg, student_lab_dir )
#generate report to be mailed to the student, and set
#valid_attachments to True if all the required files were
#attached to *this message*
(valid_attachments, reply) = submission_reply_report(student_dir, attachments, assignment)
#If we have the required attachments, check whether submission
#tests are associated with this assignment, and push a job to
#the test queue
log_global.debug("Have-found-valid_attachments = {}".format(valid_attachments))
we_have_a_testfile_for_this_submission = assignment in conf.subtest_tests.keys()
log_global.debug("Have-we-got-a-test-file-for-this-submission = {}"\
.format(we_have_a_testfile_for_this_submission))
if we_have_a_testfile_for_this_submission and valid_attachments:
log_global.debug("Found assignment {} in subtest.keys.".format(assignment))
subtest_metadata = {'student_lab_dir':student_lab_dir,
'assignment':assignment,
'real_name':real_name,
'email':email_addr,
'login':email_login,
'subject':subject,
'time':time.asctime()}
q_id = subtestqueue_push(subtest_metadata) #read Queue-id
# Compose and send an email to the student, based on the
# report generated above.
confirm_mail = replymail_confirm_submission(real_name, email_addr, reply, subject, assignment, valid_attachments, q_id)
append_mail_to_mailbox(confirm_mail, email_login, logger, "(outgoing confirmation mail; job submitted for testing)")
elif valid_attachments == True and we_have_a_testfile_for_this_submission == False:
log_global.info("Did not find assignment {} in subtest.keys={}".format(assignment, conf.subtest_tests.keys()))
q_id = None
confirm_mail = replymail_confirm_submission(real_name, email_addr, reply, subject, assignment, valid_attachments, q_id)
append_mail_to_mailbox(confirm_mail, email_login, logger, "(outgoing confirmation email - no testing to follow)")
elif valid_attachments == False:
            # submission_reply_report() above built the error report for this case; send it back to the student.
error_mail = replymail_error(msg, reply)
            append_mail_to_mailbox(error_mail, email_login, logger, "(outgoing error mail - attachments not valid)")
else:
raise RuntimeError("This should be impossible")
log_global.info("Finish.proc. %d emails. Rm %s and quit" % (len(mails),tempname)
unlock_semaphore(semaphore)
def startup():
"""check all directories are in place"""
if not os.path.exists( conf.Homedir ):
raise StandardError, "%s does not exist (but is Homedir)" % conf.Homedir
log_global = mylogger.attach_to_logfile( conf.Logfile, level = log_level )
print "reached startup(), logfile is %s" % conf.Logfile
log_global.debug(40*"=")
log_global.debug("Starting program up")
log_global.debug(40*"=")
#check directories
dirs = [conf.Maildir,conf.Submissiondir,conf.Tempdir]
for dir in dirs:
if not os.path.exists( dir ):
log_global.info("Creating directory %s" % ( dir ) )
os.mkdir( dir )
return log_global
if __name__ == "__main__":
#set everything up
log_global = startup()
enqueue_outgoing_mails.log_global = log_global
enqueue_outgoing_mails.conf = conf
live = True
if live:
try:
process_inbox()
except:
log_global.exception("Something went wrong (caught globally)")
log_global.critical("Preparing email to sysadmin (%s)" % repr(conf.SysadminEmail))
ins,outs = os.popen4('tail -n 100 '+conf.Logfile)
text = outs.read()
subject = "URGENT: Malfunction in %s at %s !!!" % (conf.ModulecodeSubjectLine,time.asctime())
enqueue_outgoing_mails.send_text_message( conf.SysadminEmail, conf.ModuleEmailAddress,text, subject)
log_global.info("Leaving now (not removing lockfile).")
else:
process_inbox()
import datetime,time
f=open(conf.pulsefile,'w')
data = {'now-secs':time.time(),'now-ascii':time.ctime(),'module':conf.ModulecodeSubjectLine,
'what':"process-emails"}
f.write("%s" % repr(data))
f.close()
log_global.debug("About to leave, updated pulse.")
| 39.883065
| 150
| 0.658427
|
7906251dbf3c4f92a779bbac39f599cf597effec
| 17,720
|
py
|
Python
|
mne/tests/test_report.py
|
fmamashli/mne-python
|
52f064415e7c9fa8fe243d22108dcdf3d86505b9
|
[
"BSD-3-Clause"
] | null | null | null |
mne/tests/test_report.py
|
fmamashli/mne-python
|
52f064415e7c9fa8fe243d22108dcdf3d86505b9
|
[
"BSD-3-Clause"
] | 23
|
2017-09-12T11:08:26.000Z
|
2019-10-04T11:11:29.000Z
|
mne/tests/test_report.py
|
fmamashli/mne-python
|
52f064415e7c9fa8fe243d22108dcdf3d86505b9
|
[
"BSD-3-Clause"
] | 3
|
2019-01-28T13:48:00.000Z
|
2019-07-10T16:02:11.000Z
|
# -*- coding: utf-8 -*-
# Authors: Mainak Jas <mainak@neuro.hut.fi>
# Teon Brooks <teon.brooks@gmail.com>
#
# License: BSD (3-clause)
import copy
import glob
import os
import os.path as op
import shutil
import numpy as np
from numpy.testing import assert_equal
import pytest
from matplotlib import pyplot as plt
from mne import Epochs, read_events, read_evokeds
from mne.io import read_raw_fif
from mne.datasets import testing
from mne.report import Report, open_report, _ReportScraper
from mne.utils import (_TempDir, requires_mayavi, requires_nibabel, Bunch,
run_tests_if_main, traits_test, requires_h5py)
from mne.viz import plot_alignment
data_dir = testing.data_path(download=False)
subjects_dir = op.join(data_dir, 'subjects')
report_dir = op.join(data_dir, 'MEG', 'sample')
raw_fname = op.join(report_dir, 'sample_audvis_trunc_raw.fif')
ms_fname = op.join(data_dir, 'SSS', 'test_move_anon_raw.fif')
event_fname = op.join(report_dir, 'sample_audvis_trunc_raw-eve.fif')
cov_fname = op.join(report_dir, 'sample_audvis_trunc-cov.fif')
fwd_fname = op.join(report_dir, 'sample_audvis_trunc-meg-eeg-oct-6-fwd.fif')
trans_fname = op.join(report_dir, 'sample_audvis_trunc-trans.fif')
inv_fname = op.join(report_dir,
'sample_audvis_trunc-meg-eeg-oct-6-meg-inv.fif')
mri_fname = op.join(subjects_dir, 'sample', 'mri', 'T1.mgz')
base_dir = op.realpath(op.join(op.dirname(__file__), '..', 'io', 'tests',
'data'))
evoked_fname = op.join(base_dir, 'test-ave.fif')
def _get_example_figures():
"""Create two example figures."""
fig1 = plt.plot([1, 2], [1, 2])[0].figure
fig2 = plt.plot([3, 4], [3, 4])[0].figure
return [fig1, fig2]
@pytest.mark.slowtest
@testing.requires_testing_data
def test_render_report():
"""Test rendering -*.fif files for mne report."""
tempdir = _TempDir()
raw_fname_new = op.join(tempdir, 'temp_raw.fif')
ms_fname_new = op.join(tempdir, 'temp_ms_raw.fif')
event_fname_new = op.join(tempdir, 'temp_raw-eve.fif')
cov_fname_new = op.join(tempdir, 'temp_raw-cov.fif')
fwd_fname_new = op.join(tempdir, 'temp_raw-fwd.fif')
inv_fname_new = op.join(tempdir, 'temp_raw-inv.fif')
for a, b in [[raw_fname, raw_fname_new],
[ms_fname, ms_fname_new],
[event_fname, event_fname_new],
[cov_fname, cov_fname_new],
[fwd_fname, fwd_fname_new],
[inv_fname, inv_fname_new]]:
shutil.copyfile(a, b)
# create and add -epo.fif and -ave.fif files
epochs_fname = op.join(tempdir, 'temp-epo.fif')
evoked_fname = op.join(tempdir, 'temp-ave.fif')
# Speed it up by picking channels
raw = read_raw_fif(raw_fname_new, preload=True)
raw.pick_channels(['MEG 0111', 'MEG 0121'])
raw.del_proj()
epochs = Epochs(raw, read_events(event_fname), 1, -0.2, 0.2)
epochs.save(epochs_fname, overwrite=True)
# This can take forever (stall Travis), so let's make it fast
# Also, make sure crop range is wide enough to avoid rendering bug
epochs.average().crop(0.1, 0.2).save(evoked_fname)
report = Report(info_fname=raw_fname_new, subjects_dir=subjects_dir)
with pytest.warns(RuntimeWarning, match='Cannot render MRI'):
report.parse_folder(data_path=tempdir, on_error='raise')
assert repr(report)
# Check correct paths and filenames
fnames = glob.glob(op.join(tempdir, '*.fif'))
for fname in fnames:
assert (op.basename(fname) in
[op.basename(x) for x in report.fnames])
assert (''.join(report.html).find(op.basename(fname)) != -1)
assert_equal(len(report.fnames), len(fnames))
assert_equal(len(report.html), len(report.fnames))
assert_equal(len(report.fnames), len(report))
# Check saving functionality
report.data_path = tempdir
fname = op.join(tempdir, 'report.html')
report.save(fname=fname, open_browser=False)
assert (op.isfile(fname))
with open(fname, 'rb') as fid:
html = fid.read().decode('utf-8')
assert '(MaxShield on)' in html
assert_equal(len(report.html), len(fnames))
assert_equal(len(report.html), len(report.fnames))
# Check saving same report to new filename
report.save(fname=op.join(tempdir, 'report2.html'), open_browser=False)
assert (op.isfile(op.join(tempdir, 'report2.html')))
# Check overwriting file
report.save(fname=op.join(tempdir, 'report.html'), open_browser=False,
overwrite=True)
assert (op.isfile(op.join(tempdir, 'report.html')))
# Check pattern matching with multiple patterns
pattern = ['*raw.fif', '*eve.fif']
with pytest.warns(RuntimeWarning, match='Cannot render MRI'):
report.parse_folder(data_path=tempdir, pattern=pattern)
assert (repr(report))
fnames = glob.glob(op.join(tempdir, '*.raw')) + \
glob.glob(op.join(tempdir, '*.raw'))
for fname in fnames:
assert (op.basename(fname) in
[op.basename(x) for x in report.fnames])
assert (''.join(report.html).find(op.basename(fname)) != -1)
pytest.raises(ValueError, Report, image_format='foo')
pytest.raises(ValueError, Report, image_format=None)
# SVG rendering
report = Report(info_fname=raw_fname_new, subjects_dir=subjects_dir,
image_format='svg')
with pytest.warns(RuntimeWarning, match='Cannot render MRI'):
report.parse_folder(data_path=tempdir, on_error='raise')
# ndarray support smoke test
report.add_figs_to_section(np.zeros((2, 3, 3)), 'caption', 'section')
with pytest.raises(TypeError, match='Each fig must be a'):
report.add_figs_to_section('foo', 'caption', 'section')
with pytest.raises(TypeError, match='Each fig must be a'):
report.add_figs_to_section(['foo'], 'caption', 'section')
@testing.requires_testing_data
def test_report_raw_psd_and_date():
"""Test report raw PSD and DATE_NONE functionality."""
with pytest.raises(TypeError, match='dict'):
Report(raw_psd='foo')
tempdir = _TempDir()
raw = read_raw_fif(raw_fname).crop(0, 1.).load_data()
raw_fname_new = op.join(tempdir, 'temp_raw.fif')
raw.save(raw_fname_new)
report = Report(raw_psd=True)
report.parse_folder(data_path=tempdir, render_bem=False,
on_error='raise')
assert isinstance(report.html, list)
assert 'PSD' in ''.join(report.html)
assert 'GMT' in ''.join(report.html)
# DATE_NONE functionality
report = Report()
raw.anonymize()
raw.save(raw_fname_new, overwrite=True)
report.parse_folder(data_path=tempdir, render_bem=False,
on_error='raise')
assert isinstance(report.html, list)
assert 'GMT' not in ''.join(report.html)
@testing.requires_testing_data
@requires_mayavi
@traits_test
def test_render_add_sections():
"""Test adding figures/images to section."""
tempdir = _TempDir()
report = Report(subjects_dir=subjects_dir)
# Check add_figs_to_section functionality
fig = plt.plot([1, 2], [1, 2])[0].figure
report.add_figs_to_section(figs=fig, # test non-list input
captions=['evoked response'], scale=1.2,
image_format='svg')
pytest.raises(ValueError, report.add_figs_to_section, figs=[fig, fig],
captions='H')
pytest.raises(ValueError, report.add_figs_to_section, figs=fig,
captions=['foo'], scale=0, image_format='svg')
pytest.raises(ValueError, report.add_figs_to_section, figs=fig,
captions=['foo'], scale=1e-10, image_format='svg')
# need to recreate because calls above change size
fig = plt.plot([1, 2], [1, 2])[0].figure
# Check add_images_to_section with png
img_fname = op.join(tempdir, 'testimage.png')
fig.savefig(img_fname)
report.add_images_to_section(fnames=[img_fname],
captions=['evoked response'])
report.add_images_to_section(fnames=[img_fname],
captions=['evoked response'])
pytest.raises(ValueError, report.add_images_to_section,
fnames=[img_fname, img_fname], captions='H')
pytest.raises(ValueError, report.add_images_to_section,
fnames=['foobar.xxx'], captions='H')
evoked = read_evokeds(evoked_fname, condition='Left Auditory',
baseline=(-0.2, 0.0))
fig = plot_alignment(evoked.info, trans_fname, subject='sample',
subjects_dir=subjects_dir)
report.add_figs_to_section(figs=fig, # test non-list input
captions='random image', scale=1.2)
assert (repr(report))
@pytest.mark.slowtest
@testing.requires_testing_data
@requires_mayavi
@traits_test
@requires_nibabel()
def test_render_mri():
"""Test rendering MRI for mne report."""
tempdir = _TempDir()
trans_fname_new = op.join(tempdir, 'temp-trans.fif')
for a, b in [[trans_fname, trans_fname_new]]:
shutil.copyfile(a, b)
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=subjects_dir)
report.parse_folder(data_path=tempdir, mri_decim=30, pattern='*')
report.save(op.join(tempdir, 'report.html'), open_browser=False)
assert repr(report)
report.add_bem_to_section('sample', caption='extra', section='foo',
subjects_dir=subjects_dir, decim=30)
report.save(op.join(tempdir, 'report.html'), open_browser=False,
overwrite=True)
@testing.requires_testing_data
@requires_nibabel()
def test_render_mri_without_bem():
"""Test rendering MRI without BEM for mne report."""
tempdir = _TempDir()
os.mkdir(op.join(tempdir, 'sample'))
os.mkdir(op.join(tempdir, 'sample', 'mri'))
shutil.copyfile(mri_fname, op.join(tempdir, 'sample', 'mri', 'T1.mgz'))
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=tempdir)
report.parse_folder(tempdir, render_bem=False)
report.save(op.join(tempdir, 'report.html'), open_browser=False)
@testing.requires_testing_data
@requires_nibabel()
def test_add_htmls_to_section():
"""Test adding html str to mne report."""
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=subjects_dir)
html = '<b>MNE-Python is AWESOME</b>'
caption, section = 'html', 'html_section'
report.add_htmls_to_section(html, caption, section)
idx = report._sectionlabels.index('report_' + section)
html_compare = report.html[idx]
assert (html in html_compare)
assert (repr(report))
def test_add_slider_to_section():
"""Test adding a slider with a series of images to mne report."""
tempdir = _TempDir()
report = Report(info_fname=raw_fname,
subject='sample', subjects_dir=subjects_dir)
section = 'slider_section'
figs = _get_example_figures()
report.add_slider_to_section(figs, section=section, title='my title')
assert report.fnames[0] == 'my title-#-report_slider_section-#-custom'
report.save(op.join(tempdir, 'report.html'), open_browser=False)
pytest.raises(NotImplementedError, report.add_slider_to_section,
[figs, figs])
pytest.raises(ValueError, report.add_slider_to_section, figs, ['wug'])
pytest.raises(TypeError, report.add_slider_to_section, figs, 'wug')
# need at least 2
pytest.raises(ValueError, report.add_slider_to_section, figs[:1], 'wug')
# Smoke test that SVG w/unicode can be added
report = Report()
fig, ax = plt.subplots()
ax.set_xlabel(u'μ')
report.add_slider_to_section([fig] * 2, image_format='svg')
def test_validate_input():
"""Test Report input validation."""
report = Report()
items = ['a', 'b', 'c']
captions = ['Letter A', 'Letter B', 'Letter C']
section = 'ABCs'
comments = ['First letter of the alphabet.',
'Second letter of the alphabet',
'Third letter of the alphabet']
pytest.raises(ValueError, report._validate_input, items, captions[:-1],
section, comments=None)
pytest.raises(ValueError, report._validate_input, items, captions, section,
comments=comments[:-1])
values = report._validate_input(items, captions, section, comments=None)
items_new, captions_new, comments_new = values
assert_equal(len(comments_new), len(items))
@requires_h5py
def test_open_report():
"""Test the open_report function."""
tempdir = _TempDir()
hdf5 = op.join(tempdir, 'report.h5')
# Test creating a new report through the open_report function
fig1 = _get_example_figures()[0]
with open_report(hdf5, subjects_dir=subjects_dir) as report:
assert report.subjects_dir == subjects_dir
assert report._fname == hdf5
report.add_figs_to_section(figs=fig1, captions=['evoked response'])
# Exiting the context block should have triggered saving to HDF5
assert op.exists(hdf5)
# Load the HDF5 version of the report and check equivalence
report2 = open_report(hdf5)
assert report2._fname == hdf5
assert report2.subjects_dir == report.subjects_dir
assert report2.html == report.html
assert report2.__getstate__() == report.__getstate__()
assert '_fname' not in report2.__getstate__()
# Check parameters when loading a report
pytest.raises(ValueError, open_report, hdf5, foo='bar') # non-existing
pytest.raises(ValueError, open_report, hdf5, subjects_dir='foo')
open_report(hdf5, subjects_dir=subjects_dir) # This should work
# Check that the context manager doesn't swallow exceptions
with pytest.raises(ZeroDivisionError):
with open_report(hdf5, subjects_dir=subjects_dir) as report:
1 / 0
def test_remove():
"""Test removing figures from a report."""
r = Report()
fig1, fig2 = _get_example_figures()
r.add_figs_to_section(fig1, 'figure1', 'mysection')
r.add_slider_to_section([fig1, fig2], title='figure1',
section='othersection')
r.add_figs_to_section(fig2, 'figure1', 'mysection')
r.add_figs_to_section(fig2, 'figure2', 'mysection')
# Test removal by caption
r2 = copy.deepcopy(r)
removed_index = r2.remove(caption='figure1')
assert removed_index == 2
assert len(r2.html) == 3
assert r2.html[0] == r.html[0]
assert r2.html[1] == r.html[1]
assert r2.html[2] == r.html[3]
# Test restricting to section
r2 = copy.deepcopy(r)
removed_index = r2.remove(caption='figure1', section='othersection')
assert removed_index == 1
assert len(r2.html) == 3
assert r2.html[0] == r.html[0]
assert r2.html[1] == r.html[2]
assert r2.html[2] == r.html[3]
# Test removal of empty sections
r2 = copy.deepcopy(r)
r2.remove(caption='figure1', section='othersection')
assert r2.sections == ['mysection']
assert r2._sectionvars == {'mysection': 'report_mysection'}
def test_add_or_replace():
"""Test replacing existing figures in a report."""
r = Report()
fig1, fig2 = _get_example_figures()
r.add_figs_to_section(fig1, 'duplicate', 'mysection')
r.add_figs_to_section(fig1, 'duplicate', 'mysection')
r.add_figs_to_section(fig1, 'duplicate', 'othersection')
r.add_figs_to_section(fig2, 'nonduplicate', 'mysection')
# By default, replace=False, so all figures should be there
assert len(r.html) == 4
old_r = copy.deepcopy(r)
# Re-add fig1 with replace=True, it should overwrite the last occurrence of
# fig1 in section 'mysection'.
r.add_figs_to_section(fig2, 'duplicate', 'mysection', replace=True)
assert len(r.html) == 4
assert r.html[1] != old_r.html[1] # This figure should have changed
# All other figures should be the same
assert r.html[0] == old_r.html[0]
assert r.html[2] == old_r.html[2]
assert r.html[3] == old_r.html[3]
def test_scraper(tmpdir):
"""Test report scraping."""
r = Report()
fig1, fig2 = _get_example_figures()
r.add_figs_to_section(fig1, 'a', 'mysection')
r.add_figs_to_section(fig2, 'b', 'mysection')
# Mock a Sphinx + sphinx_gallery config
app = Bunch(builder=Bunch(srcdir=str(tmpdir),
outdir=op.join(str(tmpdir), '_build', 'html')))
scraper = _ReportScraper()
scraper.app = app
gallery_conf = dict(src_dir=app.builder.srcdir, builder_name='html')
img_fname = op.join(app.builder.srcdir, 'auto_examples', 'images',
'sg_img.png')
target_file = op.join(app.builder.srcdir, 'auto_examples', 'sg.py')
os.makedirs(op.dirname(img_fname))
os.makedirs(app.builder.outdir)
block_vars = dict(image_path_iterator=(img for img in [img_fname]),
example_globals=dict(a=1), target_file=target_file)
# Nothing yet
block = None
rst = scraper(block, block_vars, gallery_conf)
assert rst == ''
# Still nothing
block_vars['example_globals']['r'] = r
rst = scraper(block, block_vars, gallery_conf)
# Once it's saved, add it
assert rst == ''
fname = op.join(str(tmpdir), 'my_html.html')
r.save(fname, open_browser=False)
rst = scraper(block, block_vars, gallery_conf)
out_html = op.join(app.builder.outdir, 'auto_examples', 'my_html.html')
assert not op.isfile(out_html)
os.makedirs(op.join(app.builder.outdir, 'auto_examples'))
scraper.copyfiles()
assert op.isfile(out_html)
assert rst.count('"') == 6
assert "<iframe" in rst
assert op.isfile(img_fname.replace('png', 'svg'))
run_tests_if_main()
| 39.116998
| 79
| 0.66772
|
55fceec0dd1073a953a221d316dde2b6e58a9d09
| 3,663
|
py
|
Python
|
experiments/memory/recurrent_q_bptt_ddpg_exp.py
|
Asap7772/railrl_evalsawyer
|
baba8ce634d32a48c7dfe4dc03b123e18e96e0a3
|
[
"MIT"
] | 1
|
2020-10-23T14:40:09.000Z
|
2020-10-23T14:40:09.000Z
|
experiments/memory/recurrent_q_bptt_ddpg_exp.py
|
Asap7772/railrl_evalsawyer
|
baba8ce634d32a48c7dfe4dc03b123e18e96e0a3
|
[
"MIT"
] | null | null | null |
experiments/memory/recurrent_q_bptt_ddpg_exp.py
|
Asap7772/railrl_evalsawyer
|
baba8ce634d32a48c7dfe4dc03b123e18e96e0a3
|
[
"MIT"
] | 1
|
2021-05-27T20:38:45.000Z
|
2021-05-27T20:38:45.000Z
|
import random
from rlkit.envs.memory.continuous_memory_augmented import \
ContinuousMemoryAugmented
from rlkit.envs.mujoco.water_maze import WaterMazeMemory
from rlkit.exploration_strategies.ou_strategy import OUStrategy
from rlkit.exploration_strategies.product_strategy import ProductStrategy
from rlkit.launchers.launcher_util import run_experiment
from rlkit.memory_states.policies import MemoryPolicy
from rlkit.memory_states.qfunctions import RecurrentMemoryQFunction
from rlkit.torch.bptt_ddpg_rq import BpttDdpgRecurrentQ
def example(variant):
env_class = variant['env_class']
memory_dim = variant['memory_dim']
env_params = variant['env_params']
memory_aug_params = variant['memory_aug_params']
es_params = variant['es_params']
env_es_class = es_params['env_es_class']
env_es_params = es_params['env_es_params']
memory_es_class = es_params['memory_es_class']
memory_es_params = es_params['memory_es_params']
raw_env = env_class(**env_params)
env = ContinuousMemoryAugmented(
raw_env,
num_memory_states=memory_dim,
**memory_aug_params
)
env_strategy = env_es_class(
env_spec=raw_env.spec,
**env_es_params
)
write_strategy = memory_es_class(
env_spec=env.memory_spec,
**memory_es_params
)
es = ProductStrategy([env_strategy, write_strategy])
qf = RecurrentMemoryQFunction(
int(raw_env.observation_space.flat_dim),
int(raw_env.action_space.flat_dim),
memory_dim=memory_dim,
hidden_size=10,
fc1_size=400,
fc2_size=300,
)
policy = MemoryPolicy(
int(raw_env.observation_space.flat_dim),
int(raw_env.action_space.flat_dim),
memory_dim=memory_dim,
fc1_size=400,
fc2_size=300,
)
algorithm = BpttDdpgRecurrentQ(
env,
exploration_strategy=es,
qf=qf,
policy=policy,
**variant['algo_params']
)
algorithm.train()
if __name__ == "__main__":
use_gpu = True
H = 20
subtraj_length = 20
variant = dict(
algo_params=dict(
num_epochs=100,
num_steps_per_epoch=1000,
# num_steps_per_epoch=100,
num_steps_per_eval=H*10,
batch_size=H*32,
max_path_length=H,
use_gpu=use_gpu,
subtraj_length=subtraj_length,
action_policy_learning_rate=1e-3,
write_policy_learning_rate=1e-5,
qf_learning_rate=1e-3,
action_policy_optimize_bellman=True,
write_policy_optimizes='both',
refresh_entire_buffer_period=10,
),
env_params=dict(
# num_steps=H,
horizon=H,
use_small_maze=True,
l2_action_penalty_weight=0,
num_steps_until_reset=0,
),
# env_class=HighLow,
env_class=WaterMazeMemory,
memory_dim=20,
memory_aug_params=dict(
max_magnitude=1,
),
es_params=dict(
env_es_class=OUStrategy,
env_es_params=dict(
max_sigma=1,
min_sigma=None,
),
memory_es_class=OUStrategy,
memory_es_params=dict(
max_sigma=1,
min_sigma=None,
),
),
)
seed = random.randint(0, 9999)
run_experiment(
example,
# exp_prefix="dev-pytorch-recurrent-q-bptt-ddpg",
exp_prefix="6-13-small-memory-no-reset-full-bptt-recurrent",
seed=seed,
mode='here',
variant=variant,
use_gpu=use_gpu,
)
| 29.304
| 73
| 0.633361
|
a3ed4e95cb9a33dd3d47bfc975e9066c2d1b4dda
| 7,279
|
py
|
Python
|
monoscene/data/utils/helpers.py
|
Teaksters/MonoScene
|
0a5803052b54e57eb98556e53d3bf45be890b269
|
[
"Apache-2.0"
] | 122
|
2021-12-02T05:18:29.000Z
|
2022-03-31T13:07:16.000Z
|
monoscene/data/utils/helpers.py
|
Teaksters/MonoScene
|
0a5803052b54e57eb98556e53d3bf45be890b269
|
[
"Apache-2.0"
] | 16
|
2021-12-17T02:59:21.000Z
|
2022-03-25T10:47:46.000Z
|
monoscene/data/utils/helpers.py
|
Teaksters/MonoScene
|
0a5803052b54e57eb98556e53d3bf45be890b269
|
[
"Apache-2.0"
] | 17
|
2021-12-09T11:34:16.000Z
|
2022-03-17T03:26:23.000Z
|
import numpy as np
import monoscene.data.utils.fusion as fusion
import torch
def compute_CP_mega_matrix(target, is_binary=False):
"""
    Parameters
    ----------
    target: (H, W, D)
        contains the voxel semantic labels
    is_binary: bool
        if True, return binary voxel relations, else return 4-way relations
    Returns
    -------
    matrix: (2 or 4, N, N_mega)
        binary relation matrix between each of the N voxels and each of the
        N_mega 2x2x2 super-voxels (2 channels when is_binary, otherwise 4)
"""
label = target.reshape(-1)
label_row = label
N = label.shape[0]
super_voxel_size = [i//2 for i in target.shape]
if is_binary:
matrix = np.zeros((2, N, super_voxel_size[0] * super_voxel_size[1] * super_voxel_size[2]), dtype=np.uint8)
else:
matrix = np.zeros((4, N, super_voxel_size[0] * super_voxel_size[1] * super_voxel_size[2]), dtype=np.uint8)
for xx in range(super_voxel_size[0]):
for yy in range(super_voxel_size[1]):
for zz in range(super_voxel_size[2]):
col_idx = xx * (super_voxel_size[1] * super_voxel_size[2]) + yy * super_voxel_size[2] + zz
label_col_megas = np.array([
target[xx * 2, yy * 2, zz * 2],
target[xx * 2 + 1, yy * 2, zz * 2],
target[xx * 2, yy * 2 + 1, zz * 2],
target[xx * 2, yy * 2, zz * 2 + 1],
target[xx * 2 + 1, yy * 2 + 1, zz * 2],
target[xx * 2 + 1, yy * 2, zz * 2 + 1],
target[xx * 2, yy * 2 + 1, zz * 2 + 1],
target[xx * 2 + 1, yy * 2 + 1, zz * 2 + 1],
])
label_col_megas = label_col_megas[label_col_megas != 255]
for label_col_mega in label_col_megas:
label_col = np.ones(N) * label_col_mega
if not is_binary:
matrix[0, (label_row != 255) & (label_col == label_row) & (label_col != 0), col_idx] = 1.0 # non non same
matrix[1, (label_row != 255) & (label_col != label_row) & (label_col != 0) & (label_row != 0), col_idx] = 1.0 # non non diff
matrix[2, (label_row != 255) & (label_row == label_col) & (label_col == 0), col_idx] = 1.0 # empty empty
matrix[3, (label_row != 255) & (label_row != label_col) & ((label_row == 0) | (label_col == 0)), col_idx] = 1.0 # nonempty empty
else:
matrix[0, (label_row != 255) & (label_col != label_row), col_idx] = 1.0 # diff
matrix[1, (label_row != 255) & (label_col == label_row), col_idx] = 1.0 # same
return matrix
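# Illustrative usage sketch (not part of the original module): builds the
# 4-way relation matrix for a tiny synthetic label volume so the shapes
# produced by the loops above are easy to check. The volume size and label
# range below are arbitrary assumptions chosen only for this demo.
def _demo_compute_CP_mega_matrix():
    target = np.random.randint(0, 3, size=(4, 4, 2))  # (H, W, D) semantic labels
    matrix = compute_CP_mega_matrix(target, is_binary=False)
    # 4 relation channels, one row per voxel, one column per 2x2x2 super-voxel
    assert matrix.shape == (4, target.size, target.size // 8)
    return matrix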
def vox2pix(cam_E, cam_k,
vox_origin, voxel_size,
img_W, img_H,
scene_size):
"""
    Compute the 2D projection of the voxel centroids.
    Parameters
    ----------
    cam_E: 4x4
        camera pose in the case of the NYUv2 dataset,
        transformation from camera to lidar coordinates in the case of SemKITTI
    cam_k: 3x3
        camera intrinsics
    vox_origin: (3,)
        world (NYU) / lidar (SemKITTI) coordinates of the voxel at index (0, 0, 0)
    voxel_size: float
        edge length of a voxel in meters
    img_W: int
        image width
    img_H: int
        image height
    scene_size: (3,)
        scene size in meters: (51.2, 51.2, 6.4) for SemKITTI and (4.8, 4.8, 2.88) for NYUv2
    Returns
    -------
    projected_pix: (N, 2)
        projected 2D positions of the voxel centroids
    fov_mask: (N,)
        boolean mask indicating which voxels fall inside the image's FOV
    pix_z: (N,)
        distance of each voxel to the sensor in meters
"""
# Compute the x, y, z bounding of the scene in meter
vol_bnds = np.zeros((3,2))
vol_bnds[:,0] = vox_origin
vol_bnds[:,1] = vox_origin + np.array(scene_size)
# Compute the voxels centroids in lidar cooridnates
vol_dim = np.ceil((vol_bnds[:,1]- vol_bnds[:,0])/ voxel_size).copy(order='C').astype(int)
xv, yv, zv = np.meshgrid(
range(vol_dim[0]),
range(vol_dim[1]),
range(vol_dim[2]),
indexing='ij'
)
vox_coords = np.concatenate([
xv.reshape(1,-1),
yv.reshape(1,-1),
zv.reshape(1,-1)
], axis=0).astype(int).T
# Project voxels'centroid from lidar coordinates to camera coordinates
cam_pts = fusion.TSDFVolume.vox2world(vox_origin, vox_coords, voxel_size)
cam_pts = fusion.rigid_transform(cam_pts, cam_E)
# Project camera coordinates to pixel positions
projected_pix = fusion.TSDFVolume.cam2pix(cam_pts, cam_k)
pix_x, pix_y = projected_pix[:, 0], projected_pix[:, 1]
# Eliminate pixels outside view frustum
pix_z = cam_pts[:, 2]
fov_mask = np.logical_and(pix_x >= 0,
np.logical_and(pix_x < img_W,
np.logical_and(pix_y >= 0,
np.logical_and(pix_y < img_H,
pix_z > 0))))
return projected_pix, fov_mask, pix_z
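# Illustrative usage sketch (not part of the original module): projects the
# voxel centroids of a small synthetic NYU-sized scene. The identity
# extrinsic, the pinhole intrinsics and the voxel size below are made-up
# assumptions for this demo, not calibration values from NYUv2 or SemKITTI.
def _demo_vox2pix():
    cam_E = np.eye(4)  # camera pose / extrinsic (identity for the demo)
    cam_k = np.array([[518.8, 0.0, 320.0],
                      [0.0, 518.8, 240.0],
                      [0.0, 0.0, 1.0]])
    vox_origin = np.array([0.0, 0.0, 0.0])
    projected_pix, fov_mask, pix_z = vox2pix(
        cam_E, cam_k, vox_origin,
        voxel_size=0.08, img_W=640, img_H=480,
        scene_size=(4.8, 4.8, 2.88))
    # one 2D position, one FOV flag and one depth per voxel centroid
    assert projected_pix.shape[0] == fov_mask.shape[0] == pix_z.shape[0]
    return projected_pix[fov_mask]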
def compute_local_frustum(pix_x, pix_y, min_x, max_x, min_y, max_y, pix_z):
valid_pix = np.logical_and(pix_x >= min_x,
np.logical_and(pix_x < max_x,
np.logical_and(pix_y >= min_y,
np.logical_and(pix_y < max_y,
pix_z > 0))))
return valid_pix
def compute_local_frustums(projected_pix, pix_z, target, img_W, img_H, dataset, n_classes, size=4):
"""
Compute the local frustums mask and their class frequencies
Parameters:
----------
projected_pix: (N, 2)
2D projected pix of all voxels
pix_z: (N,)
Distance of the camera sensor to voxels
target: (H, W, D)
        Voxelized semantic labels
img_W: int
Image width
img_H: int
Image height
dataset: str
="NYU" or "kitti" (for both SemKITTI and KITTI-360)
n_classes: int
Number of classes (12 for NYU and 20 for SemKITTI)
size: int
        determines the number of local frustums, i.e. size * size
Returns
-------
frustums_masks: (n_frustums, N)
        List of frustum masks, each indicating which voxels belong to that frustum
frustums_class_dists: (n_frustums, n_classes)
Contains the class frequencies in each frustum
"""
H, W, D = target.shape
ranges = [(i * 1.0/size, (i * 1.0 + 1)/size) for i in range(size)]
local_frustum_masks = []
local_frustum_class_dists = []
pix_x, pix_y = projected_pix[:, 0], projected_pix[:, 1]
for y in ranges:
for x in ranges:
start_x = x[0] * img_W
end_x = x[1] * img_W
start_y = y[0] * img_H
end_y = y[1] * img_H
local_frustum = compute_local_frustum(pix_x, pix_y, start_x, end_x, start_y, end_y, pix_z)
if dataset == "NYU":
mask = (target != 255) & np.moveaxis(local_frustum.reshape(60, 60, 36), [0, 1, 2], [0, 2, 1])
elif dataset == "kitti":
mask = (target != 255) & local_frustum.reshape(H, W, D)
local_frustum_masks.append(mask)
classes, cnts = np.unique(target[mask], return_counts=True)
class_counts = np.zeros(n_classes)
class_counts[classes.astype(int)] = cnts
local_frustum_class_dists.append(class_counts)
frustums_masks, frustums_class_dists = np.array(local_frustum_masks), np.array(local_frustum_class_dists)
return frustums_masks, frustums_class_dists
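# Illustrative usage sketch (not part of the original module): exercises the
# "kitti" branch on random data so the documented output shapes are visible.
# All sizes and the random inputs are assumptions made only for this demo.
def _demo_compute_local_frustums():
    H, W, D = 8, 8, 4
    n_vox = H * W * D
    rng = np.random.RandomState(0)
    projected_pix = rng.randint(0, 640, size=(n_vox, 2))
    pix_z = rng.uniform(0.1, 50.0, size=n_vox)
    target = rng.randint(0, 20, size=(H, W, D))
    masks, dists = compute_local_frustums(
        projected_pix, pix_z, target, img_W=640, img_H=480,
        dataset="kitti", n_classes=20, size=2)
    # size=2 -> 2x2 = 4 local frustums; one class histogram per frustum
    assert masks.shape == (4, H, W, D) and dists.shape == (4, 20)
    return masks, dists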
| 39.134409
| 152
| 0.568347
|
8715a37d39c0279dc2d14343681469cf6113521f
| 7,840
|
py
|
Python
|
contacter/contacts_book/views.py
|
Gordi91/contacts-book
|
b6d46d9a4f4beba46638fce4bbd0ebaefa975406
|
[
"MIT"
] | null | null | null |
contacter/contacts_book/views.py
|
Gordi91/contacts-book
|
b6d46d9a4f4beba46638fce4bbd0ebaefa975406
|
[
"MIT"
] | null | null | null |
contacter/contacts_book/views.py
|
Gordi91/contacts-book
|
b6d46d9a4f4beba46638fce4bbd0ebaefa975406
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, get_object_or_404, redirect
from django.views import View
from .models import Person, Address, Group, Phone, Email
from .forms import PersonForm, AddressForm, PhoneForm, EmailForm, GroupForm, MembersForm
class ShowPeople(View):
def get(self, request):
template_name = 'contacts_book/show_people.html'
people = Person.objects.all()
return render(request, template_name, {'people': people})
class DeletePerson(View):
def get(self, request, id):
Person.objects.get(pk=id).delete()
return redirect('show_people')
class NewPerson(View):
def get(self, request):
template_name = 'contacts_book/new_person.html'
form = PersonForm()
return render(request, template_name, {
'form': form,
})
def post(self, request):
form = PersonForm(request.POST, request.FILES)
if form.is_valid():
form.save()
person_id = form.instance.id
return redirect('show_person', id=person_id)
class ModifyPerson(View):
def get(self, request, id):
template_name = 'contacts_book/new_person.html'
person = get_object_or_404(Person, pk=id)
form = PersonForm(initial={
'first_name': person.first_name,
'surname': person.surname,
'description': person.description,
'avatar': person.avatar,
'groups': [group.id for group in person.groups.all()],
})
return render(request, template_name, {
'form': form,
'person': person,
})
def post(self, request, id):
instance = get_object_or_404(Person, pk=id)
form = PersonForm(request.POST, request.FILES, instance=instance)
if form.is_valid():
form.save()
return redirect('show_person', id=id)
class ShowPerson(View):
def get(self, request, id):
template_name = 'contacts_book/show_person.html'
person = Person.objects.get(pk=id)
phones = Phone.objects.filter(person=person)
emails = Email.objects.filter(person=person)
return render(request, template_name, {
'person': person,
'phones': phones,
'emails': emails,
})
class AddAddress(View):
def get(self, request, id):
template_name = 'contacts_book/add_address.html'
person = Person.objects.get(pk=id)
if Address.objects.filter(person=person):
address = Address.objects.filter(person=person)[0]
form = AddressForm(initial={
'town': address.town,
'street': address.street,
'house_number': address.house_number,
'apartment_number': address.apartment_number,
})
else:
form = AddressForm()
return render(request, template_name, {
'form': form,
})
def post(self, request, id):
form = AddressForm(request.POST)
if form.is_valid():
form.save()
address = Address.objects.get(pk=form.instance.id)
person = get_object_or_404(Person, pk=id)
person.address = address
person.save()
return redirect('modify_person', id=id)
class AddPhoneNumber(View):
def get(self, request, id):
template_name = 'contacts_book/add_phone_number.html'
form = PhoneForm()
return render(request, template_name, {
'form': form,
})
def post(self, request, id):
form = PhoneForm(request.POST)
person = get_object_or_404(Person, pk=id)
        if form.is_valid():
obj = form.save(commit=False)
obj.person = person
obj.save()
return redirect('modify_person', id=id)
class AddEMail(View):
def get(self, request, id):
template_name = 'contacts_book/add_email.html'
form = EmailForm()
return render(request, template_name, {
'form': form,
})
def post(self, request, id):
form = EmailForm(request.POST)
person = get_object_or_404(Person, pk=id)
        if form.is_valid():
obj = form.save(commit=False)
obj.person = person
obj.save()
return redirect('modify_person', id=id)
class NewGroup(View):
def get(self, request):
template_name = 'contacts_book/new_group.html'
form = GroupForm()
return render(request, template_name, {
'form': form,
})
def post(self, request):
form = GroupForm(request.POST)
if form.is_valid():
form.save()
group_id = form.instance.id
return redirect('show_group', id=group_id)
class ModifyGroup(View):
def get(self, request, id):
template_name = 'contacts_book/new_group.html'
group = get_object_or_404(Group, pk=id)
form = GroupForm(initial={
'name': group.name,
})
return render(request, template_name, {
'form': form,
'group_id': group.id,
})
def post(self, request, id):
instance = get_object_or_404(Group, pk=id)
form = GroupForm(request.POST, instance=instance)
if form.is_valid():
form.save()
return redirect('show_group', id=id)
class ShowGroups(View):
def get(self, request):
template_name = 'contacts_book/show_groups.html'
groups = Group.objects.all()
return render(request, template_name, {'groups': groups})
def post(self, request):
template_name = 'contacts_book/show_groups.html'
groups = Group.objects.all()
if request.POST.get('surname'):
surname = request.POST.get('surname')
groups = groups.filter(person__surname__icontains=surname)
message = ''
if not groups.exists():
message = "No groups found for given search criteria"
return render(request, template_name, {
'groups': groups,
'message': message,
})
class ShowGroup(View):
def get(self, request, id):
template_name = 'contacts_book/show_group.html'
group = Group.objects.get(pk=id)
return render(request, template_name, {'group': group})
class DeletePersonFromGroup(View):
def get(self, request, group_id, person_id):
person = get_object_or_404(Person, pk=person_id)
person.groups.remove(Group.objects.get(pk=group_id))
return redirect('show_group', id=group_id)
class DeleteGroup(View):
def get(self, request, id):
Group.objects.get(pk=id).delete()
return redirect('show_groups')
class GroupMembers(View):
def get(self, request, id):
template_name = 'contacts_book/group_members.html'
group = Group.objects.get(pk=id)
members_form = MembersForm(initial={
'members': [person.id for person in group.person_set.all()],
})
return render(request, template_name, {
'members_form': members_form,
'group': group,
})
def post(self, request, id):
group = Group.objects.get(pk=id)
members = request.POST.getlist('members')
for person_id in members:
Person.objects.get(pk=person_id).groups.add(group)
return redirect('show_group', id=id)
class DeleteEmailFromPerson(View):
def get(self, request, email_id, person_id):
get_object_or_404(Email, pk=email_id).delete()
return redirect('modify_person', id=person_id)
class DeletePhoneFromPerson(View):
def get(self, request, phone_id, person_id):
get_object_or_404(Phone, pk=phone_id).delete()
return redirect('modify_person', id=person_id)
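# Illustrative wiring sketch (not part of the original file; this normally
# lives in contacts_book/urls.py): minimal URL patterns consistent with the
# route names the redirect() calls above rely on. The path strings are
# assumptions and the list is not exhaustive -- only the names come from the
# views themselves.
from django.urls import path
example_urlpatterns = [
    path('', ShowPeople.as_view(), name='show_people'),
    path('person/new/', NewPerson.as_view(), name='new_person'),
    path('person/<int:id>/', ShowPerson.as_view(), name='show_person'),
    path('person/<int:id>/edit/', ModifyPerson.as_view(), name='modify_person'),
    path('groups/', ShowGroups.as_view(), name='show_groups'),
    path('group/<int:id>/', ShowGroup.as_view(), name='show_group'),
]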
| 30.988142
| 88
| 0.60523
|
d6cd2d1096877f1045a33a1be85a794170d43493
| 512
|
py
|
Python
|
tpDcc/dccs/mobu/__version__.py
|
tpDcc/tpDcc-dccs-mobu
|
1e28dfa95d72aeaccf4c916259b7e07380758cc7
|
[
"MIT"
] | null | null | null |
tpDcc/dccs/mobu/__version__.py
|
tpDcc/tpDcc-dccs-mobu
|
1e28dfa95d72aeaccf4c916259b7e07380758cc7
|
[
"MIT"
] | null | null | null |
tpDcc/dccs/mobu/__version__.py
|
tpDcc/tpDcc-dccs-mobu
|
1e28dfa95d72aeaccf4c916259b7e07380758cc7
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Version module for tpDcc-dccs-mobu
"""
from __future__ import print_function, division, absolute_import
__author__ = "Tomas Poveda"
__license__ = "MIT"
__maintainer__ = "Tomas Poveda"
__email__ = "tpovedatd@gmail.com"
__version__ = None
def get_version():
global __version__
if __version__:
return __version__
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
return __version__
| 18.285714
| 64
| 0.712891
|
9722dfb201da0741f36291399df7a458d2110229
| 7,795
|
py
|
Python
|
pet_v_vrsto.py
|
SamoFMF/Five-in-a-row
|
a60fd174de37db9a3b7f3ed458d7345220cda400
|
[
"MIT"
] | null | null | null |
pet_v_vrsto.py
|
SamoFMF/Five-in-a-row
|
a60fd174de37db9a3b7f3ed458d7345220cda400
|
[
"MIT"
] | null | null | null |
pet_v_vrsto.py
|
SamoFMF/Five-in-a-row
|
a60fd174de37db9a3b7f3ed458d7345220cda400
|
[
"MIT"
] | null | null | null |
from ai import AIPlayer
import tkinter as tk # GUI
from logika import *
from minimax import *
from clovek import Clovek
from racunalnik import Racunalnik
##################################
# GRAPHICAL / USER INTERFACE     #
##################################
MIN_SIRINA = 500
MIN_VISINA = 555
ZVP = 100
class Gui:
    # Define tags for the canvas elements
    TAG_FIGURA = "figura" # players' pieces
    TAG_OKVIR = "okvir" # lines that make up the playing surface, i.e. its frame
    TAG_GUMB = "gumb" # buttons
    TAG_PETKA = "petka" # the winning five-in-a-row
    BARVE = {IGRALEC_1: "red", IGRALEC_2: "blue"} # player colours
def __init__(self, master):
        self.igralec_1 = None # object that plays PLAYER 1
        self.igralec_2 = None # object that plays PLAYER 2
        self.igra = None # object representing the game
        self.velikost_polja = ZVP # cell size
        self.velikost_gap = self.velikost_polja / 20 # gap between the frame and a piece
        # If the user closes the window, self.zapri_okno should be called
master.protocol("WM_DELETE_WINDOW", lambda: self.zapri_okno(master))
        # Main menu
menu = tk.Menu(master)
master.config(menu=menu)
        # Submenu "Igra" (Game)
menu_igra = tk.Menu(menu, tearoff=0)
menu.add_cascade(label="Igra", menu=menu_igra)
        # Button that starts a new game
menu_igra.add_command(label="Nova igra",
command=self.zacni_igro)
        # Submenu "Moznosti" (Options)
menu_moznosti = tk.Menu(menu, tearoff=0)
menu.add_cascade(label="Moznosti", menu=menu_moznosti)
####################
        # PLAYING SURFACE  #
####################
self.platno = tk.Canvas(master,
width=(NUM_COLS+1) * self.velikost_polja,
height=(NUM_ROWS+1) * self.velikost_polja)
self.platno.pack(fill=tk.BOTH, expand=1, side=tk.RIGHT)
        # Draw the grid lines
self.narisi_okvir()
        # Define what the user's clicks do
self.platno.bind("<Button-1>", self.platno_klik)
        # Start the game
self.zacni_igro()
def zapri_okno(self, master):
        '''This method is called when the user closes the application.'''
        # First stop the players
        self.prekini_igralce()
        # Close the window
master.destroy()
def prekini_igralce(self):
        '''Tell the players that they must stop thinking.'''
if self.igralec_1:
self.igralec_1.prekini()
if self.igralec_2:
self.igralec_2.prekini()
def narisi_okvir(self):
        '''Draw the frame of the playing surface.'''
self.platno.delete(Gui.TAG_OKVIR)
d = self.velikost_polja
xmin = d/2
xmax = xmin + NUM_COLS * d
ymin = d/2
ymax = ymin + NUM_ROWS * d
for i in range(NUM_ROWS+1):
self.platno.create_line(xmin, ymin + i*d, xmax, ymin + i*d)
for i in range(NUM_COLS+1):
self.platno.create_line(xmin + i*d, ymin, xmin + i*d, ymax)
def platno_klik(self, event):
x,y = event.x, event.y
d = self.velikost_polja
if (x < d/2) or (x > d/2 + NUM_COLS * d) or (y < d/2) or (y > d/2 + NUM_ROWS * d):
            # The click is outside the playing surface
return
else:
            col = int((x-d/2) // d) # which column we are in
            p = col + 1 # the move, up to its sign
row = NUM_ROWS - 1 - int((y-d/2) // d)
if self.igra.board[col][row] != PRAZNO:
                # The cell is already occupied
return
elif row == 0:
pass
elif row == NUM_ROWS - 1:
p *= -1
elif self.igra.board[col][row-1] != PRAZNO:
pass
elif self.igra.board[col][(row+1) % NUM_ROWS] != PRAZNO:
p *= -1
# print("Poteza:", p)
            # Ask the player on turn to play the clicked move
if self.igra.na_potezi == IGRALEC_1:
self.igralec_1.klik(p)
elif self.igra.na_potezi == IGRALEC_2:
self.igralec_2.klik(p)
def povleci_potezo(self, p):
        '''Play the move and draw it.'''
igralec = self.igra.na_potezi
zmagovalec, petka = self.igra.odigraj_potezo(p)
if zmagovalec is None:
            # The move is not valid
return
self.narisi_potezo(p, Gui.BARVE[igralec])
if zmagovalec == NI_KONEC:
if self.igra.na_potezi == IGRALEC_1:
self.igralec_1.igraj()
elif self.igra.na_potezi == IGRALEC_2:
self.igralec_2.igraj()
else:
self.koncaj_igro(zmagovalec, petka)
def narisi_potezo(self, p, col):
d = self.velikost_polja
x = abs(p) - 1
y = self.igra.vrstice_plus[x] - 1 if p>0 else -(self.igra.vrstice_minus[x]) % NUM_ROWS
xcenter = (x+1) * d
ycenter = (NUM_ROWS-y) * d
gap = self.velikost_gap
self.platno.create_oval(xcenter-d/2+gap, ycenter-d/2+gap,
xcenter+d/2-gap, ycenter+d/2-gap,
fill=col,
width=0,
tag=Gui.TAG_FIGURA)
self.platno.create_text(xcenter, ycenter,
text=str(self.igra.stevilo_potez),
font=("Purisa", 40),
tag=Gui.TAG_FIGURA)
def obarvaj_petko(self, petka, col):
d = self.velikost_polja
for x,y in petka:
ycoord = NUM_ROWS-1-y
self.platno.create_rectangle(d/2 + x*d, d/2 + ycoord*d,
d/2 + (x+1)*d, d/2 + (ycoord+1)*d,
outline=col,
width=4,
tag=Gui.TAG_PETKA)
def zacni_igro(self):
        '''Start a new/next game. Set up the players, game type, score, etc.'''
self.prekini_igralce()
self.igralec_1 = Clovek(self)
# self.igralec_2 = Clovek(self)
# self.igralec_1 = Racunalnik(self, AIPlayer("ai", 0, 0.9, 0.1))
# self.igralec_1.algoritem.nalozi_strategijo("ai_100k_p1")
# self.igralec_1 = Racunalnik(self, Minimax(4, "alphabeta"))
self.igralec_2 = Racunalnik(self, Minimax(4, "negamax_memo"))
# # self.igralec_1.algoritem.vrednost_polozaja = self.igralec_1.algoritem.vrednost_polozaja_old
# self.igralec_2.algoritem.vrednost_polozaja = self.igralec_2.algoritem.vrednost_polozaja_old
# self.igralec_1.algoritem.bias = 1
        # Clear the played moves
self.platno.delete(Gui.TAG_FIGURA)
self.platno.delete(Gui.TAG_PETKA)
        # Create a new game
self.igra = Logika()
        # Check whose turn it is
if self.igra.na_potezi == IGRALEC_1:
self.igralec_1.igraj()
elif self.igra.na_potezi == IGRALEC_2:
self.igralec_2.igraj()
def koncaj_igro(self, zmagovalec, petka):
if petka is not None:
self.obarvaj_petko(petka, Gui.BARVE[zmagovalec])
######################################################################
## Main program
if __name__ == "__main__":
    # Create the main window and set its title
root = tk.Tk()
root.title("Pet v vrsto")
    # Set the minimum window size
root.minsize(int(MIN_SIRINA), int(MIN_VISINA))
    # Create the Gui object and keep a reference, otherwise Python garbage-collects it
aplikacija = Gui(root)
    # Hand control over to the main window
    # The mainloop function stops running when the window is closed
root.mainloop()
| 35.431818
| 103
| 0.5483
|
15137028b38c745b4ddd8e9bffdb65e618adf00a
| 3,168
|
py
|
Python
|
reentry/config.py
|
giovannipizzi/reentry
|
8491b7348a359f5a844bb283ea9450c027e92580
|
[
"MIT"
] | 5
|
2017-11-28T14:57:53.000Z
|
2019-01-16T08:48:10.000Z
|
reentry/config.py
|
giovannipizzi/reentry
|
8491b7348a359f5a844bb283ea9450c027e92580
|
[
"MIT"
] | 49
|
2017-04-11T11:18:58.000Z
|
2021-02-10T23:06:20.000Z
|
reentry/config.py
|
giovannipizzi/reentry
|
8491b7348a359f5a844bb283ea9450c027e92580
|
[
"MIT"
] | 8
|
2017-06-16T17:01:33.000Z
|
2021-02-09T10:28:03.000Z
|
"""Find and read user settings."""
import os
import sys
import hashlib
import platform
try:
# prefer the backport for Python <3.5
from pathlib2 import Path
except ImportError:
from pathlib import Path
import six
from six.moves import configparser
__all__ = ['find_config', 'get_config', 'get_datafile']
def _get_default_config_dir():
return Path(os.getenv('XDG_CONFIG_HOME', '~/.config')).expanduser().joinpath('reentry')
def find_config():
"""
Search for a config file in the following places and order:
* <HOME>/.reentryrc
* <HOME>/.config/reentry/config
"""
rc_file = Path.home().joinpath('.reentryrc')
config_file = _get_default_config_dir().joinpath('config')
# pylint: disable=no-else-return
if rc_file.exists(): # pylint: disable=no-member
return rc_file
elif config_file.exists(): # pylint: disable=no-member
return config_file
return rc_file
def make_config_parser(*args, **kwargs):
"""Get the correct ConfigParser class depending on python version."""
# pylint: disable=no-else-return
if six.PY2:
return configparser.SafeConfigParser(*args, **kwargs)
elif six.PY3:
return configparser.ConfigParser(*args, **kwargs)
return None
def get_config(config_file_name=str(find_config())):
"""Create config parser with defaults and read in the config file."""
default_config_dir = _get_default_config_dir()
default_config_values = {'datadir': str(default_config_dir.joinpath('data')), 'data_filename': hashed_data_file_name()}
parser = make_config_parser(default_config_values)
parser.add_section('general')
parser.read([config_file_name])
env_datadir = os.getenv('REENTRY_DATADIR')
if env_datadir:
env_datadir_path = Path(env_datadir)
if env_datadir_path.exists() and not env_datadir_path.is_dir(): # pylint: disable=no-member
raise ValueError('environment variable $REENTRY_DATADIR={} exists, but is not a directory'.format(env_datadir))
parser.set('general', 'datadir', str(env_datadir_path))
env_data_filename = os.getenv('REENTRY_DATA_FILENAME')
if env_data_filename:
parser.set('general', 'data_filename', env_data_filename)
return parser
def hashed_data_file_name():
"""Find the path to the reentry executable and mangle it into a file name."""
fname = 'u{bin_dir}_{impl}-{ver}'.format(bin_dir=Path(sys.executable).resolve().parent,
impl=platform.python_implementation(),
ver=platform.python_version())
path_hash = hashlib.sha256(fname.encode('utf-8'))
return path_hash.hexdigest()
def get_datafile():
"""Create the path to the data file used to store entry points."""
config = get_config()
pkg_path_filename = config.get('general', 'data_filename')
datafile = Path(config.get('general', 'datadir')).joinpath(pkg_path_filename)
if not datafile.exists(): # pylint: disable=no-member
datafile.parent.mkdir(parents=True, exist_ok=True)
datafile.write_text(u'{}')
return str(datafile)
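# Illustrative sketch (not part of the original module): shows the documented
# precedence -- an explicit REENTRY_DATADIR environment variable overrides the
# datadir from any config file. The directory name is an arbitrary assumption
# made only for this demo.
def _demo_datadir_override():
    os.environ['REENTRY_DATADIR'] = str(Path.home().joinpath('.reentry-demo-data'))
    parser = get_config()
    assert parser.get('general', 'datadir') == os.environ['REENTRY_DATADIR']
    return get_datafile()  # resolved (and created) under the overridden datadir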
| 34.064516
| 123
| 0.685606
|
443101ed7f8cb832968a705d41c547c80ecd2032
| 1,911
|
py
|
Python
|
bin/authors.py
|
Lamcloud/cloudmesh-manual
|
6d94270e4a90f96f3906caa78ac24bf7f4468608
|
[
"Apache-2.0"
] | null | null | null |
bin/authors.py
|
Lamcloud/cloudmesh-manual
|
6d94270e4a90f96f3906caa78ac24bf7f4468608
|
[
"Apache-2.0"
] | null | null | null |
bin/authors.py
|
Lamcloud/cloudmesh-manual
|
6d94270e4a90f96f3906caa78ac24bf7f4468608
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import subprocess
import textwrap
debug = False
names = []
for d in [".",
"../cloudmesh-installer",
"../cloudmesh-inventory",
"../cloudmesh-gui",
"../cloudmesh-configuration",
"../cloudmesh-cmd5",
"../cloudmesh-aws",
"../cloudmesh-azure",
"../cloudmesh-openstack",
"../cloudmesh-google",
"../cloudmesh-oracle",
"../cloudmesh-common",
"../cloudmesh-sys",
"../cloudmesh-openapi",
"../cloudmesh-emr",
"../cloudmesh-cloud",
"../cloudmesh-storage"]:
try:
lines = subprocess.check_output(["git", "shortlog", "-s"], cwd=d).decode('ascii', 'ignore').split("\n")
for line in lines[:-1]:
if debug:
print (d, line)
names.append("*" + line.strip().split("\t")[1] + "*")
except:
pass
names = list(set(names))
names.sort()
# names = " ".join(names)
# names = names.replace("ElectricErudite", "Andrew Holland")
name_string = '\n '.join(textwrap.wrap(', '.join(names), 79, initial_indent=" "))
'''
print("# Contributors")
print()
msg = """Contributors are sorted by the first letter of their combined
Firstname and Lastname and if not available by their github ID.
Please, note that the authors are identified through git logs in
addition to some contributors added by hand. The git repository from
which this document is derived contains more than the documents
included in this document. Thus not everyone in this list may have
directly contributed to this document. However if you find someone
missing that has contributed (they may not have used this particular
git) please let us know. We will add you.
The contributors that we are aware of include:
"""
print("\n".join(textwrap.wrap(msg, 79)))
print()
'''
print()
print (name_string)
print()
| 25.48
| 111
| 0.612245
|
7d958a679edff4356242144104c784c25f23f142
| 257
|
py
|
Python
|
player.py
|
ColinVDH/blokus
|
a2c7f6f7b65653b0fd942c181eae3944fb2ed7d8
|
[
"MIT"
] | 1
|
2020-01-10T00:26:45.000Z
|
2020-01-10T00:26:45.000Z
|
player.py
|
ColinVDH/blokus
|
a2c7f6f7b65653b0fd942c181eae3944fb2ed7d8
|
[
"MIT"
] | null | null | null |
player.py
|
ColinVDH/blokus
|
a2c7f6f7b65653b0fd942c181eae3944fb2ed7d8
|
[
"MIT"
] | null | null | null |
from constants import STARTING_PLACES
class Player:
def __init__(self, index, is_human=False):
self.index = index
self.is_human = is_human
self.starting_place = STARTING_PLACES[index]
def get_move(self, board):
pass
| 25.7
| 52
| 0.677043
|
5559b6609f68ec644df7cf56faf9a39b33b85771
| 256
|
py
|
Python
|
src/ebay_rest/api/sell_compliance/api/__init__.py
|
gbm001/ebay_rest
|
077d3478423ccd80ff35e0361821d6a11180bc54
|
[
"MIT"
] | 3
|
2021-12-12T04:28:03.000Z
|
2022-03-10T03:29:18.000Z
|
src/ebay_rest/api/sell_compliance/api/__init__.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 33
|
2021-06-16T20:44:36.000Z
|
2022-03-30T14:55:06.000Z
|
src/ebay_rest/api/sell_compliance/api/__init__.py
|
jdavv/ebay_rest
|
20fc88c6aefdae9ab90f9c1330e79abddcd750cd
|
[
"MIT"
] | 7
|
2021-06-03T09:30:23.000Z
|
2022-03-08T19:51:33.000Z
|
from __future__ import absolute_import
# flake8: noqa
# import apis into api package
from ...sell_compliance.api.listing_violation_api import ListingViolationApi
from ...sell_compliance.api.listing_violation_summary_api import ListingViolationSummaryApi
| 32
| 91
| 0.859375
|
bde905347d7089de94d311f9111af809f6367fe1
| 804
|
py
|
Python
|
examples/timon.py
|
surycat/nyuki-legacy
|
9ab3a212f2ce34b032984c712c87eb2326bd3960
|
[
"Apache-2.0"
] | 8
|
2016-08-08T12:09:16.000Z
|
2018-08-24T02:32:06.000Z
|
examples/timon.py
|
surycat/nyuki-legacy
|
9ab3a212f2ce34b032984c712c87eb2326bd3960
|
[
"Apache-2.0"
] | 16
|
2015-10-06T10:24:53.000Z
|
2018-01-23T18:35:37.000Z
|
examples/timon.py
|
surycat/nyuki-legacy
|
9ab3a212f2ce34b032984c712c87eb2326bd3960
|
[
"Apache-2.0"
] | 9
|
2015-09-30T15:00:44.000Z
|
2018-04-05T21:25:48.000Z
|
"""
This is 'timon'
"""
import logging
from nyuki import Nyuki, resource, Response
log = logging.getLogger(__name__)
@resource('/message', versions=['v1'])
class Message:
async def get(self, request):
return Response({'message': self.nyuki.message})
async def put(self, request):
request = await request.json()
self.nyuki.message = request['message']
log.info("message updated to '%s'", self.nyuki.message)
await self.nyuki.bus.publish({'order': 'go pumbaa!'})
# No 'return' implies 200 Ok
class Timon(Nyuki):
HTTP_RESOURCES = Nyuki.HTTP_RESOURCES + [Message]
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.message = 'hello world!'
if __name__ == '__main__':
nyuki = Timon()
nyuki.start()
| 21.72973
| 63
| 0.633085
|
1cee275788a3ea8bc9faa3f0dab4ac576f10c46e
| 4,619
|
py
|
Python
|
src/count_breakpoints_at_sites.py
|
esaskar/jsva
|
d614039d807d11a7dc7651fa81dffc2c2829a9f9
|
[
"MIT"
] | null | null | null |
src/count_breakpoints_at_sites.py
|
esaskar/jsva
|
d614039d807d11a7dc7651fa81dffc2c2829a9f9
|
[
"MIT"
] | 1
|
2020-06-13T02:36:31.000Z
|
2020-06-13T02:36:31.000Z
|
src/count_breakpoints_at_sites.py
|
esaskar/jsva
|
d614039d807d11a7dc7651fa81dffc2c2829a9f9
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys, subprocess, os, argparse
p = argparse.ArgumentParser()
p.add_argument("-f", "--flank", default = 10000, type = int)
args = p.parse_args()
SAMPLES = "data/samples_with_cohesin_mutation"
#COHESIN_SITES = "data/CTCF_RAD21_intersect.bed"
#COHESIN_SITES = "data/occupied_cohesin_sites_with_mutation.bed"
COHESIN_SITES = "data/CTCF_RAD21_intersect_mutated.bed"
SITE_MUTATIONS = "data/somatic_mutations_occupied_cohesin.bed"
BREAKPOINTS = "out/somatic_breakpoints/sorted/"
o1 = open("breakpoints_at_mutated_sites.txt", "w")
o2 = open("breakpoints_at_nonmutated_sites.txt", "w")
obpcm = open("breakpoint_coords_at_mutated_sites.txt", "w")
obpcn = open("breakpoint_coords_at_nonmutated_sites.txt", "w")
# 0. read samples
samples = map(lambda x: x.strip(), open(SAMPLES).readlines())
samples.sort()
print "%d samples from %s" % (len(samples), SAMPLES)
# map samples to somatic breakpoint files
sample_breakpoints = {}
for s in samples:
fn = "%s/%s.gz" % (BREAKPOINTS, s)
if os.path.exists(fn):
sample_breakpoints[s] = fn
continue
fn = "%s/%s_1.gz" % (BREAKPOINTS, s)
if os.path.exists(fn):
sample_breakpoints[s] = fn
continue
print "Can't find", s
exit()
class Site:
def add_sample(self, sample):
self.samples.add(sample)
def __init__(self, chrom, start, stop, orient):
self.chrom = chrom
self.start = start
self.stop = stop
self.orient = orient
self.samples = set()
def __str__(self):
return "%s:%d-%d:%s" % (self.chrom, self.start, self.stop, self.orient)
# 1. Collect occupied cohesin sites
pos_to_site = {}
for s in open(COHESIN_SITES):
v = s.strip().split("\t")
chrom, start, stop, tag, value, orientation = v
start, stop = int(start), int(stop)
site = Site(chrom, start, stop, orientation)
key = "%s:%d-%d" % (chrom, start, stop)
pos_to_site[key] = site
print "%d occupied cohesin sites from %s" % (len(pos_to_site), COHESIN_SITES)
# 2. Identify samples+sites with mutation
n = 0
for s in open(SITE_MUTATIONS):
if s.startswith("#"):
continue
chrom, start, end, sample = s.strip().split("\t")
start, end = int(start), int(end)
key = "%s:%d-%d" % (chrom, start, end)
if key not in pos_to_site:
print "site", key, "not found"
exit()
pos_to_site[key].add_sample(sample)
n += 1
print "%d mutations in sites from %s" % (n, SITE_MUTATIONS)
#print "%d/%d mutated sites" % ()
no_mut = mut = 0
for k in pos_to_site:
if len(pos_to_site[k].samples) == 0:
no_mut += 1
else:
mut += 1
print "%d/%d sites mutated in >0 samples" % (mut, no_mut + mut)
# 3. Count SV/CNV breakpoints near samples+sites
def get_breakpoints_in_neighborhood(site, sample, flank):
chrom = site.chrom
start = site.start - flank
end = site.stop + flank
s = subprocess.check_output("tabix %s %s:%d-%d" % (sample_breakpoints[sample], chrom, start, end), shell = True)
v = s.strip().split("\n")
n = len(s.strip().split("\n")) - 1
assert(n >= 0)
if len(v) <= 1:
s = None
else:
s = []
for vals in v:
chrom, pos, svtype, reads, qual = vals.split("\t")
pos = int(pos)
# tpos > 0 iff breakpoint to 3' of site start
if site.orient == "+":
tpos = pos - site.start
elif site.orient == "-":
tpos = site.stop - pos
s.append("\t".join(map(str, [chrom, pos, tpos, svtype, reads, qual, sample, site.orient])))
s = "%s\n" % ("\n".join(s))
return n, s
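# Illustrative sketch (not part of the analysis): the orientation-relative
# position (tpos) convention used above, shown on made-up coordinates so the
# sign convention can be checked without tabix-indexed breakpoint files.
def _demo_relative_position():
    plus_site = Site("chr1", 1000, 1020, "+")
    minus_site = Site("chr1", 1000, 1020, "-")
    pos = 1010
    tpos_plus = pos - plus_site.start   # +10: breakpoint 3' of the site start
    tpos_minus = minus_site.stop - pos  # +10: measured from the opposite end
    assert tpos_plus == 10 and tpos_minus == 10
    return tpos_plus, tpos_minus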
mutated_site_breakpoints = []
nonmutated_site_breakpoints = []
keys = pos_to_site.keys()
keys.sort()
for i, k in enumerate(keys):
site = pos_to_site[k]
nm = mm = 0
totaln = totalm = 0
for sample in samples:
#print sample, sample in site.samples
n, bps = get_breakpoints_in_neighborhood(site, sample, args.flank)
# print sample, sample in site.samples, "%d breakpoints" % (n)
if sample in site.samples:
mutated_site_breakpoints.append(n)
if bps != None:
obpcm.write(bps)
mm += n
totalm += 1
else:
nonmutated_site_breakpoints.append(n)
if bps != None:
obpcn.write(bps)
nm += n
totaln += 1
print "%d/%d" % (i, len(keys)), k, nm, totaln, mm, totalm
# if i > 1000:
# break
o1.write("\n".join(map(str, mutated_site_breakpoints)))
o2.write("\n".join(map(str, nonmutated_site_breakpoints)))
| 31.636986
| 116
| 0.609006
|
1db31864ea553553de1f72fbb78f11ab32026ede
| 9,040
|
py
|
Python
|
pai-management/k8sPaiLibrary/maintainlib/remove.py
|
luoch/pai
|
983326061006954bf3b99988d8698704598392e3
|
[
"MIT"
] | 2
|
2018-09-13T11:37:28.000Z
|
2018-09-13T11:39:33.000Z
|
pai-management/k8sPaiLibrary/maintainlib/remove.py
|
luoch/pai
|
983326061006954bf3b99988d8698704598392e3
|
[
"MIT"
] | null | null | null |
pai-management/k8sPaiLibrary/maintainlib/remove.py
|
luoch/pai
|
983326061006954bf3b99988d8698704598392e3
|
[
"MIT"
] | 1
|
2018-09-13T11:37:31.000Z
|
2018-09-13T11:37:31.000Z
|
# Copyright (c) Microsoft Corporation
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
# to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import yaml
import os
import sys
import subprocess
import jinja2
import argparse
import paramiko
import common
import time
import logging
import logging.config
from . import common as pai_common
package_directory_remove = os.path.dirname(os.path.abspath(__file__))
class remove:
"""
    A class to remove a node from the current PAI k8s cluster.
"""
def __init__(self, cluster_config, node_config, clean):
self.logger = logging.getLogger(__name__)
self.cluster_config = cluster_config
self.node_config = node_config
maintain_configuration_path = os.path.join(package_directory_remove, "../maintainconf/remove.yaml")
self.maintain_config = common.load_yaml_file(maintain_configuration_path)
self.clean_flag = clean
self.jobname = "remove-node"
def prepare_package(self, node_config, jobname):
common.maintain_package_wrapper(self.cluster_config, self.maintain_config, node_config, jobname)
def delete_packege(self, node_config):
common.maintain_package_cleaner(node_config)
def job_executer_clean_up_node(self):
self.logger.info("{0} job begins !".format(self.jobname))
commandline = "kubectl delete node {0}".format(self.node_config['nodename'])
common.execute_shell(
commandline,
"Failed to delete node {0}".format(self.node_config['nodename'])
)
# sftp your script to remote host with paramiko.
srcipt_package = "{0}.tar".format(self.jobname)
src_local = "parcel-center/{0}".format(self.node_config["nodename"])
dst_remote = common.get_user_dir(self.node_config)
if common.sftp_paramiko(src_local, dst_remote, srcipt_package, self.node_config) == False:
sys.exit(1)
commandline = "tar -xvf {0}.tar".format(self.jobname, self.node_config['hostip'])
if common.ssh_shell_paramiko(self.node_config, commandline) == False:
self.logger.error("Failed to uncompress {0}.tar".format(self.jobname))
sys.exit(1)
commandline = "sudo ./{0}/kubernetes-cleanup.sh".format(self.jobname)
if common.ssh_shell_with_password_input_paramiko(self.node_config, commandline) == False:
self.logger.error("Failed to cleanup the kubernetes deployment on {0}".format(self.node_config['hostip']))
sys.exit(1)
self.logger.info("Successfully running {0} job on node {1}".format(self.jobname, self.node_config["nodename"]))
def remote_host_cleaner(self, node_config, jobname):
commandline = "sudo rm -rf {0}*".format(jobname)
if common.ssh_shell_with_password_input_paramiko(node_config, commandline) == False:
sys.exit(1)
def job_execute_stop_etcd_on_target_node(self):
self.logger.info("---- package wrapper is working now! ----")
self.prepare_package(self.node_config, "stop-etcd-on-target-node")
self.logger.info("---- package wrapper's work finished ----")
self.logger.info("Begin to execute the job : stop-etcd-on-target-node.")
self.logger.info("Stop the etcd server on host [{0}]".format(self.node_config['nodename']))
script_package = "stop-etcd-on-target-node.tar"
src_local = "parcel-center/{0}".format(self.node_config["nodename"])
dst_remote = common.get_user_dir(self.node_config)
if common.sftp_paramiko(src_local, dst_remote, script_package, self.node_config) == False:
sys.exit(1)
commandline = "tar -xvf {0}.tar && sudo /bin/bash {0}/stop-etcd-server.sh".format("stop-etcd-on-target-node")
if common.ssh_shell_with_password_input_paramiko(self.node_config, commandline) == False:
sys.exit(1)
self.logger.info("Successfully stoping etcd server on node {0}".format(self.node_config["nodename"]))
if self.clean_flag == True:
self.logger.info("---- package cleaner is working now! ----")
self.delete_packege(self.node_config)
self.logger.info("---- package cleaner's work finished! ----")
self.logger.info("---- remote host cleaner is working now! ----")
self.remote_host_cleaner(self.node_config, "stop-etcd-on-target-node")
self.logger.info("---- remote host cleaning job finished! ")
def job_execute_remove_node_from_etcd_cluster(self):
# Waiting for the bad node to remove from leader.
while True:
leader_node_config = pai_common.get_etcd_leader_node(self.cluster_config)
if leader_node_config == None:
self.logger.error("Failed to find the leader node in the etcd cluster")
sys.exit(1)
if leader_node_config['nodename'] != self.node_config['nodename']:
break
self.prepare_package(leader_node_config, "remove-node-from-etcd-cluster")
self.logger.info("Begin to execute the job : remove-node-from-etcd-cluster.")
self.logger.info("Update etcd cluster on host [{0}].".format(leader_node_config['nodename']))
script_package = "remove-node-from-etcd-cluster.tar"
src_local = "parcel-center/{0}".format(leader_node_config["nodename"])
dst_remote = common.get_user_dir(leader_node_config)
if common.sftp_paramiko(src_local, dst_remote, script_package, leader_node_config) == False:
sys.exit(1)
commandline = "tar -xvf {0}.tar".format("remove-node-from-etcd-cluster")
if common.ssh_shell_with_password_input_paramiko(leader_node_config, commandline) == False:
sys.exit(1)
commandline = "sudo /bin/bash {0}/{1}.sh {2} {3}".format("remove-node-from-etcd-cluster",
"remove-member-from-etcd-cluster",
self.node_config['hostip'],
self.node_config['etcdid'])
if common.ssh_shell_with_password_input_paramiko(leader_node_config, commandline) == False:
sys.exit(1)
self.logger.info("Successfully remove target node from etcd cluster on node {0}".format(leader_node_config["nodename"]))
if self.clean_flag == True:
self.logger.info("---- package cleaner is working now! ----")
self.delete_packege(leader_node_config)
self.logger.info("---- package cleaner's work finished! ----")
self.logger.info("---- remote host cleaner is working now! ----")
self.remote_host_cleaner(leader_node_config, "remove-node-from-etcd-cluster")
self.logger.info("---- remote host cleaning job finished! ")
def run(self):
if self.node_config['k8s-role'] == 'master':
self.logger.info("The target node is master node.")
self.logger.info("Task one before cleanup the node: stop target node's etcd.")
self.job_execute_stop_etcd_on_target_node()
self.logger.info("Task two before cleanup the node: remove target node from etcd cluster")
self.job_execute_remove_node_from_etcd_cluster()
self.logger.info("---- package wrapper is working now! ----")
self.prepare_package(self.node_config, self.jobname)
self.logger.info("---- package wrapper's work finished ----")
self.job_executer_clean_up_node()
if self.clean_flag == True:
self.logger.info("---- package cleaner is working now! ----")
self.delete_packege(self.node_config)
self.logger.info("---- package cleaner's work finished! ----")
self.logger.info("---- remote host cleaner is working now! ----")
self.remote_host_cleaner(self.node_config, self.jobname)
self.logger.info("---- remote host cleaning job finished! ")
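# Illustrative usage sketch (not part of the original module): how a caller
# might drive the removal job once the cluster and node configuration dicts
# have been loaded. Loading those dicts is deployment-specific and is only
# assumed here; clean=True additionally wipes the copied job packages.
def _demo_remove_node(cluster_config, node_config):
    remover = remove(cluster_config, node_config, clean=True)
    remover.run()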
| 40.904977
| 128
| 0.663827
|
359e838b1cb759b21a23afcc4ec01edb152173a1
| 13,338
|
py
|
Python
|
AITranslator.py
|
lsldragon/AITranslator
|
70c3dbec40e6aaf250830550ab53fe93d894bae5
|
[
"MIT"
] | null | null | null |
AITranslator.py
|
lsldragon/AITranslator
|
70c3dbec40e6aaf250830550ab53fe93d894bae5
|
[
"MIT"
] | null | null | null |
AITranslator.py
|
lsldragon/AITranslator
|
70c3dbec40e6aaf250830550ab53fe93d894bae5
|
[
"MIT"
] | null | null | null |
import sys
from PyQt5.QtWidgets import *
from MainFrame import *
import json
import requests
from urllib import parse
from PyQt5.QtCore import *
from GetSource import *
from googletrans import Translator
from AboutDialog import *
from UpdateDialog import *
from Theme import *
import re
import qdarkstyle
GTransData = ""
AITransData = ""
class APP(QMainWindow, Ui_MainWindow):
def __init__(self, parent=None):
super(APP, self).__init__(parent)
self.setupUi(self)
self.center()
Theme.dark_theme(self)
self.update_label()
self.AI_button.clicked.connect(self.on_ai_translate)
self.bingButton.clicked.connect(self.on_bing)
self.type_ComboBox.currentIndexChanged.connect(
self.on_comboBox_changed)
self.type_ComboBox.setToolTip("默认 AI 翻译")
self.theme_comboBox.currentIndexChanged.connect(self.on_theme_chaneged)
self.youDaoButton.clicked.connect(self.on_yddict)
self.google_button.clicked.connect(self.on_google_translate)
self.about_button.clicked.connect(self.show_about_dialog)
self.clear_button.clicked.connect(self.clear_sourceL)
self.clipboard = QApplication.clipboard()
self.clipboard.dataChanged.connect(self.get_text_from_clipboard)
self.check_update()
self.get_text_from_clipboard()
def get_text_from_clipboard(self):
cb = self.clipboard
text = cb.text()
self.search_edit.setPlainText(text)
self.on_ai_translate()
def show_about_dialog(self):
AboutDialog()
def check_update(self):
self.t = CheckUpdate()
self.t.start()
def clear_sourceL(self):
self.search_edit.setPlainText("")
def on_theme_chaneged(self):
index = self.theme_comboBox.currentIndex()
if index == 0:
Theme.dark_theme(self)
elif index == 1:
Theme.light_theme(self)
elif index == 2:
Theme.system_theme(self)
elif index == 3:
Theme.custum_theme(self)
def on_google_translate(self):
self.result_edit.setPlainText("")
index = self.get_combox_index()
dest = "zh-cn"
if index == 0:
dest = "zh-cn"
elif index == 1:
dest = "en"
elif index == 2:
dest = "zh-cn"
elif index == 3:
dest = "ja"
elif index == 4:
dest = "ko"
elif index == 5:
dest = "fr"
elif index == 6:
dest = "es"
elif index == 7:
dest = "th"
elif index == 8:
dest = "ru"
elif index == 9:
dest = "de"
else:
pass
text = self.get_word()
if text == "":
self.result_edit.insertHtml(
"<html><font color=fuchsia>请输入关键词!!!</font><br></html>")
return
self.t = GTranslator(dest, text)
self.t.start()
self.result_edit.setPlaceholderText("......")
self.t.trigger.connect(self.google_translate)
def google_translate(self):
global GTransData
if GTransData:
self.result_edit.setPlainText(GTransData)
else:
self.result_edit.setPlainText("error")
GTransData = ""
def on_jscb(self):
# self.result_edit.clear()
# url = "http://106.12.179.253:8089/lsl/api/jsword?words=apple"
# json_str = GetSource.get_source_code(url)
# self.result_edit.insertPlainText(json_str)
self.result_edit.insertHtml("开发中.......")
def on_bing(self):
self.result_edit.clear()
query_word = self.get_word()
url = "http://106.12.179.253:8089/lsl/api/bingword?words="
if query_word == "":
self.result_edit.insertHtml(
"<html><font color=fuchsia>请输入关键词!!!</font></html>")
return
try:
query_word = query_word.strip()
_url = url + query_word
json_str = GetSource.get_source_code(_url)
res_list = json.loads(json_str)
word = res_list['word']
pronounce = res_list['pronouncList']
means = res_list['meansList']
usage = res_list['usageList']
exampleE = res_list['exampleEList']
exampleC = res_list['exampleCList']
tongyici = res_list['tongyiCiList']
fanyici = res_list['fanyiciList']
self.result_edit.insertHtml(
"<html><font>--必应词典--</font><br></html>")
self.result_edit.insertHtml(
"<html><font color=red>" + word + "</font><br></html>")
for p in pronounce:
self.result_edit.insertHtml(
"<html><font color=fuchsia>" + p + "<font><br><br></html>")
self.result_edit.insertHtml(
"<html><font color=red>" + "释义" + "</font><br></html>")
for m in means:
self.result_edit.insertHtml(
"<html><font>" + m + "<font><br></html>")
self.result_edit.insertPlainText("\r\n")
self.result_edit.insertHtml(
"<html><font color=red>" + "用法" + "</font><br></html>")
for u in usage:
self.result_edit.insertHtml(
"<html><font>" + u + "<font><br></html>")
self.result_edit.insertPlainText("\r\n")
self.result_edit.insertHtml(
"<html><font color=red>" + "同义词" + "</font><br></html>")
for tyc in tongyici:
self.result_edit.insertHtml(
"<html><font>" + tyc + "<font><br></html>")
self.result_edit.insertPlainText("\r\n")
self.result_edit.insertHtml(
"<html><font color=red>" + "反义词" + "</font><br></html>")
for fyc in fanyici:
self.result_edit.insertHtml(
"<html><font>" + fyc + "<font><br></html>")
self.result_edit.insertPlainText("\r\n")
self.result_edit.insertHtml(
"<html><font color=red>" + "例句英" + "</font><br></html>")
for ee in exampleE:
_index = exampleE.index(ee)
self.result_edit.insertHtml(
"<html><font>" + str(_index+1) + ". "+ee + "<font><br><br></html>")
self.result_edit.insertPlainText("\r\n")
self.result_edit.insertHtml(
"<html><font color=red>" + "例句汉" + "</font><br></html>")
for ec in exampleC:
self.result_edit.insertHtml(
"<html><font>" + ec + "<font><br></html>")
QApplication.processEvents()
except:
self.result_edit.insertHtml(
"<html><font color=red> 只支持单词查询!</font><br></html>")
def on_yddict(self):
self.result_edit.clear()
url = "http://106.12.179.253:8089/lsl/api/ydword?words="
query_word = self.get_word()
if query_word == "":
self.result_edit.insertHtml(
"<html><font color=fuchsia>请输入关键词!!!</font></html>")
return
try:
query_word = query_word.strip()
_url = url + query_word
json_str = GetSource.get_source_code(_url)
res_list = json.loads(json_str)
word = res_list['word']
pronounce = res_list['pronounce']
means = res_list['means']
example1 = res_list['example1']
example2 = res_list['example2']
tongyici = res_list['tongyici']
cx = res_list['cixing']
self.result_edit.insertHtml(
"<html><font>--有道词典--</font><br></html>")
self.result_edit.insertHtml(
"<html><font color=red>" + word + "</font><br></html>")
for p in pronounce:
self.result_edit.insertHtml(
"<html><font color=fuchsia>" + p + " " + "</font></html>")
self.result_edit.insertHtml("<br>")
for m in means:
self.result_edit.insertPlainText(m + "\r\n")
self.result_edit.insertHtml(
'<html><font color=red>' + "词形" + '</font><br></html>')
            for c in cx:
                self.result_edit.insertPlainText(c + "\r\n")
self.result_edit.insertPlainText("\r\n")
self.result_edit.insertHtml(
"<html><font color=red>" + "柯林斯释义" + "</font><br></html>")
for e1 in example1:
self.result_edit.insertPlainText(e1 + "\r\n")
self.result_edit.insertHtml(
'<html><font color=red>' + "同义词" + '</font><br></html>')
for tong in tongyici:
self.result_edit.insertPlainText(tong + "\r\n")
self.result_edit.insertHtml(
"<html><font color=red>" + "双语例句" + "</font><br></html>")
for e2 in example2:
self.result_edit.insertPlainText(e2 + "\r\n")
QApplication.processEvents()
except:
self.result_edit.insertHtml(
"<html><font color=red> 只支持单词查询!</font><br></html>")
def on_comboBox_changed(self):
self.on_ai_translate()
def get_word(self):
_str = self.search_edit.toPlainText()
return _str
def get_combox_index(self):
_index = self.type_ComboBox.currentIndex()
return _index
def update_label(self):
try:
url = "http://open.iciba.com/dsapi/"
json_str = GetSource.get_source_code(url)
note = json.loads(json_str)
self.poem_label.setText(note['content'])
except:
self.poem_label.setText("Something went wrong ... ")
def center(self):
screen = QDesktopWidget().screenGeometry()
size = self.geometry()
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
def on_ai_search(self):
index = self.get_combox_index()
self.set_result(index)
def on_ai_translate(self):
self.result_edit.setPlainText("")
index = self.get_combox_index()
dest = ""
if index == 0:
dest = "zh"
elif index == 1:
dest = "en"
elif index == 2:
dest = "wyw"
elif index == 3:
dest = "jp"
elif index == 4:
dest = "kor"
elif index == 5:
dest = "fra"
elif index == 6:
dest = "spa"
elif index == 7:
dest = "th"
elif index == 8:
dest = "ru"
elif index == 9:
dest = "de"
else:
pass
text = self.get_word()
text = re.sub(r"\n|\s+", " ", text)
if text == "":
self.result_edit.insertHtml(
"<html><font color=fuchsia>请输入关键词!!!</font><br></html>")
return
self.t = YouDaoTans(dest, text)
self.t.start()
self.result_edit.setPlaceholderText("......")
self.t.trigger.connect(self.on_AI_translate)
def on_AI_translate(self):
global AITransData
if AITransData:
self.result_edit.setPlainText(AITransData)
else:
self.result_edit.setPlainText("error")
AITransData = ""
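# Worker thread: fetches the AI translation from the remote API off the UI thread,
# stores the result in the AITransData global and emits `trigger` when finished.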
class YouDaoTans(QThread):
trigger = pyqtSignal()
def __init__(self, dest, text):
super().__init__()
self.dest = dest
self.text = text
def run(self):
global AITransData
try:
strings = GetSource.get_source_code(
"http://106.12.179.253:8089/lsl/api/word?words=" + self.text + "&to=" + self.dest)
res = json.loads(strings)
AITransData = res['result']
except:
AITransData = "error,请重试!"
self.trigger.emit()
class GTranslator(QThread):
trigger = pyqtSignal()
def __init__(self, dest, content):
super().__init__()
self.content = content
self.dest = dest
def run(self):
Data = []
global GTransData
T = Translator(service_urls=["translate.google.cn"])
try:
ts = T.translate(self.content, dest=self.dest)
if isinstance(ts.text, list):
for i in ts:
Data.append(i.text)
GTransData = Data
else:
GTransData = ts.text
except:
GTransData = "An error happended. Please retry..."
self.trigger.emit()
class CheckUpdate(QThread):
trigger = pyqtSignal()
def __init__(self):
super().__init__()
def run(self):
try:
url = "http://106.12.179.253:9999/api/checkUpdate"
code = GetSource.get_source_code(url)
content = json.loads(code)
version = content["version"]
if version != "1.23":
mes = content["message"]
update_dialog = UpdateDialog(mes)
else:
pass
self.trigger.emit()
except:
pass
if __name__ == "__main__":
app = QApplication(sys.argv)
myWin = APP()
myWin.show()
sys.exit(app.exec_())
| 30.591743
| 98
| 0.531789
|
0803fa166f4e4f137e748daa71fc11047c220377
| 2,889
|
py
|
Python
|
wollof/follow.py
|
rm-hull/follow
|
98d8e549ef568a1cb93ca60841b3d506c3bdcb30
|
[
"MIT"
] | null | null | null |
wollof/follow.py
|
rm-hull/follow
|
98d8e549ef568a1cb93ca60841b3d506c3bdcb30
|
[
"MIT"
] | null | null | null |
wollof/follow.py
|
rm-hull/follow
|
98d8e549ef568a1cb93ca60841b3d506c3bdcb30
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import itertools
import select
import subprocess
import sys
import threading
from datetime import datetime
from colored import attr, bg, fg
from wollof.color_palette import MUTED, PASTEL
reset = attr('reset')
error_marker = bg('red') + fg('white')
finished_marker = bg('white') + fg('black')
def _output(color, prefix, line, include_timestamp=True, line_color=''):
if include_timestamp:
timestamp = datetime.now().strftime('%H:%M:%S.%f')
sys.stdout.write(
f'{color}{prefix} | {timestamp} | {reset}{line_color}{line.rstrip()}{reset}\n')
else:
sys.stdout.write(f'{color}{prefix} | {reset}{line.rstrip()}{reset}\n')
def _watch_stream(color, prefix, fp, line_color=''):
while True:
data = fp.readline().decode('utf-8')
if data:
_output(color, prefix, data, line_color=line_color)
else:
break
def _with_keyboard_interrupts_suppressed(func):
def __wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except KeyboardInterrupt:
pass
return __wrapper
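# Launch one command and tail its stdout/stderr in separate threads, prefixing every
# line with the colour-coded command name so interleaved output stays readable.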
def _follow_command(color, command, max_width):
prefix = command[0:max_width].ljust(max_width)
process = subprocess.Popen(command.split(
' '), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout_watcher = threading.Thread(
target=_watch_stream,
args=(color, prefix, process.stdout))
stderr_watcher = threading.Thread(
target=_watch_stream,
args=(color, prefix, process.stderr),
kwargs=dict(line_color=error_marker))
stdout_watcher.start()
stderr_watcher.start()
stdout_watcher.join()
stderr_watcher.join()
process.wait()
_output(color, prefix,
f'*** Process terminated with exit code: {process.returncode} ***', line_color=finished_marker)
def process(argv):
    parser = argparse.ArgumentParser(
        description='Run several commands concurrently and interleave their prefixed output.')
    parser.add_argument('commands', metavar='CMD', type=str, nargs='+',
                        help='commands to run concurrently (use single quotes for passing arguments)')
args = parser.parse_args(argv)
max_width = len(max(args.commands, key=len))
follow_cmd = _with_keyboard_interrupts_suppressed(_follow_command)
threads = [threading.Thread(target=follow_cmd, args=(color_name, command, max_width))
for color_name, command in zip(itertools.cycle(PASTEL), args.commands)]
for thread in threads:
thread.start()
for thread in threads:
thread.join()
if __name__ == '__main__':
process(['./random_output.py 4',
'./random_output.py 6', './random_output.py 6',
'./random_output.py 6', './random_output.py 6', './random_output.py 6',
'./random_output.py 6', './random_output.py 6'])
| 29.783505
| 107
| 0.653167
|
23abfa2857e1417d7a3f0002ea2105dc57e34967
| 2,664
|
py
|
Python
|
repomas/models.py
|
oumao/mastereg
|
396d389bfbd6488459d8f64e2cd6dadc9b58ad5b
|
[
"MIT"
] | 1
|
2021-03-07T09:44:41.000Z
|
2021-03-07T09:44:41.000Z
|
repomas/models.py
|
oumao/mastereg
|
396d389bfbd6488459d8f64e2cd6dadc9b58ad5b
|
[
"MIT"
] | 1
|
2021-03-21T20:22:17.000Z
|
2021-03-21T20:22:17.000Z
|
repomas/models.py
|
oumao/mastereg
|
396d389bfbd6488459d8f64e2cd6dadc9b58ad5b
|
[
"MIT"
] | null | null | null |
from repomas import db, login_manager
from flask_login import UserMixin
@login_manager.user_loader
def load_user(admin_id):
return Admin.query.get(int(admin_id))
class Admin(db.Model, UserMixin):
__tablename__ = "administrator"
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
first_name = db.Column(db.String(50), nullable=False)
last_name = db.Column(db.String(50), nullable=False)
email = db.Column(db.String(50), nullable=False, unique=True)
username = db.Column(db.String(15), nullable=False, unique=True)
password = db.Column(db.String(255), nullable=False)
def __repr__(self) -> str:
return "<Admin: {0} {1} {2} {3}>".format(self.first_name, self.last_name,
self.email, self.username)
class Student(db.Model):
__tablename__ = "students"
__searchable__ = ['admission']
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
first_name = db.Column(db.String(50), nullable=False)
last_name = db.Column(db.String(50), nullable=False)
admission = db.Column(db.String(20), nullable=False, unique=True)
birthdate = db.Column(db.DateTime, nullable=False)
zipcode = db.Column(db.String(10), nullable=False)
county = db.Column(db.String(15), nullable=False)
ward = db.Column(db.String(15), nullable=False)
medicalstat = db.relationship('MedicalStatus', backref='student', lazy='dynamic')
def __repr__(self) -> str:
        return "<Student: {0} {1} {2} {3} {4} {5} {6}>".format(
self.first_name, self.last_name, self.admission, self.birthdate,
self.zipcode, self.county, self.ward
)
class MedicalStatus(db.Model):
__tablename__ = 'medicalstatus'
__searchable__ = ['diagnosis', 'outcome', 'need_referral', 'disabled']
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
height = db.Column(db.Float, nullable=False)
weight = db.Column(db.Float, nullable=False)
disabled = db.Column(db.Boolean, nullable=False, default=False)
diagnosis = db.Column(db.String(100), nullable=False)
underlying = db.Column(db.String(50), nullable=False)
drug = db.Column(db.String(50), nullable=False)
outcome = db.Column(db.String(50), nullable=False)
need_referral = db.Column(db.Boolean, nullable=False, default=False)
student_id = db.Column(db.Integer, db.ForeignKey('students.id'))
def __repr__(self) -> str:
return "<Medical Status: {0} {1} {2} {3} {4} {5} {6} {7}>".format(
self.height, self.weight, self.disabled,
self.diagnosis, self.underlying, self.drug,
self.outcome, self.need_referral
)
| 36
| 85
| 0.6753
|
187b4fbe94a221126760180a6b88a7b0450b6264
| 3,677
|
py
|
Python
|
CKY_Parser/BackupGrammer.py
|
Deekshantiiitd/NLP-2019
|
36715d6032254bfd684fe4b9dcdebe94c3edaddc
|
[
"Apache-2.0"
] | null | null | null |
CKY_Parser/BackupGrammer.py
|
Deekshantiiitd/NLP-2019
|
36715d6032254bfd684fe4b9dcdebe94c3edaddc
|
[
"Apache-2.0"
] | null | null | null |
CKY_Parser/BackupGrammer.py
|
Deekshantiiitd/NLP-2019
|
36715d6032254bfd684fe4b9dcdebe94c3edaddc
|
[
"Apache-2.0"
] | null | null | null |
import nltk,re,codecs
from nltk.tokenize import word_tokenize,sent_tokenize
from backNode import BackNode
from nltk import Tree
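# Recursively convert a BackNode back-pointer trace into a bracketed parse string
# that nltk.Tree.fromstring can later render.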
def trace_tree(trace):
if trace.left==None and trace.right==None:
return str(trace.root)+" "+str(trace.word)
return "("+str(trace.root)+"("+str(trace_tree(trace.left))+")"+" "+"("+str(trace_tree(trace.right))+")"+")"
def data_preprosessing():
#fp=codecs.open(f'F:/MTECH1/NLP/Assignment5/Training_set.txt','r',encoding='utf-8',errors='ignore')
#=nltk.data.load("grammars/large_grammars/atis_sentences.txt")
with open('F:/MTECH1/NLP/Assignment5/Training_set.txt') as f:
lines = f.readlines()
for i in range(0,len(lines)):
lines[i]=re.sub(r'\d+\s:\s',"",lines[i])
#print(lines[i])
lines = [line.rstrip('\n') for line in lines]
#print(lines)
#list_sentences=sent_tokenize(s)
"""parser = nltk.parse.BottomUpChartParser(grammer)
for i in list_sentences:
i=word_tokenize(i)
for tree in parser.parse(i):
result=list(tree)
print(result)
for tree in result:
tree.draw()"""
#print(lines)
return lines
lines=data_preprosessing()
def grammer_parse():
grammer=(nltk.data.load("grammars/large_grammars/atis.cfg"))
grammar=grammer.chomsky_normal_form(new_token_padding='#',flexible=False)
grammar_dict={}
for production in grammar.productions():
prod=list(production.rhs())
prod_rhs=" "
for i in prod:
prod_rhs=prod_rhs+" "+str(i)
prod_rhs=prod_rhs.strip()
if prod_rhs in grammar_dict.keys():
temp1=production.lhs()
grammar_dict[prod_rhs].append(temp1)
else:
temp1=production.lhs()
grammar_dict[prod_rhs]=[temp1]
#print(len(grammar_dict))
return grammar_dict
grammar=grammer_parse()
def parse(lines,grammar):
line=[]
line=lines[56].split()
line.insert(0," ")
#x="i need a flight from pittsburgh to newark on monday ."
#line=x.split()
#line.insert(0," ")
length=len(line)
print(line)
tree_set=set()
parse_table=[[ set() for col in range(length+1)] for row in range(length+1)]
back_table=[[ [] for col in range(length+1)] for row in range(length+1)]
#grammer=(nltk.data.load("grammars/large_grammars/atis.cfg"))
#print((grammar))
#grammar=(nltk.data.load("grammars/sample_grammars/toy.cfg"))
#print(type(grammer))
#grammar=grammer.chomsky_normal_form(new_token_padding='#',flexible=False)
#print(grammar)
for k in range(1,len(line)):
if line[k] in grammar.keys():
lhs=grammar[line[k]]
for l in lhs:
parse_table[k][k].add(l)
back_table[k][k].append(BackNode(None,None,l,line[k]))
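    # CKY chart fill: for every span width w and start s, try each split point m and
    # combine constituents from parse_table[s][m] and parse_table[m+1][end-1] whenever
    # the pair matches the right-hand side of a binary rule.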
for w in range(2,length):
#print("*")
for s in range(1,length-w+1):
#print("**")
end=w+s
for m in range(s,end-1):
#print("***")
for p in parse_table[s][m]:
for q in parse_table[m+1][end-1]:
#print(q)
x=str(p)+" "+str(q)
#print(x)
if x in grammar.keys() and (len(x.split())==2):
lhs=grammar[x]
#print(s,m)
for l in lhs:
parse_table[s][end-1].add(l)
prod=x.split()
for r1 in back_table[s][m]:
for r2 in back_table[m+1][end-1]:
#print(s,m)
#print(m+1,end-1)
if(str(r1.root)==prod[0] and str(r2.root)==prod[1]):
back_table[s][end-1].append(BackNode(r1,r2,l,None))
#print(back_table[s][end-1])
#print(back_table)
if ("SIGMA" in str(parse_table[1][length-1])):
#print(back_table)
for pointer in back_table[1][length-1]:
if(str(pointer.root)=="SIGMA"):
value=trace_tree(pointer)
tree_set.add(value)
print(tree_set)
print(len(tree_set))
for result in tree_set:
            trees = Tree.fromstring(result)
trees.draw()
else:
print("No parse tree exist")
parse(lines,grammar)
| 25.894366
| 108
| 0.658689
|
07cecadf7d077980e4277349e847e3c7a9b387e0
| 1,168
|
py
|
Python
|
bets/migrations/0001_initial.py
|
tmooney/zach-loss-counter
|
979a212b63695b34c201cdeec1309519422b0bae
|
[
"MIT"
] | null | null | null |
bets/migrations/0001_initial.py
|
tmooney/zach-loss-counter
|
979a212b63695b34c201cdeec1309519422b0bae
|
[
"MIT"
] | 5
|
2019-01-08T18:35:19.000Z
|
2019-03-26T17:58:09.000Z
|
bets/migrations/0001_initial.py
|
tmooney/zach-loss-counter
|
979a212b63695b34c201cdeec1309519422b0bae
|
[
"MIT"
] | 3
|
2018-11-27T18:06:52.000Z
|
2019-03-12T18:02:03.000Z
|
# Generated by Django 2.1.3 on 2018-11-13 19:26
import bets.enums
from django.db import migrations, models
import enumchoicefield.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Bet',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('description', models.TextField()),
('wager', models.TextField()),
('bettor', models.CharField(max_length=100)),
('opponent', models.CharField(max_length=100)),
('deadline', models.DateTimeField(verbose_name='end date')),
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='bet made date')),
('outcome', enumchoicefield.fields.EnumChoiceField(blank=True, enum_class=bets.enums.BetResultChoice, max_length=1)),
('state', enumchoicefield.fields.EnumChoiceField(default=bets.enums.BetStateChoice(2), enum_class=bets.enums.BetStateChoice, max_length=1)),
],
),
]
| 37.677419
| 156
| 0.624144
|
89bddc8d45e3862c9b00f9145470fe147b98db69
| 2,643
|
py
|
Python
|
server.py
|
Sicilat/python-server
|
1b04328021ce77321392df9613e7ba079fa518a5
|
[
"MIT"
] | null | null | null |
server.py
|
Sicilat/python-server
|
1b04328021ce77321392df9613e7ba079fa518a5
|
[
"MIT"
] | null | null | null |
server.py
|
Sicilat/python-server
|
1b04328021ce77321392df9613e7ba079fa518a5
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
from logging.handlers import RotatingFileHandler
import pickle
import socket
import threading
class ThreadedServer(object):
def __init__(self, host, port):
self.host = host
self.port = port
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.bind((self.host, self.port))
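    # Accept incoming connections forever, handing each client socket to its own
    # listener thread so several clients can be served concurrently.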
def listen(self):
self.sock.listen(5)
while True:
client, address = self.sock.accept()
client.settimeout(60)
threading.Thread(target = self.listenToClient,args = (client,address)).start()
def listenToClient(self, client, address):
size = 1024
global logger
while True:
try:
data = client.recv(size)
if data:
#Received data
logger.debug("Received data from client")
else:
raise Exception(logger.error("Client disconnected"))
except:
client.close()
return False
def unpickle_data(tdata):
    r"""
    Unpickle the data from the client
    >>> unpickle_data(b'\x80\x03X\n\x00\x00\x00Easy testsq\x00.')
    'Easy tests'
    >>> unpickle_data("Easy tests")
    'Easy tests'
    """
try:
if isinstance(tdata, bytes): #Quick check if tdata is already bytes
data = pickle.loads(tdata)
else:
data = tdata
except:
data = False
return data
def pickle_data(tdata):
    r"""
    Pickle the data for the client
    >>> pickle_data(b'\x80\x03X\n\x00\x00\x00Easy testsq\x00.')
    b'\x80\x03X\n\x00\x00\x00Easy testsq\x00.'
    >>> pickle_data("Easy tests")
    b'\x80\x03X\n\x00\x00\x00Easy testsq\x00.'
    """
try:
if isinstance(tdata, bytes): #Quick check if tdata is already bytes
data = tdata
else:
data = pickle.dumps(tdata)
except:
data = False
return data
###Starting the logger###
try:
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
file_handler = RotatingFileHandler('activity.log', 'a', 1000000, 1)
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
except:
print("!!! Failed to launch logger !!!")
print("!!! Immediate shutdown !!!")
exit()
###End of logger starting###
logger.info("Server starting...")
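# Usage sketch (not part of the original module): a caller would typically start the
# server with something like ThreadedServer('0.0.0.0', 5555).listen(), where the host
# and port shown here are placeholder values.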
| 29.366667
| 90
| 0.600076
|
af4beaa97f32ab91d03b283634ee81b3da03e8cd
| 3,470
|
py
|
Python
|
ReturnBook.py
|
HaseebRajput007/Library-Management-roject-python
|
489cfad4d6937c3b031337ea740093909608fb3f
|
[
"MIT"
] | 1
|
2021-08-02T16:21:45.000Z
|
2021-08-02T16:21:45.000Z
|
ReturnBook.py
|
HaseebRajput007/Library-Management-roject-python
|
489cfad4d6937c3b031337ea740093909608fb3f
|
[
"MIT"
] | null | null | null |
ReturnBook.py
|
HaseebRajput007/Library-Management-roject-python
|
489cfad4d6937c3b031337ea740093909608fb3f
|
[
"MIT"
] | null | null | null |
from tkinter import *
from PIL import ImageTk,Image
from tkinter import messagebox
import pymysql
# Add your own database name and password here to reflect in the code
mypass = "root"
mydatabase="db"
con = pymysql.connect(host="localhost",user="root",password=mypass,database=mydatabase)
cur = con.cursor()
# Enter Table Names here
issueTable = "books_issued" #Issue Table
bookTable = "books" #Book Table
allBid = [] #List To store all Book IDs
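# Return flow: confirm the entered book ID exists in the issue table and is still
# marked 'issued', then delete the issue record and flag the book as available again.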
def returnn():
global SubmitBtn,labelFrame,lb1,bookInfo1,quitBtn,root,Canvas1,status
    bid = bookInfo1.get()
    status = False  # default until this book's issue status has been confirmed
extractBid = "select bid from "+issueTable
try:
cur.execute(extractBid)
con.commit()
for i in cur:
allBid.append(i[0])
if bid in allBid:
checkAvail = "select status from "+bookTable+" where bid = '"+bid+"'"
cur.execute(checkAvail)
con.commit()
for i in cur:
check = i[0]
if check == 'issued':
status = True
else:
status = False
else:
messagebox.showinfo("Error","Book ID not present")
except:
messagebox.showinfo("Error","Can't fetch Book IDs")
issueSql = "delete from "+issueTable+" where bid = '"+bid+"'"
print(bid in allBid)
print(status)
updateStatus = "update "+bookTable+" set status = 'avail' where bid = '"+bid+"'"
try:
if bid in allBid and status == True:
cur.execute(issueSql)
con.commit()
cur.execute(updateStatus)
con.commit()
messagebox.showinfo('Success',"Book Returned Successfully")
else:
allBid.clear()
messagebox.showinfo('Message',"Please check the book ID")
root.destroy()
return
except:
messagebox.showinfo("Search Error","The value entered is wrong, Try again")
allBid.clear()
root.destroy()
def returnBook():
global bookInfo1,SubmitBtn,quitBtn,Canvas1,con,cur,root,labelFrame, lb1
root = Tk()
root.title("Library")
root.minsize(width=400,height=400)
root.geometry("600x500")
Canvas1 = Canvas(root)
Canvas1.config(bg="#006B38")
Canvas1.pack(expand=True,fill=BOTH)
headingFrame1 = Frame(root,bg="#FFBB00",bd=5)
headingFrame1.place(relx=0.25,rely=0.1,relwidth=0.5,relheight=0.13)
headingLabel = Label(headingFrame1, text="Return Book", bg='black', fg='white', font=('Courier',15))
headingLabel.place(relx=0,rely=0, relwidth=1, relheight=1)
labelFrame = Frame(root,bg='black')
labelFrame.place(relx=0.1,rely=0.3,relwidth=0.8,relheight=0.5)
# Book ID to Delete
lb1 = Label(labelFrame,text="Book ID : ", bg='black', fg='white')
lb1.place(relx=0.05,rely=0.5)
bookInfo1 = Entry(labelFrame)
bookInfo1.place(relx=0.3,rely=0.5, relwidth=0.62)
#Submit Button
SubmitBtn = Button(root,text="Return",bg='#d1ccc0', fg='black',command=returnn)
SubmitBtn.place(relx=0.28,rely=0.9, relwidth=0.18,relheight=0.08)
quitBtn = Button(root,text="Quit",bg='#f7f1e3', fg='black', command=root.destroy)
quitBtn.place(relx=0.53,rely=0.9, relwidth=0.18,relheight=0.08)
root.mainloop()
| 30.707965
| 105
| 0.58415
|
73adb04655028191f274ea47274bdbb74c77544a
| 782
|
py
|
Python
|
117-Try dan Except/Script.py
|
dikyindrah/Python-Pemrograman-Dasar-02
|
7b60391d348504eea8ebc36896857e3e92fda3f7
|
[
"MIT"
] | null | null | null |
117-Try dan Except/Script.py
|
dikyindrah/Python-Pemrograman-Dasar-02
|
7b60391d348504eea8ebc36896857e3e92fda3f7
|
[
"MIT"
] | null | null | null |
117-Try dan Except/Script.py
|
dikyindrah/Python-Pemrograman-Dasar-02
|
7b60391d348504eea8ebc36896857e3e92fda3f7
|
[
"MIT"
] | null | null | null |
try:
print(x)
except:
print('error!, variabel x belum didefinisikan.')
import sys
list_item = ['a', 0, 2]
for item in list_item:
try:
print('item :', item)
n = int(item)/int(item)
break
except:
print('Terjadi kesalahan :', sys.exc_info()[0], 'pada kode program')
print('Selanjutnya..\n')
print(item,'/',item, '=', int(n))
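# Catching any exception generically and inspecting its class: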
try:
for i in range(4):
print(list_item[i])
except Exception as e:
print(f'Terjadi kesalahan {e.__class__} pada kode program.')
try:
print(x)
except NameError as ne:
print(ne)
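# Several except clauses: only the first clause matching the raised exception runs.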
try:
print(x)
except ValueError as ve:
print(ve)
except NameError as ne:
    # This is the handler that matches the raised exception
print(ne)
except IndexError as ie:
print(ie)
| 17.772727
| 76
| 0.61509
|
337e9463992dfd119d0fdd128e952edf1af81feb
| 2,477
|
py
|
Python
|
exp-ae-celeba-mafl-30.py
|
d1ngn1gefe1/lmdis-rep
|
b334d7e5eb281948107ac6781807945ff7a4dfc2
|
[
"Apache-2.0"
] | 133
|
2018-06-19T22:17:59.000Z
|
2022-03-03T06:49:45.000Z
|
exp-ae-celeba-mafl-30.py
|
d1ngn1gefe1/lmdis-rep
|
b334d7e5eb281948107ac6781807945ff7a4dfc2
|
[
"Apache-2.0"
] | 5
|
2018-06-26T23:38:50.000Z
|
2019-12-30T02:43:43.000Z
|
exp-ae-celeba-mafl-30.py
|
d1ngn1gefe1/lmdis-rep
|
b334d7e5eb281948107ac6781807945ff7a4dfc2
|
[
"Apache-2.0"
] | 21
|
2018-08-24T09:58:17.000Z
|
2022-01-11T02:15:40.000Z
|
import tensorflow as tf
import os
import sys
from copy import copy
from model.pipeline import Pipeline
from tensorflow.python import debug as tf_debug
if __name__ == "__main__":
num_keypoints = 30
patch_feature_dim = 8
decoding_levels = 5
kp_transform_loss = 1e4
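    # Piecewise-constant schedules: the reconstruction weight is raised 100x and then
    # 1000x at the 100k/200k step boundaries, while the learning rate is decayed 10x
    # at the same boundaries.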
base_recon_weight = 0.1
recon_weight = Pipeline.ValueScheduler(
"piecewise_constant",
[100000, 200000],
[base_recon_weight, base_recon_weight*100, base_recon_weight*1000]
)
base_learning_rate=0.01
learning_rate = Pipeline.ValueScheduler(
"piecewise_constant",
[100000, 200000],
[base_learning_rate, base_learning_rate*0.1, base_learning_rate*0.01]
)
keypoint_separation_bandwidth=0.04
keypoint_separation_loss_weight = 10
opt = {
"optimizer": "Adam",
"data_name": "celeba_mafl_100x100_80x80",
"recon_name": "gaussian_fixedvar_in_01",
"encoder_name": "general_80x80",
"decoder_name": "general_80x80",
"latent_dim": num_keypoints*2+(num_keypoints+1)*patch_feature_dim,
"train_color_jittering": True,
"train_random_mirroring": False,
"train_batch_size": 8,
"train_shuffle_capacity": 1000,
"learning_rate": learning_rate,
"max_epochs": 2000,
"weight_decay": 1e-6,
"test_steps": 5000,
"test_limit": 200,
"recon_weight": recon_weight,
}
opt["encoder_options"] = {
"keypoint_num": num_keypoints,
"patch_feature_dim": patch_feature_dim,
"ae_recon_type": opt["recon_name"],
"keypoint_concentration_loss_weight": 100.,
"keypoint_axis_balancing_loss_weight": 200.,
"keypoint_separation_loss_weight": keypoint_separation_loss_weight,
"keypoint_separation_bandwidth": keypoint_separation_bandwidth,
"keypoint_transform_loss_weight": kp_transform_loss,
"keypoint_decoding_heatmap_levels": decoding_levels,
"keypoint_decoding_heatmap_level_base": 0.5**(1/2),
"image_channels": 3,
}
opt["decoder_options"] = copy(opt["encoder_options"])
# -------------------------------------
model_dir = os.path.join("results/celeba_30")
vp = Pipeline(None, opt, model_dir=model_dir)
print(vp.opt)
with vp.graph.as_default():
sess = vp.create_session()
vp.run_full_train(sess, restore=True)
vp.run_full_test(sess)
| 31.75641
| 81
| 0.650787
|
09a60d1ba89f4bb1ff468d874a0543dae8338cc9
| 1,868
|
py
|
Python
|
tapioca_freshdesk/resource_mapping.py
|
tobiase/tapioca-freshdesk
|
ca499b6755f9194fe87f4b458db0eca2eff061ec
|
[
"MIT"
] | null | null | null |
tapioca_freshdesk/resource_mapping.py
|
tobiase/tapioca-freshdesk
|
ca499b6755f9194fe87f4b458db0eca2eff061ec
|
[
"MIT"
] | null | null | null |
tapioca_freshdesk/resource_mapping.py
|
tobiase/tapioca-freshdesk
|
ca499b6755f9194fe87f4b458db0eca2eff061ec
|
[
"MIT"
] | null | null | null |
# coding: utf-8
RESOURCE_MAPPING = {
"contact": {
"resource": "contacts/{id}",
"docs": [
"https://developers.freshdesk.com/api/#contacts",
"https://developers.freshdesk.com/api/#update_contact",
"https://developers.freshdesk.com/api/#delete_contact",
],
"methods": ["GET", "PUT", "DELETE"],
},
"contacts": {
"resource": "contacts",
"docs": [
"https://developers.freshdesk.com/api/#contacts",
"https://developers.freshdesk.com/api/#create_contact",
],
"methods": ["GET", "POST"],
},
"contacts_filter": {
"resource": "search/contacts",
"docs": "https://developers.freshdesk.com/api/#filter_contacts",
"methods": ["GET"],
},
"contact_delete": {
"resource": "contacts/{id}/hard_delete",
"docs": "https://developers.freshdesk.com/api/#hard_delete_contact",
"methods": ["DELETE"],
},
"contact_restore": {
"resource": "contacts/{id}/restore",
"docs": "https://developers.freshdesk.com/api/#restore_contact",
"methods": ["PUT"],
},
"contact_fields": {
"resource": "contact_fields",
"docs": "https://developers.freshdesk.com/api/#list_all_contact_fields",
"methods": ["GET"],
},
"contact_make_agent": {
"resource": "contacts/{id}/make_agent",
"docs": "https://developers.freshdesk.com/api/#make_agent",
"methods": ["PUT"],
},
"contact_send_invite": {
"resource": "contacts/{id}/send_invite",
"docs": "https://developers.freshdesk.com/api/#send_invite",
"methods": ["PUT"]
},
"contacts_merge": {
"resource": "contacts/merge",
"docs": "https://developers.freshdesk.com/api/#merge_contact",
"methods": ["POST"]
}
}
| 32.77193
| 80
| 0.549786
|
694d14a521e497124c5894ced81253c8f543b15d
| 2,782
|
py
|
Python
|
tests/env/packages/env_test_sphinxcontrib-jsmath.py
|
TerraWilly/foxbms-2
|
62288b333fe6da52deae91f74fb15e71060ac99c
|
[
"CC-BY-4.0"
] | 1
|
2021-04-11T02:02:28.000Z
|
2021-04-11T02:02:28.000Z
|
tests/env/packages/env_test_sphinxcontrib-jsmath.py
|
TerraWilly/foxbms-2
|
62288b333fe6da52deae91f74fb15e71060ac99c
|
[
"CC-BY-4.0"
] | null | null | null |
tests/env/packages/env_test_sphinxcontrib-jsmath.py
|
TerraWilly/foxbms-2
|
62288b333fe6da52deae91f74fb15e71060ac99c
|
[
"CC-BY-4.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @copyright © 2010 - 2021, Fraunhofer-Gesellschaft zur Foerderung der
# angewandten Forschung e.V. All rights reserved.
#
# BSD 3-Clause License
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# We kindly request you to use one or more of the following phrases to refer to
# foxBMS in your hardware, software, documentation or advertising materials:
#
# ″This product uses parts of foxBMS®″
#
# ″This product includes parts of foxBMS®″
#
# ″This product is derived from foxBMS®″
# pylint: disable=invalid-name
"""Testing 'sphinxcontrib-jsmath' package"""
import logging
import argparse
# package to test
from sphinxcontrib import jsmath # pylint: disable=unused-import
def main():
"""Testing 'sphinxcontrib-jsmath' package"""
parser = argparse.ArgumentParser()
parser.add_argument(
"-v",
"--verbosity",
dest="verbosity",
action="count",
default=0,
help="set verbosity level",
)
args = parser.parse_args()
if args.verbosity == 1:
logging.basicConfig(level=logging.INFO)
elif args.verbosity > 1:
logging.basicConfig(level=logging.DEBUG)
else:
logging.basicConfig(level=logging.ERROR)
if __name__ == "__main__":
main()
| 37.594595
| 79
| 0.736161
|
a7e4348b1f82234c92c35c59fd7ab3418af57128
| 12,571
|
py
|
Python
|
language/canine/tydiqa/preproc_test.py
|
urikz/language
|
503aca178c98fed4c606cf83e58ae0f84012a4d9
|
[
"Apache-2.0"
] | 1,199
|
2018-10-16T01:30:18.000Z
|
2022-03-31T21:05:24.000Z
|
language/canine/tydiqa/preproc_test.py
|
urikz/language
|
503aca178c98fed4c606cf83e58ae0f84012a4d9
|
[
"Apache-2.0"
] | 116
|
2018-10-18T03:31:46.000Z
|
2022-03-24T13:40:50.000Z
|
language/canine/tydiqa/preproc_test.py
|
urikz/language
|
503aca178c98fed4c606cf83e58ae0f84012a4d9
|
[
"Apache-2.0"
] | 303
|
2018-10-22T12:35:12.000Z
|
2022-03-27T17:38:17.000Z
|
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pprint
from typing import Text
from language.canine.tydiqa import char_splitter
from language.canine.tydiqa import preproc
from language.canine.tydiqa import tydi_tokenization_interface
import tensorflow.compat.v1 as tf
# For test_srcdir
flags = tf.flags
FLAGS = flags.FLAGS
def _print_dict(d):
"""Can be used on, e.g. a `json_dict` or `result`."""
print(pprint.PrettyPrinter().pformat(d).replace("°", "@"))
def make_tokenizer() -> tydi_tokenization_interface.TokenizerWithOffsets:
return char_splitter.CharacterSplitter()
_JSON_MIN_ANSWER = {
"document_title":
"Zebra finch",
"document_url":
"https://en.wikipedia.org/wiki/Zebra%20finch",
"example_id":
111,
"language":
"english",
"document_plaintext": #
# Passage 0
"The zebra finch is the most common estrildid finch. The bird has "
"been introduced to Puerto Rico.\n"
# Passage 1
"The body temperature (as measured from the cloaca) of the zebra "
"finch may vary from 38 to 44 °C.\n"
# Passage 2
"The zebra finch was first collected in 1801 during Nicolas "
"Baudin's expedition to Australia. It was described in 1817 by "
"Louis Jean Pierre Vieillot in his Nouveau Dictionnaire d'Histoire "
"Naturelle.\n"
# Passage 3
"Morphological differences between the subspecies. Males do not "
"have the fine barring found on the throat and upper breast.\n"
# Passage 4
"Symmetry of both plumage, like chest bands, and artificial "
"features, like leg bands, are preferred by the female.\n"
# Passage 5
"Nest predators of the zebra finch include the tiger snake.",
"question_text":
"Where are a zebra finch's stripes located?",
"passage_answer_candidates": [{
"plaintext_start_byte": 0,
"plaintext_end_byte": 96,
}, {
"plaintext_start_byte": 97,
"plaintext_end_byte": 194,
}, {
"plaintext_start_byte": 195,
"plaintext_end_byte": 392,
}, {
"plaintext_start_byte": 393,
"plaintext_end_byte": 515,
}, {
"plaintext_start_byte": 516,
"plaintext_end_byte": 629,
}, {
"plaintext_start_byte": 630,
"plaintext_end_byte": 688,
}],
"annotations": [
{
"annotation_id": 222,
"minimal_answer": {
# "chest"
"plaintext_start_byte": 547,
"plaintext_end_byte": 552,
},
"passage_answer": {
"candidate_index": 4
},
"yes_no_answer": "NONE"
},
{
"annotation_id": 333,
"minimal_answer": {
"plaintext_start_byte": -1,
"plaintext_end_byte": -1,
},
"passage_answer": {
"candidate_index": 3
},
"yes_no_answer": "NONE"
},
{
"annotation_id": 444,
"minimal_answer": {
# "throat and upper breast"
"plaintext_start_byte": 491,
"plaintext_end_byte": 514,
},
"passage_answer": {
"candidate_index": 3
},
"yes_no_answer": "NONE"
},
{
"annotation_id": 555,
"minimal_answer": {
# "throat"
"plaintext_start_byte": 491,
"plaintext_end_byte": 497,
},
"passage_answer": {
"candidate_index": 3
},
"yes_no_answer": "NONE"
}
],
}
_JSON_PASSAGE_ANSWER = {
"document_title":
"Zebra finch",
"document_url":
"https://en.wikipedia.org/wiki/Zebra%20finch",
"example_id":
200,
"language":
"english",
"document_plaintext": #
# Passage 0
"The zebra finch is the most common estrildid finch.\n"
# Passage 1
"The body temperature may vary from 38 to 44 °C.\n"
# Passage 2
"Nest predators include the tiger snake.",
"question_text":
"Something without a minimal answer?",
"passage_answer_candidates": [{
"plaintext_start_byte": 0,
"plaintext_end_byte": 51,
}, {
"plaintext_start_byte": 52,
"plaintext_end_byte": 100,
}, {
"plaintext_start_byte": 101,
"plaintext_end_byte": 140,
}],
"annotations": [{
"annotation_id": 300,
"minimal_answer": {
"plaintext_start_byte": -1,
"plaintext_end_byte": -1,
},
"passage_answer": {
"candidate_index": 2
},
"yes_no_answer": "NONE"
}, {
"annotation_id": 400,
"minimal_answer": {
"plaintext_start_byte": -1,
"plaintext_end_byte": -1,
},
"passage_answer": {
"candidate_index": 1
},
"yes_no_answer": "NONE"
}],
}
_JSON_NO_ANSWER = {
"document_title":
"Zebra finch",
"document_url":
"https://en.wikipedia.org/wiki/Zebra%20finch",
"example_id":
200,
"language":
"english",
"document_plaintext": #
# Passage 0
"The zebra finch is the most common estrildid finch.\n"
# Passage 1
"The body temperature may vary from 38 to 44 °C.",
"question_text":
"Something without a minimal answer?",
"passage_answer_candidates": [{
"plaintext_start_byte": 0,
"plaintext_end_byte": 51,
}, {
"plaintext_start_byte": 52,
"plaintext_end_byte": 100,
}],
"annotations": [],
}
class PreprocTest(tf.test.TestCase):
def assertCreateEntryFromJsonResult(
self,
json_dict,
result,
expected_context: Text,
expected_answer_type: Text,
expected_passage_answer_index: int,
expected_min_span_start: int,
expected_min_span_end: int,
expected_min_span_text: Text,
):
self.assertAllEqual(
set(result),
set([
"id", "name", "language", "plaintext", "question", "contexts",
"answer", "context_to_plaintext_offset", "has_correct_context"
]))
# Assert that certain fields are copied from the input.
self.assertEqual(result["id"], str(json_dict["example_id"]))
self.assertEqual(result["name"], json_dict["document_title"])
self.assertEqual(result["language"], json_dict["language"])
self.assertEqual(result["plaintext"], json_dict["document_plaintext"])
self.assertEqual(result["question"]["input_text"],
json_dict["question_text"])
# Assert that the article text is properly augmented, including the
# addition of special passage markers.
self.assertEqual(result["contexts"], expected_context)
# Assert that the correct answer information is retrieved, and that the
# answer span byte offsets into `contexts` have been computed correctly.
self.assertAllEqual(
result["answer"], {
"candidate_id": expected_passage_answer_index,
"input_text": expected_answer_type,
"span_start": expected_min_span_start,
"span_end": expected_min_span_end,
"span_text": expected_min_span_text,
})
context_bytes = result["contexts"].encode()
plaintext_bytes = json_dict["document_plaintext"].encode()
context_to_plaintext_offset = result["context_to_plaintext_offset"]
# Assert that `contexts` actually contains the expected answer at the
# location given by the computed span offsets.
self.assertEqual(
context_bytes[expected_min_span_start:expected_min_span_end].decode(),
expected_min_span_text)
# Assert that the context-to-plaintext mapping exactly covers the bytes
# of `contexts`.
self.assertLen(context_to_plaintext_offset, len(context_bytes))
# Assert that the plaintext and 'contexts' bytes actually match when
# `context_to_plaintext_offset` says they should.
mapped_context_bytes, mapped_plaintext_bytes = (
zip(*[(context_bytes[ci], plaintext_bytes[pi])
for ci, pi in enumerate(context_to_plaintext_offset)
if pi != -1]))
self.assertAllEqual(mapped_context_bytes, mapped_plaintext_bytes)
def test_create_entry_from_json_min_answer(self):
json_dict = _JSON_MIN_ANSWER
result = preproc.create_entry_from_json(
json_dict,
max_passages=45,
max_position=45,
tokenizer=make_tokenizer(),
fail_on_invalid=True)
# Checks that passage markers generated by TyDiTokenizer.get_passage_marker
# are inserted by preproc.create_entry_from_json.
self.assertCreateEntryFromJsonResult(
json_dict=json_dict,
result=result,
expected_context=(
"\ue006 The zebra finch is the most common estrildid finch. "
"The bird has been introduced to Puerto Rico. "
"\ue007 The body temperature (as measured from the cloaca) "
"of the zebra finch may vary from 38 to 44 °C. "
"\ue008 The zebra finch was first collected in 1801 during "
"Nicolas Baudin's expedition to Australia. It was described in "
"1817 by Louis Jean Pierre Vieillot in his Nouveau Dictionnaire "
"d'Histoire Naturelle. "
"\ue009 Morphological differences between the subspecies. "
"Males do not have the fine barring found on the throat and upper "
"breast. "
"\ue00a Symmetry of both plumage, like chest bands, and "
"artificial features, like leg bands, are preferred by the female. "
"\ue00b Nest predators of the zebra finch include the tiger "
"snake."),
expected_answer_type="minimal",
expected_passage_answer_index=3,
expected_min_span_start=507,
expected_min_span_end=530,
expected_min_span_text="throat and upper breast")
def test_create_entry_from_json_passage_answer(self):
json_dict = _JSON_PASSAGE_ANSWER
result = preproc.create_entry_from_json(
json_dict,
max_passages=45,
max_position=45,
tokenizer=make_tokenizer(),
fail_on_invalid=True)
# Checks that passage markers generated by TyDiTokenizer.get_passage_marker
# are inserted by preproc.create_entry_from_json.
self.assertCreateEntryFromJsonResult(
json_dict=json_dict,
result=result,
expected_context=(
"\ue006 The zebra finch is the most common estrildid finch. "
"\ue007 The body temperature may vary from 38 to 44 °C. "
"\ue008 Nest predators include the tiger snake."),
expected_answer_type="passage",
expected_passage_answer_index=1,
expected_min_span_start=60,
expected_min_span_end=108,
expected_min_span_text="The body temperature may vary from 38 to 44 °C."
)
def test_create_entry_from_json_no_answer(self):
json_dict = _JSON_NO_ANSWER
result = preproc.create_entry_from_json(
json_dict,
max_passages=45,
max_position=45,
tokenizer=make_tokenizer(),
fail_on_invalid=True)
# Checks that passage markers generated by TyDiTokenizer.get_passage_marker
# are inserted by preproc.create_entry_from_json.
self.assertCreateEntryFromJsonResult(
json_dict=json_dict,
result=result,
expected_context=(
"\ue006 The zebra finch is the most common estrildid finch. "
"\ue007 The body temperature may vary from 38 to 44 °C."),
expected_answer_type="passage",
expected_passage_answer_index=-1,
expected_min_span_start=-1,
expected_min_span_end=-1,
expected_min_span_text="")
if __name__ == "__main__":
tf.test.main()
| 34.067751
| 80
| 0.622544
|
ae1c6c3c3165a1c712405be584358a5086fea348
| 898
|
py
|
Python
|
forumsweats/commands/removesocialcredit.py
|
zzzzz28/forum-sweats
|
346c281821164ac721a028dee8b726d26374e760
|
[
"MIT"
] | null | null | null |
forumsweats/commands/removesocialcredit.py
|
zzzzz28/forum-sweats
|
346c281821164ac721a028dee8b726d26374e760
|
[
"MIT"
] | null | null | null |
forumsweats/commands/removesocialcredit.py
|
zzzzz28/forum-sweats
|
346c281821164ac721a028dee8b726d26374e760
|
[
"MIT"
] | null | null | null |
from ..commandparser import Member
from forumsweats import db
name = 'removesocialcredit'
aliases = (
'takesocialcredit', 'removesocialcredits', 'takesocialcredits', 'subtractsocialcredits', 'subtractsocialcredit',
'removesc', 'takesc', 'subtractsc'
)
roles = ('mod', 'admin')
args = '[member]'
channels = None
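# Moderation command: the amount is negated before being applied through
# db.change_social_credit, and the reported total adds 1000 to the stored value.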
async def run(message, member: Member = None, amount: int = None, reason: str = None):
if not member or not amount:
return await message.reply(f'Invalid member or amount')
amount = -amount
social_credit = await db.change_social_credit(member.id, amount) + 1000
if amount > 0:
await message.channel.send(f'<@{member.id}>, you have earned **{amount}** social credit. You now have a total of {social_credit} social credit.')
else:
await message.channel.send(f'<@{member.id}>, you have lost **{-amount}** social credit. You now have a total of {social_credit} social credit.')
| 39.043478
| 147
| 0.727171
|
115509bef8427986ef94f1a12e0be8e877a0d9fb
| 54
|
py
|
Python
|
apps/users/__init__.py
|
vuonghv/brs
|
9cdf9431ac69fd7a33d8bf4240a7d49a49ae4a80
|
[
"MIT"
] | 1
|
2021-01-13T23:42:54.000Z
|
2021-01-13T23:42:54.000Z
|
apps/users/__init__.py
|
vuonghv/brs
|
9cdf9431ac69fd7a33d8bf4240a7d49a49ae4a80
|
[
"MIT"
] | 1
|
2015-10-09T06:19:29.000Z
|
2015-10-09T06:19:29.000Z
|
apps/users/__init__.py
|
vuonghv/brs
|
9cdf9431ac69fd7a33d8bf4240a7d49a49ae4a80
|
[
"MIT"
] | 8
|
2015-10-09T02:00:34.000Z
|
2016-07-08T15:00:37.000Z
|
default_app_config = 'apps.users.apps.UsersAppConfig'
| 27
| 53
| 0.833333
|
3b490034edc31556e4f11ba1f8512fea2d89bb17
| 7,830
|
py
|
Python
|
tests/v2/test_0086-nep13-ufunc.py
|
BioGeek/awkward-1.0
|
0cfb4e43c41d5c7d9830cc7b1d750485c0a93eb2
|
[
"BSD-3-Clause"
] | null | null | null |
tests/v2/test_0086-nep13-ufunc.py
|
BioGeek/awkward-1.0
|
0cfb4e43c41d5c7d9830cc7b1d750485c0a93eb2
|
[
"BSD-3-Clause"
] | null | null | null |
tests/v2/test_0086-nep13-ufunc.py
|
BioGeek/awkward-1.0
|
0cfb4e43c41d5c7d9830cc7b1d750485c0a93eb2
|
[
"BSD-3-Clause"
] | null | null | null |
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
from __future__ import absolute_import
import pytest # noqa: F401
import numpy as np # noqa: F401
import awkward as ak # noqa: F401
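# Wrap an array's typetracer (data-less) layout so tests can compare the form a real
# computation produces with the form predicted without touching any buffers.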
def tt(highlevel):
return ak._v2.highlevel.Array(highlevel.layout.typetracer)
def test_basic():
array = ak._v2.highlevel.Array([[1.1, 2.2, 3.3], [], [4.4, 5.5]])
assert ak.to_list(array + array) == [[2.2, 4.4, 6.6], [], [8.8, 11.0]]
assert (array + array).layout.form == (tt(array) + tt(array)).layout.form
assert ak.to_list(array * 2) == [[2.2, 4.4, 6.6], [], [8.8, 11.0]]
assert ak.to_list(2 * array) == [[2.2, 4.4, 6.6], [], [8.8, 11.0]]
assert (array * 2).layout.form == (tt(array) * 2).layout.form
assert (array * 2).layout.form == (2 * tt(array)).layout.form
def test_emptyarray():
one = ak._v2.highlevel.Array(ak._v2.contents.NumpyArray(np.array([])))
two = ak._v2.highlevel.Array(ak._v2.contents.EmptyArray())
assert ak.to_list(one + one) == []
assert ak.to_list(two + two) == []
assert ak.to_list(one + two) == []
assert (one + one).layout.form == (tt(one) + tt(one)).layout.form
assert (two + two).layout.form == (tt(two) + tt(two)).layout.form
assert (one + two).layout.form == (tt(one) + tt(two)).layout.form
def test_indexedarray():
content = ak._v2.contents.NumpyArray(
np.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9])
)
index1 = ak._v2.index.Index64(np.array([2, 4, 4, 0, 8], dtype=np.int64))
index2 = ak._v2.index.Index64(np.array([6, 4, 4, 8, 0], dtype=np.int64))
one = ak._v2.highlevel.Array(ak._v2.contents.IndexedArray(index1, content))
two = ak._v2.highlevel.Array(ak._v2.contents.IndexedArray(index2, content))
assert ak.to_list(one + two) == [8.8, 8.8, 8.8, 8.8, 8.8]
assert (one + two).layout.form == (tt(one) + tt(two)).layout.form
def test_indexedoptionarray():
content = ak._v2.contents.NumpyArray(
np.array([0.0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8, 9.9])
)
index1 = ak._v2.index.Index64(np.array([2, -1, 4, 0, 8], dtype=np.int64))
index2 = ak._v2.index.Index64(np.array([-1, 4, 4, -1, 0], dtype=np.int64))
one = ak._v2.highlevel.Array(ak._v2.contents.IndexedOptionArray(index1, content))
two = ak._v2.highlevel.Array(ak._v2.contents.IndexedOptionArray(index2, content))
assert ak.to_list(one + two) == [None, None, 8.8, None, 8.8]
assert (one + two).layout.form == (tt(one) + tt(two)).layout.form
uno = ak._v2.highlevel.Array(
ak._v2.contents.NumpyArray(np.array([2.2, 4.4, 4.4, 0.0, 8.8]))
)
dos = ak._v2.highlevel.Array(
ak._v2.contents.NumpyArray(np.array([6.6, 4.4, 4.4, 8.8, 0.0]))
)
assert ak.to_list(uno + two) == [None, 8.8, 8.8, None, 8.8]
assert (uno + two).layout.form == (tt(uno) + tt(two)).layout.form
assert ak.to_list(one + dos) == [8.8, None, 8.8, 8.8, 8.8]
assert (one + dos).layout.form == (tt(one) + tt(dos)).layout.form
def test_regularize_shape():
array = ak._v2.contents.NumpyArray(np.arange(2 * 3 * 5).reshape(2, 3, 5))
assert isinstance(array.toRegularArray(), ak._v2.contents.RegularArray)
assert ak.to_list(array.toRegularArray()) == ak.to_list(array)
def test_regulararray():
array = ak._v2.highlevel.Array(np.arange(2 * 3 * 5).reshape(2, 3, 5))
assert (
ak.to_list(array + array)
== (np.arange(2 * 3 * 5).reshape(2, 3, 5) * 2).tolist()
)
assert (array + array).layout.form == (tt(array) + tt(array)).layout.form
assert ak.to_list(array * 2) == (np.arange(2 * 3 * 5).reshape(2, 3, 5) * 2).tolist()
assert (array * 2).layout.form == (tt(array) * 2).layout.form
array2 = ak._v2.highlevel.Array(np.arange(2 * 1 * 5).reshape(2, 1, 5))
assert ak.to_list(array + array2) == ak.to_list(
np.arange(2 * 3 * 5).reshape(2, 3, 5) + np.arange(2 * 1 * 5).reshape(2, 1, 5)
)
assert (array + array2).layout.form == (tt(array) + tt(array2)).layout.form
array3 = ak._v2.highlevel.Array(np.arange(2 * 3 * 5).reshape(2, 3, 5).tolist())
assert ak.to_list(array + array3) == ak.to_list(
np.arange(2 * 3 * 5).reshape(2, 3, 5) + np.arange(2 * 3 * 5).reshape(2, 3, 5)
)
assert (array + array3).layout.form == (tt(array) + tt(array3)).layout.form
assert ak.to_list(array3 + array) == ak.to_list(
np.arange(2 * 3 * 5).reshape(2, 3, 5) + np.arange(2 * 3 * 5).reshape(2, 3, 5)
)
assert (array3 + array).layout.form == (tt(array3) + tt(array)).layout.form
def test_listarray():
content = ak._v2.contents.NumpyArray(np.arange(12, dtype=np.int64))
starts = ak._v2.index.Index64(np.array([3, 0, 999, 2, 6, 10], dtype=np.int64))
stops = ak._v2.index.Index64(np.array([7, 3, 999, 4, 6, 12], dtype=np.int64))
one = ak._v2.highlevel.Array(ak._v2.contents.ListArray(starts, stops, content))
two = ak._v2.highlevel.Array(
[[100, 100, 100, 100], [200, 200, 200], [], [300, 300], [], [400, 400]]
)
assert ak.to_list(one) == [[3, 4, 5, 6], [0, 1, 2], [], [2, 3], [], [10, 11]]
assert ak.to_list(one + 100) == [
[103, 104, 105, 106],
[100, 101, 102],
[],
[102, 103],
[],
[110, 111],
]
assert (one + 100).layout.form == (tt(one) + 100).layout.form
assert ak.to_list(one + two) == [
[103, 104, 105, 106],
[200, 201, 202],
[],
[302, 303],
[],
[410, 411],
]
assert (one + two).layout.form == (tt(one) + tt(two)).layout.form
assert ak.to_list(two + one) == [
[103, 104, 105, 106],
[200, 201, 202],
[],
[302, 303],
[],
[410, 411],
]
assert (two + one).layout.form == (tt(two) + tt(one)).layout.form
assert ak.to_list(
one + np.array([100, 200, 300, 400, 500, 600])[:, np.newaxis]
) == [[103, 104, 105, 106], [200, 201, 202], [], [402, 403], [], [610, 611]]
assert ak.to_list(
np.array([100, 200, 300, 400, 500, 600])[:, np.newaxis] + one
) == [[103, 104, 105, 106], [200, 201, 202], [], [402, 403], [], [610, 611]]
assert ak.to_list(one + 100) == [
[103, 104, 105, 106],
[100, 101, 102],
[],
[102, 103],
[],
[110, 111],
]
assert (one + 100).layout.form == (tt(one) + 100).layout.form
def test_unionarray():
one0 = ak._v2.contents.NumpyArray(np.array([0.0, 1.1, 2.2, 3.3], dtype=np.float64))
one1 = ak._v2.contents.NumpyArray(np.array([4, 5], dtype=np.int64))
onetags = ak._v2.index.Index8(np.array([0, 0, 0, 0, 1, 1], dtype=np.int8))
oneindex = ak._v2.index.Index64(np.array([0, 1, 2, 3, 0, 1], dtype=np.int64))
one = ak._v2.highlevel.Array(
ak._v2.contents.UnionArray(onetags, oneindex, [one0, one1])
)
two0 = ak._v2.contents.NumpyArray(np.array([0, 100], dtype=np.int64))
two1 = ak._v2.contents.NumpyArray(
np.array([200.3, 300.3, 400.4, 500.5], dtype=np.float64)
)
twotags = ak._v2.index.Index8(np.array([0, 0, 1, 1, 1, 1], dtype=np.int8))
twoindex = ak._v2.index.Index64(np.array([0, 1, 0, 1, 2, 3], dtype=np.int64))
two = ak._v2.highlevel.Array(
ak._v2.contents.UnionArray(twotags, twoindex, [two0, two1])
)
assert ak.to_list(one) == [0.0, 1.1, 2.2, 3.3, 4, 5]
assert ak.to_list(two) == [0, 100, 200.3, 300.3, 400.4, 500.5]
assert ak.to_list(one + two) == [0.0, 101.1, 202.5, 303.6, 404.4, 505.5]
assert ak.to_list(one + 100) == [100.0, 101.1, 102.2, 103.3, 104, 105]
assert ak.to_list(100 + one) == [100.0, 101.1, 102.2, 103.3, 104, 105]
assert (one + two).layout.form == (tt(one) + tt(two)).layout.form
assert (one + 100).layout.form == (tt(one) + 100).layout.form
assert (100 + one).layout.form == (100 + tt(one)).layout.form
| 43.259669
| 88
| 0.576884
|
d757fd0fa0b575ace877fe038608eb2395658967
| 1,836
|
py
|
Python
|
openstack_dashboard/dashboards/project/data_processing/job_executions/tests.py
|
maofutian/horizon
|
dab92e7d2f576caea8f81c8e22a516fb45633794
|
[
"Apache-2.0"
] | null | null | null |
openstack_dashboard/dashboards/project/data_processing/job_executions/tests.py
|
maofutian/horizon
|
dab92e7d2f576caea8f81c8e22a516fb45633794
|
[
"Apache-2.0"
] | null | null | null |
openstack_dashboard/dashboards/project/data_processing/job_executions/tests.py
|
maofutian/horizon
|
dab92e7d2f576caea8f81c8e22a516fb45633794
|
[
"Apache-2.0"
] | null | null | null |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
INDEX_URL = reverse('horizon:project:data_processing.job_executions:index')
DETAILS_URL = reverse(
'horizon:project:data_processing.job_executions:details', args=['id'])
class DataProcessingJobExecutionTests(test.TestCase):
@test.create_stubs({api.sahara: ('job_execution_list',)})
def test_index(self):
api.sahara.job_execution_list(IsA(http.HttpRequest)) \
.AndReturn(self.job_executions.list())
self.mox.ReplayAll()
res = self.client.get(INDEX_URL)
self.assertTemplateUsed(res,
'project/data_processing.job_executions/job_executions.html')
self.assertContains(res, 'Executions')
@test.create_stubs({api.sahara: ('job_execution_get',)})
def test_details(self):
api.sahara.job_execution_get(IsA(http.HttpRequest), IsA(unicode)) \
.AndReturn(self.job_executions.list()[0])
self.mox.ReplayAll()
res = self.client.get(DETAILS_URL)
self.assertTemplateUsed(res,
'project/data_processing.job_executions/details.html')
self.assertContains(res, 'RUNNING')
| 39.06383
| 75
| 0.727669
|
e83aee2622845b0e07cef678c951c81282af4ec6
| 12,887
|
py
|
Python
|
homeassistant/components/mqtt/siren.py
|
JulienJulien/core
|
b0c36d77292ebcab9810f092c196ef00043ae8b2
|
[
"Apache-2.0"
] | null | null | null |
homeassistant/components/mqtt/siren.py
|
JulienJulien/core
|
b0c36d77292ebcab9810f092c196ef00043ae8b2
|
[
"Apache-2.0"
] | 5
|
2022-03-01T06:32:24.000Z
|
2022-03-31T07:07:45.000Z
|
homeassistant/components/mqtt/siren.py
|
maknetwork/core
|
80da5c52aedb14241837a75adb13eb5de956b8ad
|
[
"Apache-2.0"
] | null | null | null |
"""Support for MQTT sirens."""
from __future__ import annotations
import copy
import functools
import json
import logging
from typing import Any
import voluptuous as vol
from homeassistant.components import siren
from homeassistant.components.siren import (
TURN_ON_SCHEMA,
SirenEntity,
process_turn_on_params,
)
from homeassistant.components.siren.const import (
ATTR_AVAILABLE_TONES,
ATTR_DURATION,
ATTR_TONE,
ATTR_VOLUME_LEVEL,
SUPPORT_DURATION,
SUPPORT_TONES,
SUPPORT_TURN_OFF,
SUPPORT_TURN_ON,
SUPPORT_VOLUME_SET,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_NAME,
CONF_OPTIMISTIC,
CONF_PAYLOAD_OFF,
CONF_PAYLOAD_ON,
)
from homeassistant.core import HomeAssistant, callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import PLATFORMS, MqttCommandTemplate, MqttValueTemplate, subscription
from .. import mqtt
from .const import (
CONF_COMMAND_TEMPLATE,
CONF_COMMAND_TOPIC,
CONF_ENCODING,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
DOMAIN,
PAYLOAD_EMPTY_JSON,
PAYLOAD_NONE,
)
from .debug_info import log_messages
from .mixins import MQTT_ENTITY_COMMON_SCHEMA, MqttEntity, async_setup_entry_helper
DEFAULT_NAME = "MQTT Siren"
DEFAULT_PAYLOAD_ON = "ON"
DEFAULT_PAYLOAD_OFF = "OFF"
DEFAULT_OPTIMISTIC = False
ENTITY_ID_FORMAT = siren.DOMAIN + ".{}"
CONF_AVAILABLE_TONES = "available_tones"
CONF_COMMAND_OFF_TEMPLATE = "command_off_template"
CONF_STATE_ON = "state_on"
CONF_STATE_OFF = "state_off"
CONF_STATE_VALUE_TEMPLATE = "state_value_template"
CONF_SUPPORT_DURATION = "support_duration"
CONF_SUPPORT_VOLUME_SET = "support_volume_set"
STATE = "state"
PLATFORM_SCHEMA = mqtt.MQTT_RW_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_AVAILABLE_TONES): cv.ensure_list,
vol.Optional(CONF_COMMAND_TEMPLATE): cv.template,
vol.Optional(CONF_COMMAND_OFF_TEMPLATE): cv.template,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean,
vol.Optional(CONF_PAYLOAD_OFF, default=DEFAULT_PAYLOAD_OFF): cv.string,
vol.Optional(CONF_PAYLOAD_ON, default=DEFAULT_PAYLOAD_ON): cv.string,
vol.Optional(CONF_STATE_OFF): cv.string,
vol.Optional(CONF_STATE_ON): cv.string,
vol.Optional(CONF_STATE_VALUE_TEMPLATE): cv.template,
vol.Optional(CONF_SUPPORT_DURATION, default=True): cv.boolean,
vol.Optional(CONF_SUPPORT_VOLUME_SET, default=True): cv.boolean,
},
).extend(MQTT_ENTITY_COMMON_SCHEMA.schema)
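# Illustrative configuration.yaml entry that the schema above would accept.
# This is a sketch only -- the topic names and payload values are hypothetical,
# not taken from the Home Assistant documentation.
#
#   siren:
#     - platform: mqtt
#       name: "Garage siren"
#       command_topic: "garage/siren/set"
#       state_topic: "garage/siren/state"
#       payload_on: "ON"
#       payload_off: "OFF"
#       available_tones: ["siren", "chime"]   # enables the tones feature
#       support_duration: true
#       support_volume_set: false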
DISCOVERY_SCHEMA = vol.All(PLATFORM_SCHEMA.extend({}, extra=vol.REMOVE_EXTRA))
MQTT_SIREN_ATTRIBUTES_BLOCKED = frozenset(
{
ATTR_AVAILABLE_TONES,
ATTR_DURATION,
ATTR_TONE,
ATTR_VOLUME_LEVEL,
}
)
SUPPORTED_BASE = SUPPORT_TURN_OFF | SUPPORT_TURN_ON
SUPPORTED_ATTRIBUTES = {
ATTR_DURATION: SUPPORT_DURATION,
ATTR_TONE: SUPPORT_TONES,
ATTR_VOLUME_LEVEL: SUPPORT_VOLUME_SET,
}
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up MQTT siren through configuration.yaml."""
await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
await _async_setup_entity(hass, async_add_entities, config)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up MQTT siren dynamically through MQTT discovery."""
setup = functools.partial(
_async_setup_entity, hass, async_add_entities, config_entry=config_entry
)
await async_setup_entry_helper(hass, siren.DOMAIN, setup, DISCOVERY_SCHEMA)
async def _async_setup_entity(
hass, async_add_entities, config, config_entry=None, discovery_data=None
):
"""Set up the MQTT siren."""
async_add_entities([MqttSiren(hass, config, config_entry, discovery_data)])
class MqttSiren(MqttEntity, SirenEntity):
"""Representation of a siren that can be controlled using MQTT."""
_entity_id_format = ENTITY_ID_FORMAT
_attributes_extra_blocked = MQTT_SIREN_ATTRIBUTES_BLOCKED
def __init__(self, hass, config, config_entry, discovery_data):
"""Initialize the MQTT siren."""
self._attr_name = config[CONF_NAME]
self._attr_should_poll = False
self._supported_features = SUPPORTED_BASE
self._attr_is_on = None
self._state_on = None
self._state_off = None
self._optimistic = None
self._attr_extra_state_attributes: dict[str, Any] = {}
self.target = None
MqttEntity.__init__(self, hass, config, config_entry, discovery_data)
@staticmethod
def config_schema():
"""Return the config schema."""
return DISCOVERY_SCHEMA
def _setup_from_config(self, config):
"""(Re)Setup the entity."""
state_on = config.get(CONF_STATE_ON)
self._state_on = state_on if state_on else config[CONF_PAYLOAD_ON]
state_off = config.get(CONF_STATE_OFF)
self._state_off = state_off if state_off else config[CONF_PAYLOAD_OFF]
if config[CONF_SUPPORT_DURATION]:
self._supported_features |= SUPPORT_DURATION
self._attr_extra_state_attributes[ATTR_DURATION] = None
if config.get(CONF_AVAILABLE_TONES):
self._supported_features |= SUPPORT_TONES
self._attr_available_tones = config[CONF_AVAILABLE_TONES]
self._attr_extra_state_attributes[ATTR_TONE] = None
if config[CONF_SUPPORT_VOLUME_SET]:
self._supported_features |= SUPPORT_VOLUME_SET
self._attr_extra_state_attributes[ATTR_VOLUME_LEVEL] = None
self._optimistic = config[CONF_OPTIMISTIC] or CONF_STATE_TOPIC not in config
self._attr_is_on = False if self._optimistic else None
command_template = config.get(CONF_COMMAND_TEMPLATE)
command_off_template = config.get(CONF_COMMAND_OFF_TEMPLATE) or config.get(
CONF_COMMAND_TEMPLATE
)
self._command_templates = {
CONF_COMMAND_TEMPLATE: MqttCommandTemplate(
command_template, entity=self
).async_render
if command_template
else None,
CONF_COMMAND_OFF_TEMPLATE: MqttCommandTemplate(
command_off_template, entity=self
).async_render
if command_off_template
else None,
}
self._value_template = MqttValueTemplate(
config.get(CONF_STATE_VALUE_TEMPLATE),
entity=self,
).async_render_with_possible_json_value
async def _subscribe_topics(self):
"""(Re)Subscribe to topics."""
@callback
@log_messages(self.hass, self.entity_id)
def state_message_received(msg):
"""Handle new MQTT state messages."""
payload = self._value_template(msg.payload)
if not payload or payload == PAYLOAD_EMPTY_JSON:
_LOGGER.debug(
"Ignoring empty payload '%s' after rendering for topic %s",
payload,
msg.topic,
)
return
json_payload = {}
if payload in [self._state_on, self._state_off, PAYLOAD_NONE]:
json_payload = {STATE: payload}
else:
try:
json_payload = json.loads(payload)
_LOGGER.debug(
"JSON payload detected after processing payload '%s' on topic %s",
json_payload,
msg.topic,
)
except json.decoder.JSONDecodeError:
_LOGGER.warning(
"No valid (JSON) payload detected after processing payload '%s' on topic %s",
json_payload,
msg.topic,
)
return
if STATE in json_payload:
if json_payload[STATE] == self._state_on:
self._attr_is_on = True
if json_payload[STATE] == self._state_off:
self._attr_is_on = False
if json_payload[STATE] == PAYLOAD_NONE:
self._attr_is_on = None
del json_payload[STATE]
if json_payload:
# process attributes
try:
vol.All(TURN_ON_SCHEMA)(json_payload)
except vol.MultipleInvalid as invalid_siren_parameters:
_LOGGER.warning(
"Unable to update siren state attributes from payload '%s': %s",
json_payload,
invalid_siren_parameters,
)
return
self._update(process_turn_on_params(self, json_payload))
self.async_write_ha_state()
if self._config.get(CONF_STATE_TOPIC) is None:
# Force into optimistic mode.
self._optimistic = True
else:
self._sub_state = await subscription.async_subscribe_topics(
self.hass,
self._sub_state,
{
CONF_STATE_TOPIC: {
"topic": self._config.get(CONF_STATE_TOPIC),
"msg_callback": state_message_received,
"qos": self._config[CONF_QOS],
"encoding": self._config[CONF_ENCODING] or None,
}
},
)
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._optimistic
@property
def extra_state_attributes(self) -> dict:
"""Return the state attributes."""
mqtt_attributes = super().extra_state_attributes
attributes = (
copy.deepcopy(mqtt_attributes) if mqtt_attributes is not None else {}
)
attributes.update(self._attr_extra_state_attributes)
return attributes
@property
def supported_features(self) -> int:
"""Flag supported features."""
return self._supported_features
async def _async_publish(
self,
topic: str,
template: str,
value: Any,
variables: dict[str, Any] | None = None,
) -> None:
"""Publish MQTT payload with optional command template."""
template_variables = {STATE: value}
if variables is not None:
template_variables.update(variables)
payload = (
self._command_templates[template](value, template_variables)
if self._command_templates[template]
else json.dumps(template_variables)
)
if payload and payload not in PAYLOAD_NONE:
await mqtt.async_publish(
self.hass,
self._config[topic],
payload,
self._config[CONF_QOS],
self._config[CONF_RETAIN],
self._config[CONF_ENCODING],
)
async def async_turn_on(self, **kwargs) -> None:
"""Turn the siren on.
This method is a coroutine.
"""
await self._async_publish(
CONF_COMMAND_TOPIC,
CONF_COMMAND_TEMPLATE,
self._config[CONF_PAYLOAD_ON],
kwargs,
)
if self._optimistic:
# Optimistically assume that siren has changed state.
_LOGGER.debug("Writing state attributes %s", kwargs)
self._attr_is_on = True
self._update(kwargs)
self.async_write_ha_state()
async def async_turn_off(self, **kwargs) -> None:
"""Turn the siren off.
This method is a coroutine.
"""
await self._async_publish(
CONF_COMMAND_TOPIC,
CONF_COMMAND_OFF_TEMPLATE,
self._config[CONF_PAYLOAD_OFF],
)
if self._optimistic:
# Optimistically assume that siren has changed state.
self._attr_is_on = False
self.async_write_ha_state()
def _update(self, data: dict[str, Any]) -> None:
"""Update the extra siren state attributes."""
for attribute, support in SUPPORTED_ATTRIBUTES.items():
if self._supported_features & support and attribute in data:
self._attr_extra_state_attributes[attribute] = data[attribute]
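    # Illustrative walk-through with a hypothetical state payload: a message of
    # '{"state": "ON", "duration": 30, "volume_level": 0.5}' is parsed by
    # state_message_received(), which sets _attr_is_on = True because the
    # "state" value matches the configured ON state, and then _update() copies
    # "duration" and "volume_level" into the extra state attributes -- but only
    # for the features that are enabled in _supported_features.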
| 34.365333
| 101
| 0.640878
|
5be3903f96b4f346c15985dd788a5043a9be4de1
| 1,396
|
py
|
Python
|
d7a/dll/frame.py
|
christophevg/pyd7a
|
a785b50c1853988ae6770b94f12422b21a088523
|
[
"Apache-2.0"
] | 3
|
2017-03-14T15:32:02.000Z
|
2021-08-15T18:17:42.000Z
|
d7a/dll/frame.py
|
christophevg/pyd7a
|
a785b50c1853988ae6770b94f12422b21a088523
|
[
"Apache-2.0"
] | null | null | null |
d7a/dll/frame.py
|
christophevg/pyd7a
|
a785b50c1853988ae6770b94f12422b21a088523
|
[
"Apache-2.0"
] | 1
|
2015-11-24T08:00:14.000Z
|
2015-11-24T08:00:14.000Z
|
from d7a.support.schema import Validatable, Types
from d7a.dll.control import Control
from d7a.d7anp.frame import Frame as D7anpFrame
from PyCRC.CRCCCITT import CRCCCITT
class Frame(Validatable):
SCHEMA = [{
"length": Types.BYTE(),
"subnet": Types.BYTE(),
"control": Types.OBJECT(Control),
"target_address": Types.BYTES(), # TODO max size?
"d7anp_frame": Types.OBJECT(D7anpFrame), # TODO assuming foreground frames for now
"crc16" : Types.BITS(16) # TODO does not work, look into this later {'validator': validate_crc }
}]
def __init__(self, length, subnet, control, target_address, d7anp_frame, crc16):
self.length = length
self.subnet = subnet
self.control = control
self.target_address = target_address
self.d7anp_frame = d7anp_frame
self.crc16 = crc16
# TODO validate CRC
super(Frame, self).__init__()
# def validate_crc(self, value, error):
# raw_data = []
# raw_data.append(self.length)
# raw_data.append(self.subnet)
# raw_data.append(self.control)
# raw_data.append(self.target_address)
# raw_data.append(self.payload)
# crc = CRCCCITT().calculate(raw_data)
def __iter__(self):
yield self.length
yield self.subnet
for byte in self.control: yield byte
for byte in self.target_address: yield byte
for byte in self.d7anp_frame: yield byte
yield self.crc16
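# Usage sketch (illustrative only; constructing Control and D7anpFrame is
# omitted because their signatures live in their own modules): iterating a
# populated Frame yields its fields in wire order, so serializing it is just
#
#   serialized = list(frame)
#   # -> [length, subnet, control bytes..., target address bytes...,
#   #     d7anp frame bytes..., crc16]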
| 31.727273
| 101
| 0.694842
|
944880beffcf8a2ec0e7e57522130a92e724f73b
| 32,299
|
py
|
Python
|
cinder-14.0.0/cinder/volume/drivers/inspur/as13000/as13000_driver.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | null | null | null |
cinder-14.0.0/cinder/volume/drivers/inspur/as13000/as13000_driver.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 5
|
2019-08-14T06:46:03.000Z
|
2021-12-13T20:01:25.000Z
|
cinder-14.0.0/cinder/volume/drivers/inspur/as13000/as13000_driver.py
|
scottwedge/OpenStack-Stein
|
7077d1f602031dace92916f14e36b124f474de15
|
[
"Apache-2.0"
] | 2
|
2020-03-15T01:24:15.000Z
|
2020-07-22T20:34:26.000Z
|
# Copyright 2017 Inspur Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume driver for Inspur AS13000
"""
import ipaddress
import json
import random
import re
import time
import eventlet
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import units
import requests
from cinder import exception
from cinder.i18n import _
from cinder import interface
from cinder import utils
from cinder.volume.drivers.san import san
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
inspur_as13000_opts = [
cfg.ListOpt(
'as13000_ipsan_pools',
default=['Pool0'],
help='The Storage Pools Cinder should use, a comma separated list.'),
cfg.IntOpt(
'as13000_token_available_time',
default=3300,
min=600, max=3600,
help='The effective time of token validity in seconds.'),
cfg.StrOpt(
'as13000_meta_pool',
help='The pool which is used as a meta pool when creating a volume, '
'and it should be a replication pool at present. '
'If not set, the driver will choose a replication pool '
'from the value of as13000_ipsan_pools.'),
]
CONF = cfg.CONF
CONF.register_opts(inspur_as13000_opts)
class RestAPIExecutor(object):
def __init__(self, hostname, port, username, password):
self._username = username
self._password = password
self._token = None
self._baseurl = 'http://%s:%s/rest' % (hostname, port)
def login(self):
"""Login the AS13000 and store the token."""
self._token = self._login()
LOG.debug('Login the AS13000.')
def _login(self):
"""Do request to login the AS13000 and get the token."""
method = 'security/token'
params = {'name': self._username, 'password': self._password}
token = self.send_rest_api(method=method, params=params,
request_type='post').get('token')
return token
@utils.retry(exception.VolumeDriverException, interval=1, retries=3)
def send_rest_api(self, method, params=None, request_type='post'):
try:
return self.send_api(method, params, request_type)
except exception.VolumeDriverException:
self.login()
raise
@staticmethod
@utils.trace_method
def do_request(cmd, url, header, data):
"""Send request to the storage and handle the response."""
if cmd in ['post', 'get', 'put', 'delete']:
req = getattr(requests, cmd)(url, data=data, headers=header)
else:
msg = (_('Unsupported cmd: %s.') % cmd)
raise exception.VolumeBackendAPIException(msg)
response = req.json()
code = req.status_code
LOG.debug('CODE: %(code)s, RESPONSE: %(response)s.',
{'code': code, 'response': response})
if code != 200:
msg = (_('Code: %(code)s, URL: %(url)s, Message: %(msg)s.')
% {'code': req.status_code,
'url': req.url,
'msg': req.text})
LOG.error(msg)
raise exception.VolumeDriverException(msg)
return response
@utils.trace
def send_api(self, method, params=None, request_type='post'):
if params:
params = json.dumps(params)
url = '%s/%s' % (self._baseurl, method)
        # header is not needed when the driver logs in to the backend
if method == 'security/token':
if request_type == 'delete':
header = {'X-Auth-Token': self._token}
else:
header = None
else:
if not self._token:
self.login()
header = {'X-Auth-Token': self._token}
response = self.do_request(request_type, url, header, params)
try:
code = response.get('code')
if code == 0:
if request_type == 'get':
data = response.get('data')
else:
if method == 'security/token':
data = response.get('data')
else:
data = response.get('message')
data = str(data).lower()
if hasattr(data, 'success'):
return
elif code == 301:
msg = _('Token is expired.')
LOG.error(msg)
raise exception.VolumeDriverException(msg)
else:
message = response.get('message')
msg = (_('Unexpected RestAPI response: %(code)d %(msg)s.') % {
'code': code, 'msg': message})
LOG.error(msg)
raise exception.VolumeBackendAPIException(msg)
except ValueError:
msg = _("Deal with response failed.")
raise exception.VolumeDriverException(msg)
return data
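# Illustrative REST response shapes handled by send_api() above (all values
# are hypothetical):
#   success:       {"code": 0,   "message": "...success...", "data": {...}}
#   token expired: {"code": 301, "message": "..."} -> VolumeDriverException,
#                  which send_rest_api() handles by logging in again and
#                  retrying via the @utils.retry decorator
#   other errors:  {"code": <n>, "message": "..."} -> VolumeBackendAPIException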
@interface.volumedriver
class AS13000Driver(san.SanISCSIDriver):
"""Driver for Inspur AS13000 storage.
.. code-block:: none
Version history:
1.0.0 - Initial driver
"""
VENDOR = 'INSPUR'
VERSION = '1.0.0'
PROTOCOL = 'iSCSI'
# ThirdPartySystems wiki page
CI_WIKI_NAME = 'INSPUR_CI'
def __init__(self, *args, **kwargs):
super(AS13000Driver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(inspur_as13000_opts)
self.hostname = self.configuration.san_ip
self.port = self.configuration.safe_get('san_api_port') or 8088
self.username = self.configuration.san_login
self.password = self.configuration.san_password
self.token_available_time = (self.configuration.
as13000_token_available_time)
self.pools = self.configuration.as13000_ipsan_pools
self.meta_pool = self.configuration.as13000_meta_pool
self.pools_info = {}
self.nodes = []
self._token_time = 0
# get the RestAPIExecutor
self._rest = RestAPIExecutor(self.hostname,
self.port,
self.username,
self.password)
@staticmethod
def get_driver_options():
return inspur_as13000_opts
@utils.trace
def do_setup(self, context):
# get tokens for the driver
self._rest.login()
self._token_time = time.time()
# get available nodes in the backend
for node in self._get_cluster_status():
if node.get('healthStatus') == 1 and node.get('ip'):
self.nodes.append(node)
# collect pools info
meta_pools = [self.meta_pool] if self.meta_pool else []
self.pools_info = self._get_pools_info(self.pools + meta_pools)
        # set up the meta pool if it is not configured
if not self.meta_pool:
for pool_info in self.pools_info.values():
if pool_info['type'] in (1, '1'):
self.meta_pool = pool_info['name']
break
self._check_pools()
self._check_meta_pool()
@utils.trace
def check_for_setup_error(self):
"""Do check to make sure service is available."""
# check the required flags in conf
required_flags = ['san_ip', 'san_login', 'san_password',
'as13000_ipsan_pools']
for flag in required_flags:
value = self.configuration.safe_get(flag)
if not value:
msg = (_('Required flag %s is not set.') % flag)
LOG.error(msg)
raise exception.InvalidConfigurationValue(option=flag,
value=value)
        # make sure at least one healthy node is available
if not self.nodes:
msg = _('No healthy nodes are available!')
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
def _check_pools(self):
"""Check the pool in conf exist in the AS13000."""
if not set(self.pools).issubset(self.pools_info):
pools = set(self.pools) - set(self.pools_info)
msg = _('Pools %s do not exist.') % pools
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
def _check_meta_pool(self):
"""Check whether the meta pool is valid."""
if not self.meta_pool:
msg = _('Meta pool is not set.')
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
if self.meta_pool not in self.pools_info:
msg = _('Meta pool %s does not exist.') % self.meta_pool
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
if self.pools_info[self.meta_pool]['type'] not in (1, '1'):
msg = _('Meta pool %s is not a replication pool.') % self.meta_pool
LOG.error(msg)
raise exception.InvalidInput(reason=msg)
@utils.trace
def create_volume(self, volume):
"""Create volume in the backend."""
pool = volume_utils.extract_host(volume.host, level='pool')
size = volume.size * units.Ki
name = self._trans_name_down(volume.name)
method = 'block/lvm'
request_type = "post"
params = {
"name": name,
"capacity": size,
"dataPool": pool,
"dataPoolType": self.pools_info[pool]['type'],
"metaPool": self.meta_pool
}
self._rest.send_rest_api(method=method, params=params,
request_type=request_type)
@utils.trace
def create_volume_from_snapshot(self, volume, snapshot):
"""Create a new volume base on a specific snapshot."""
if snapshot.volume_size > volume.size:
msg = (_("create_volume_from_snapshot: snapshot %(snapshot_name)s "
"size is %(snapshot_size)dGB and doesn't fit in target "
"volume %(volume_name)s of size %(volume_size)dGB.") %
{'snapshot_name': snapshot.name,
'snapshot_size': snapshot.volume_size,
'volume_name': volume.name,
'volume_size': volume.size})
LOG.error(msg)
raise exception.InvalidInput(message=msg)
src_vol_name = self._trans_name_down(snapshot.volume_name)
source_vol = snapshot.volume
src_pool = volume_utils.extract_host(source_vol['host'],
level='pool')
dest_name = self._trans_name_down(volume.name)
dest_pool = volume_utils.extract_host(volume.host, level='pool')
snap_name = self._trans_name_down(snapshot.name)
        # lock the snapshot before cloning from it
self._snapshot_lock_op('lock', src_vol_name, snap_name, src_pool)
        # clone from the snapshot to a volume
method = 'snapshot/volume/cloneLvm'
request_type = 'post'
params = {'originalLvm': src_vol_name,
'originalPool': src_pool,
'originalSnap': snap_name,
'name': dest_name,
'pool': dest_pool}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
        # fill the cloned volume to make it independent
self._filling_volume(dest_name, dest_pool)
# wait until the cloned volume has been filled
self._wait_volume_filled(dest_name, dest_pool)
# unlock the original snapshot
self._snapshot_lock_op('unlock', src_vol_name, snap_name, src_pool)
if volume.size > snapshot.volume_size:
self.extend_volume(volume, volume.size)
@utils.trace
def create_cloned_volume(self, volume, src_vref):
"""Clone a volume."""
if src_vref.size > volume.size:
msg = (_("create_cloned_volume: source volume %(src_vol)s "
"size is %(src_size)dGB and doesn't fit in target "
"volume %(tgt_vol)s of size %(tgt_size)dGB.") %
{'src_vol': src_vref.name,
'src_size': src_vref.size,
'tgt_vol': volume.name,
'tgt_size': volume.size})
LOG.error(msg)
raise exception.InvalidInput(message=msg)
dest_pool = volume_utils.extract_host(volume.host, level='pool')
dest_vol_name = self._trans_name_down(volume.name)
src_pool = volume_utils.extract_host(src_vref.host, level='pool')
src_vol_name = self._trans_name_down(src_vref.name)
method = 'block/lvm/clone'
request_type = 'post'
params = {'srcVolumeName': src_vol_name,
'srcPoolName': src_pool,
'destVolumeName': dest_vol_name,
'destPoolName': dest_pool}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
if volume.size > src_vref.size:
self.extend_volume(volume, volume.size)
@utils.trace
def extend_volume(self, volume, new_size):
"""Extend volume to new size."""
name = self._trans_name_down(volume.name)
if not self._check_volume(volume):
msg = _('Extend Volume Failed: Volume %s does not exist.') % name
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
size = new_size * units.Ki
pool = volume_utils.extract_host(volume.host, level='pool')
method = 'block/lvm'
request_type = 'put'
params = {'pool': pool,
'name': name,
'newCapacity': size}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
@utils.trace
def delete_volume(self, volume):
"""Delete volume from AS13000."""
name = self._trans_name_down(volume.name)
if not self._check_volume(volume):
            # if the volume does not exist in the backend, the driver will do
# nothing but log it
LOG.info('Tried to delete non-existent volume %(name)s.',
{'name': name})
return
pool = volume_utils.extract_host(volume.host, level='pool')
method = 'block/lvm?pool=%s&lvm=%s' % (pool, name)
request_type = 'delete'
self._rest.send_rest_api(method=method, request_type=request_type)
@utils.trace
def create_snapshot(self, snapshot):
"""Create snapshot of volume in backend.
The snapshot type of AS13000 is copy-on-write.
"""
source_volume = snapshot.volume
volume_name = self._trans_name_down(source_volume.name)
if not self._check_volume(source_volume):
msg = (_('create_snapshot: Source_volume %s does not exist.')
% volume_name)
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
pool = volume_utils.extract_host(source_volume.host, level='pool')
snapshot_name = self._trans_name_down(snapshot.name)
method = 'snapshot/volume'
request_type = 'post'
params = {'snapName': snapshot_name,
'volumeName': volume_name,
'poolName': pool,
'snapType': 'r'}
self._rest.send_rest_api(method=method, params=params,
request_type=request_type)
@utils.trace
def delete_snapshot(self, snapshot):
"""Delete snapshot of volume."""
source_volume = snapshot.volume
volume_name = self._trans_name_down(source_volume.name)
if self._check_volume(source_volume) is False:
msg = (_('delete_snapshot: Source_volume %s does not exist.')
% volume_name)
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
pool = volume_utils.extract_host(source_volume.host, level='pool')
snapshot_name = self._trans_name_down(snapshot.name)
method = ('snapshot/volume?snapName=%s&volumeName=%s&poolName=%s'
% (snapshot_name, volume_name, pool))
request_type = 'delete'
self._rest.send_rest_api(method=method, request_type=request_type)
@utils.trace
def get_volume_stats(self, refresh=False):
"""Get volume stats.
If we haven't gotten stats yet or 'refresh' is True,
run update the stats first.
"""
if not self._stats or refresh:
self._update_volume_stats()
return self._stats
@utils.trace
def _update_volume_stats(self):
"""Update the backend stats including driver info and pools info."""
        # As _update_volume_stats runs periodically, we can check and
        # refresh the token each time it runs.
time_difference = time.time() - self._token_time
if time_difference > self.token_available_time:
self._rest.login()
self._token_time = time.time()
LOG.debug('Token of the Driver has been refreshed.')
# update the backend stats
data = {}
backend_name = self.configuration.safe_get('volume_backend_name')
data['vendor_name'] = self.VENDOR
data['driver_version'] = self.VERSION
data['storage_protocol'] = self.PROTOCOL
data['volume_backend_name'] = backend_name
data['pools'] = self._get_pools_stats()
self._stats = data
LOG.debug('Update volume stats : %(stats)s.', {'stats': self._stats})
def _build_target_portal(self, ip, port):
"""Build iSCSI portal for both IPV4 and IPV6."""
addr = ipaddress.ip_address(ip)
if addr.version == 4:
ipaddr = ip
else:
ipaddr = '[%s]' % ip
return '%(ip)s:%(port)s' % {'ip': ipaddr, 'port': port}
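    # For illustration: _build_target_portal("10.0.0.5", "3260") returns
    # "10.0.0.5:3260", while an IPv6 address such as "fd00::5" is wrapped in
    # brackets, giving "[fd00::5]:3260".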
@utils.trace
def initialize_connection(self, volume, connector, **kwargs):
"""Initialize connection steps:
1. check if the host exist in targets.
2.1 if there is target that has the host, add the volume to the target.
2.2 if not, create an target add host to host add volume to host.
3. return the target info.
"""
host_ip = connector['ip']
multipath = connector.get("multipath", False)
        # Check whether the host already exists in a target
host_exist, target_name, node_of_target = self._get_target_from_conn(
host_ip)
if not host_exist:
            # the host doesn't exist yet, so create a target and bind the host;
            # first generate the target name
_TARGET_NAME_PATTERN = 'target.inspur.%(host)s-%(padding)s'
_padding = str(random.randint(0, 99999999)).zfill(8)
target_name = _TARGET_NAME_PATTERN % {'host': connector['host'],
'padding': _padding}
# decide the nodes to be used
if multipath:
node_of_target = [node['name'] for node in self.nodes]
else:
# single node
node_of_target = [self.nodes[0]['name']]
# create the target
nodes = ','.join(node_of_target)
self._create_target(target_node=nodes,
target_name=target_name)
self._add_host_to_target(host_ip=host_ip,
target_name=target_name)
self._add_lun_to_target(target_name=target_name, volume=volume)
if self.configuration.use_chap_auth:
self._add_chap_to_target(target_name,
self.configuration.chap_username,
self.configuration.chap_password)
lun_id = self._get_lun_id(volume, target_name)
connection_data = {
'target_discovered': True,
'volume_id': volume.id,
}
portals = []
for node_name in node_of_target:
for node in self.nodes:
if node['name'] == node_name:
portal = self._build_target_portal(node.get('ip'), '3260')
portals.append(portal)
if multipath:
connection_data.update({
'target_portals': portals,
'target_luns': [int(lun_id)] * len(portals),
'target_iqns': [target_name] * len(portals)
})
else:
# single node
connection_data.update({
'target_portal': portals[0],
'target_lun': int(lun_id),
'target_iqn': target_name
})
if self.configuration.use_chap_auth:
connection_data['auth_method'] = 'CHAP'
connection_data['auth_username'] = self.configuration.chap_username
connection_data['auth_password'] = self.configuration.chap_password
return {'driver_volume_type': 'iscsi', 'data': connection_data}
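    # Illustrative single-path return value (all values are hypothetical):
    #   {'driver_volume_type': 'iscsi',
    #    'data': {'target_discovered': True,
    #             'volume_id': '<volume uuid>',
    #             'target_portal': '10.0.0.5:3260',
    #             'target_lun': 1,
    #             'target_iqn': 'target.inspur.host1-00001234'}}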
@utils.trace
def terminate_connection(self, volume, connector, **kwargs):
"""Delete lun from target.
        If the target has no LUNs left, the driver will delete the target.
"""
volume_name = self._trans_name_down(volume.name)
target_name = None
lun_id = None
host_ip = None
if connector and 'ip' in connector:
host_ip = connector['ip']
target_list = self._get_target_list()
for target in target_list:
if not host_ip or host_ip in target['hostIp']:
for lun in target['lun']:
if volume_name == lun['lvm']:
target_name = target['name']
lun_id = lun['lunID']
break
if lun_id is not None:
break
if lun_id is None:
return
self._delete_lun_from_target(target_name=target_name,
lun_id=lun_id)
luns = self._get_lun_list(target_name)
if not luns:
self._delete_target(target_name)
def _get_pools_info(self, pools):
"""Get the pools info."""
method = 'block/pool?type=2'
requests_type = 'get'
pools_data = self._rest.send_rest_api(method=method,
request_type=requests_type)
pools_info = {}
for pool_data in pools_data:
if pool_data['name'] in pools:
pools_info[pool_data['name']] = pool_data
return pools_info
@utils.trace
def _get_pools_stats(self):
"""Generate the pool stat information."""
pools_info = self._get_pools_info(self.pools)
pools = []
for pool_info in pools_info.values():
total_capacity = pool_info.get('totalCapacity')
total_capacity_gb = self._unit_convert(total_capacity)
used_capacity = pool_info.get('usedCapacity')
used_capacity_gb = self._unit_convert(used_capacity)
free_capacity_gb = total_capacity_gb - used_capacity_gb
pool = {
'pool_name': pool_info.get('name'),
'total_capacity_gb': total_capacity_gb,
'free_capacity_gb': free_capacity_gb,
'thin_provisioning_support': True,
'thick_provisioning_support': False,
}
pools.append(pool)
return pools
@utils.trace
def _get_target_from_conn(self, host_ip):
"""Get target information base on the host ip."""
host_exist = False
target_name = None
node = None
target_list = self._get_target_list()
for target in target_list:
if host_ip in target['hostIp']:
host_exist = True
target_name = target['name']
node = target['node']
break
return host_exist, target_name, node
@utils.trace
def _get_target_list(self):
"""Get a list of all targets in the backend."""
method = 'block/target/detail'
request_type = 'get'
data = self._rest.send_rest_api(method=method,
request_type=request_type)
return data
@utils.trace
def _create_target(self, target_name, target_node):
"""Create a target on the specified node."""
method = 'block/target'
request_type = 'post'
params = {'name': target_name, 'nodeName': target_node}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
@utils.trace
def _delete_target(self, target_name):
"""Delete all target of all the node."""
method = 'block/target?name=%s' % target_name
request_type = 'delete'
self._rest.send_rest_api(method=method,
request_type=request_type)
@utils.trace
def _add_chap_to_target(self, target_name, chap_username, chap_password):
"""Add CHAP to target."""
method = 'block/chap/bond'
request_type = 'post'
params = {'target': target_name,
'user': chap_username,
'password': chap_password}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
@utils.trace
def _add_host_to_target(self, host_ip, target_name):
"""Add the authority of host to target."""
method = 'block/host'
request_type = 'post'
params = {'name': target_name, 'hostIp': host_ip}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
@utils.trace
@utils.retry(exceptions=exception.VolumeDriverException,
interval=1,
retries=3)
def _add_lun_to_target(self, target_name, volume):
"""Add volume to target."""
pool = volume_utils.extract_host(volume.host, level='pool')
volume_name = self._trans_name_down(volume.name)
method = 'block/lun'
request_type = 'post'
params = {'name': target_name,
'pool': pool,
'lvm': volume_name}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
@utils.trace
def _delete_lun_from_target(self, target_name, lun_id):
"""Delete lun from target_name."""
method = 'block/lun?name=%s&id=%s&force=1' % (target_name, lun_id)
request_type = 'delete'
self._rest.send_rest_api(method=method, request_type=request_type)
@utils.trace
def _get_lun_list(self, target_name):
"""Get all lun list of the target."""
method = 'block/lun?name=%s' % target_name
request_type = 'get'
return self._rest.send_rest_api(method=method,
request_type=request_type)
@utils.trace
def _snapshot_lock_op(self, op, vol_name, snap_name, pool_name):
"""Lock or unlock a snapshot to protect the snapshot.
op is 'lock' for lock and 'unlock' for unlock
"""
method = 'snapshot/volume/%s' % op
request_type = 'post'
params = {'snapName': snap_name,
'volumeName': vol_name,
'poolName': pool_name}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
@utils.trace
def _filling_volume(self, name, pool):
"""Filling a volume so that make it independently."""
method = 'block/lvm/filling'
request_type = 'post'
params = {'pool': pool, 'name': name}
self._rest.send_rest_api(method=method,
params=params,
request_type=request_type)
@utils.retry(exception.VolumeDriverException, interval=5, retries=36)
def _wait_volume_filled(self, name, pool):
"""Wait until the volume is filled."""
volumes = self._get_volumes(pool)
for vol in volumes:
if name == vol['name']:
if vol['lvmType'] == 1:
return
else:
break
msg = (_('Volume %s is not filled.') % name)
raise exception.VolumeDriverException(msg)
@utils.trace
def _check_volume(self, volume):
"""Check if the volume exists in the backend."""
pool = volume_utils.extract_host(volume.host, 'pool')
volume_name = self._trans_name_down(volume.name)
attempts = 3
while attempts > 0:
volumes = self._get_volumes(pool)
attempts -= 1
for vol in volumes:
if volume_name == vol.get('name'):
return True
eventlet.sleep(1)
return False
@utils.trace
def _get_volumes(self, pool):
"""Get all the volumes in the pool."""
method = 'block/lvm?pool=%s' % pool
request_type = 'get'
return self._rest.send_rest_api(method=method,
request_type=request_type)
@utils.trace
def _get_cluster_status(self):
"""Get all nodes of the backend."""
method = 'cluster/node'
request_type = 'get'
return self._rest.send_rest_api(method=method,
request_type=request_type)
@utils.trace
def _get_lun_id(self, volume, target_name):
"""Get lun id of the voluem in a target."""
pool = volume_utils.extract_host(volume.host, level='pool')
volume_name = self._trans_name_down(volume.name)
lun_id = None
luns = self._get_lun_list(target_name)
for lun in luns:
mappinglvm = lun.get('mappingLvm')
lun_name = mappinglvm.replace(r'%s/' % pool, '')
if lun_name == volume_name:
lun_id = lun.get('id')
return lun_id
def _trans_name_down(self, name):
"""Legitimize the name.
Because AS13000 volume name is only allowed letters, numbers, and '_'.
"""
return name.replace('-', '_')
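    # For illustration: _trans_name_down("volume-8f3c-4d21") returns
    # "volume_8f3c_4d21".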
@utils.trace
def _unit_convert(self, capacity):
"""Convert all units to GB.
        The capacity is a string such as 100GB, 20TB, or 100B;
        this routine converts it to the GB unit.
"""
capacity = capacity.upper()
try:
unit = re.findall(r'[A-Z]+', capacity)[0]
except BaseException:
unit = ''
capacity = float(capacity.replace(unit, ''))
size_gb = 0.0
if unit in ['B', '']:
size_gb = capacity / units.Gi
elif unit in ['K', 'KB']:
size_gb = capacity / units.Mi
elif unit in ['M', 'MB']:
size_gb = capacity / units.Ki
elif unit in ['G', 'GB']:
size_gb = capacity
elif unit in ['T', 'TB']:
size_gb = capacity * units.Ki
elif unit in ['P', 'PB']:
size_gb = capacity * units.Mi
elif unit in ['E', 'EB']:
size_gb = capacity * units.Gi
return float('%.0f' % size_gb)
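    # For illustration: _unit_convert("100GB") returns 100.0,
    # _unit_convert("2TB") returns 2048.0, and _unit_convert("500B") returns
    # 0.0 after the '%.0f' rounding above.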
| 36.997709
| 79
| 0.569522
|
7a1e5454a83ebf060db62f8aededcdf26feca6f4
| 39,801
|
py
|
Python
|
CellProfiler/cellprofiler/modules/measureobjectneighbors.py
|
aidotse/Team-rahma.ai
|
66857731e1ca2472e0783e37ba472b55a7ac9cd4
|
[
"MIT"
] | null | null | null |
CellProfiler/cellprofiler/modules/measureobjectneighbors.py
|
aidotse/Team-rahma.ai
|
66857731e1ca2472e0783e37ba472b55a7ac9cd4
|
[
"MIT"
] | null | null | null |
CellProfiler/cellprofiler/modules/measureobjectneighbors.py
|
aidotse/Team-rahma.ai
|
66857731e1ca2472e0783e37ba472b55a7ac9cd4
|
[
"MIT"
] | null | null | null |
"""
MeasureObjectNeighbors
======================
**MeasureObjectNeighbors** calculates how many neighbors each object
has and records various properties about the neighbors’ relationships,
including the percentage of an object’s edge pixels that touch a
neighbor.
Given an image with objects identified (e.g., nuclei or cells), this
module determines how many neighbors each object has. You can specify
the distance within which objects should be considered neighbors, or
that objects are only considered neighbors if they are directly
touching.
|
============ ============ ===============
Supports 2D? Supports 3D? Respects masks?
============ ============ ===============
YES YES NO
============ ============ ===============
See also
^^^^^^^^
See also the **Identify** modules.
Measurements made by this module
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
**Object measurements**
- *NumberOfNeighbors:* Number of neighbor objects.
- *PercentTouching:* Percent of the object’s boundary pixels that touch
neighbors, after the objects have been expanded to the specified
distance.
- *FirstClosestObjectNumber:* The index of the closest object.
- *FirstClosestDistance:* The distance to the closest object (in units
of pixels).
- *SecondClosestObjectNumber:* The index of the second closest object.
- *SecondClosestDistance:* The distance to the second closest object (in units
of pixels).
- *AngleBetweenNeighbors:* The angle formed with the object center as
the vertex and the first and second closest object centers along the
vectors.
**Object relationships:** The identity of the neighboring objects, for
each object. Since per-object output is one-to-one and neighbors
relationships are often many-to-one, they may be saved as a separate
file in **ExportToSpreadsheet** by selecting *Object relationships* from
the list of objects to export.
Technical notes
^^^^^^^^^^^^^^^
Objects discarded via modules such as **IdentifyPrimaryObjects** or
**IdentifySecondaryObjects** will still register as neighbors for the
purposes of accurate measurement. For instance, if an object touches a
single object and that object had been discarded, *NumberOfNeighbors*
will be positive, but there may not be a corresponding
*ClosestObjectNumber*. This can be disabled in module settings.
"""
import matplotlib.cm
import numpy
import scipy.ndimage
import scipy.signal
import skimage.morphology
from cellprofiler_core.constants.measurement import COLTYPE_FLOAT
from cellprofiler_core.constants.measurement import COLTYPE_INTEGER
from cellprofiler_core.constants.measurement import MCA_AVAILABLE_EACH_CYCLE
from cellprofiler_core.constants.measurement import NEIGHBORS
from cellprofiler_core.image import Image
from cellprofiler_core.measurement import Measurements
from cellprofiler_core.module import Module
from cellprofiler_core.object import Objects
from cellprofiler_core.preferences import get_default_colormap
from cellprofiler_core.setting import Binary
from cellprofiler_core.setting.choice import Choice, Colormap
from cellprofiler_core.setting.subscriber import LabelSubscriber
from cellprofiler_core.setting.text import ImageName
from cellprofiler_core.setting.text import Integer
from cellprofiler_core.workspace import Workspace
from centrosome.cpmorphology import fixup_scipy_ndimage_result as fix
from centrosome.cpmorphology import strel_disk, centers_of_labels
from centrosome.outline import outline
D_ADJACENT = "Adjacent"
D_EXPAND = "Expand until adjacent"
D_WITHIN = "Within a specified distance"
D_ALL = [D_ADJACENT, D_EXPAND, D_WITHIN]
M_NUMBER_OF_NEIGHBORS = "NumberOfNeighbors"
M_PERCENT_TOUCHING = "PercentTouching"
M_FIRST_CLOSEST_OBJECT_NUMBER = "FirstClosestObjectNumber"
M_FIRST_CLOSEST_DISTANCE = "FirstClosestDistance"
M_SECOND_CLOSEST_OBJECT_NUMBER = "SecondClosestObjectNumber"
M_SECOND_CLOSEST_DISTANCE = "SecondClosestDistance"
M_ANGLE_BETWEEN_NEIGHBORS = "AngleBetweenNeighbors"
M_ALL = [
M_NUMBER_OF_NEIGHBORS,
M_PERCENT_TOUCHING,
M_FIRST_CLOSEST_OBJECT_NUMBER,
M_FIRST_CLOSEST_DISTANCE,
M_SECOND_CLOSEST_OBJECT_NUMBER,
M_SECOND_CLOSEST_DISTANCE,
M_ANGLE_BETWEEN_NEIGHBORS,
]
C_NEIGHBORS = "Neighbors"
S_EXPANDED = "Expanded"
S_ADJACENT = "Adjacent"
class MeasureObjectNeighbors(Module):
module_name = "MeasureObjectNeighbors"
category = "Measurement"
variable_revision_number = 3
def create_settings(self):
self.object_name = LabelSubscriber(
"Select objects to measure",
"None",
doc="""\
Select the objects whose neighbors you want to measure.""",
)
self.neighbors_name = LabelSubscriber(
"Select neighboring objects to measure",
"None",
doc="""\
This is the name of the objects that are potential
neighbors of the above objects. You can find the neighbors
within the same set of objects by selecting the same objects
as above.""",
)
self.distance_method = Choice(
"Method to determine neighbors",
D_ALL,
D_EXPAND,
doc="""\
There are several methods by which to determine whether objects are
neighbors:
- *%(D_ADJACENT)s:* In this mode, two objects must have adjacent
boundary pixels to be neighbors.
- *%(D_EXPAND)s:* The objects are expanded until all pixels on the
object boundaries are touching another. Two objects are neighbors if
any of their boundary pixels are adjacent after expansion.
- *%(D_WITHIN)s:* Each object is expanded by the number of pixels you
specify. Two objects are neighbors if they have adjacent pixels after
expansion.
For *%(D_ADJACENT)s* and *%(D_EXPAND)s*, the
*%(M_PERCENT_TOUCHING)s* measurement is the percentage of pixels on
the boundary of an object that touch adjacent objects. For
*%(D_WITHIN)s*, two objects are touching if any of their boundary
pixels are adjacent after expansion and *%(M_PERCENT_TOUCHING)s*
measures the percentage of boundary pixels of an *expanded* object that
touch adjacent objects.
"""
% globals(),
)
self.distance = Integer(
"Neighbor distance",
5,
1,
doc="""\
*(Used only when “%(D_WITHIN)s” is selected)*
The Neighbor distance is the number of pixels that each object is
expanded for the neighbor calculation. Expanded objects that touch are
considered neighbors.
"""
% globals(),
)
self.wants_count_image = Binary(
"Retain the image of objects colored by numbers of neighbors?",
False,
doc="""\
An output image showing the input objects colored by numbers of
neighbors may be retained. A colormap of your choice shows how many
neighbors each object has. The background is set to -1. Objects are
colored with an increasing color value corresponding to the number of
neighbors, such that objects with no neighbors are given a color
corresponding to 0. Use the **SaveImages** module to save this image to
a file.""",
)
self.count_image_name = ImageName(
"Name the output image",
"ObjectNeighborCount",
doc="""\
*(Used only if the image of objects colored by numbers of neighbors is
to be retained for later use in the pipeline)*
Specify a name that will allow the image of objects colored by numbers
of neighbors to be selected later in the pipeline.""",
)
self.count_colormap = Colormap(
"Select colormap",
value="Blues",
doc="""\
*(Used only if the image of objects colored by numbers of neighbors is
to be retained for later use in the pipeline)*
Select the colormap to use to color the neighbor number image. All
available colormaps can be seen `here`_.
.. _here: http://matplotlib.org/examples/color/colormaps_reference.html""",
)
self.wants_percent_touching_image = Binary(
"Retain the image of objects colored by percent of touching pixels?",
False,
doc="""\
Select *Yes* to keep an image of the input objects colored by the
percentage of the boundary touching their neighbors. A colormap of your
choice is used to show the touching percentage of each object. Use the
**SaveImages** module to save this image to a file.
"""
% globals(),
)
self.touching_image_name = ImageName(
"Name the output image",
"PercentTouching",
doc="""\
*(Used only if the image of objects colored by percent touching is to be
retained for later use in the pipeline)*
Specify a name that will allow the image of objects colored by percent
of touching pixels to be selected later in the pipeline.""",
)
self.touching_colormap = Colormap(
"Select colormap",
value="Oranges",
doc="""\
*(Used only if the image of objects colored by percent touching is to be
retained for later use in the pipeline)*
Select the colormap to use to color the percent touching image. All
available colormaps can be seen `here`_.
.. _here: http://matplotlib.org/examples/color/colormaps_reference.html""",
)
self.wants_excluded_objects = Binary(
"Consider objects discarded for touching image border?",
True,
doc="""\
When set to *{YES}*, objects which were previously discarded for touching
the image borders will be considered as potential object neighbours in this
analysis. You may want to disable this if using object sets which were
further filtered, since those filters won't have been applied to the
previously discarded objects.""".format(
**{"YES": "Yes"}
),
)
def settings(self):
return [
self.object_name,
self.neighbors_name,
self.distance_method,
self.distance,
self.wants_excluded_objects,
self.wants_count_image,
self.count_image_name,
self.count_colormap,
self.wants_percent_touching_image,
self.touching_image_name,
self.touching_colormap,
]
def visible_settings(self):
result = [self.object_name, self.neighbors_name, self.distance_method]
if self.distance_method == D_WITHIN:
result += [self.distance]
result += [self.wants_excluded_objects, self.wants_count_image]
if self.wants_count_image.value:
result += [self.count_image_name, self.count_colormap]
result += [self.wants_percent_touching_image]
if self.wants_percent_touching_image.value:
result += [self.touching_image_name, self.touching_colormap]
return result
@property
def neighbors_are_objects(self):
"""True if the neighbors are taken from the same object set as objects"""
return self.object_name.value == self.neighbors_name.value
def run(self, workspace):
objects = workspace.object_set.get_objects(self.object_name.value)
dimensions = len(objects.shape)
assert isinstance(objects, Objects)
has_pixels = objects.areas > 0
labels = objects.small_removed_segmented
kept_labels = objects.segmented
neighbor_objects = workspace.object_set.get_objects(self.neighbors_name.value)
neighbor_labels = neighbor_objects.small_removed_segmented
neighbor_kept_labels = neighbor_objects.segmented
assert isinstance(neighbor_objects, Objects)
if not self.wants_excluded_objects.value:
# Remove labels not present in kept segmentation while preserving object IDs.
mask = neighbor_kept_labels > 0
neighbor_labels[~mask] = 0
nobjects = numpy.max(labels)
nkept_objects = len(objects.indices)
nneighbors = numpy.max(neighbor_labels)
_, object_numbers = objects.relate_labels(labels, kept_labels)
if self.neighbors_are_objects:
neighbor_numbers = object_numbers
neighbor_has_pixels = has_pixels
else:
_, neighbor_numbers = neighbor_objects.relate_labels(
neighbor_labels, neighbor_objects.small_removed_segmented
)
neighbor_has_pixels = numpy.bincount(neighbor_labels.ravel())[1:] > 0
neighbor_count = numpy.zeros((nobjects,))
pixel_count = numpy.zeros((nobjects,))
first_object_number = numpy.zeros((nobjects,), int)
second_object_number = numpy.zeros((nobjects,), int)
first_x_vector = numpy.zeros((nobjects,))
second_x_vector = numpy.zeros((nobjects,))
first_y_vector = numpy.zeros((nobjects,))
second_y_vector = numpy.zeros((nobjects,))
angle = numpy.zeros((nobjects,))
percent_touching = numpy.zeros((nobjects,))
expanded_labels = None
if self.distance_method == D_EXPAND:
# Find the i,j coordinates of the nearest foreground point
# to every background point
if dimensions == 2:
i, j = scipy.ndimage.distance_transform_edt(
labels == 0, return_distances=False, return_indices=True
)
# Assign each background pixel to the label of its nearest
# foreground pixel. Assign label to label for foreground.
labels = labels[i, j]
else:
k, i, j = scipy.ndimage.distance_transform_edt(
labels == 0, return_distances=False, return_indices=True
)
labels = labels[k, i, j]
expanded_labels = labels # for display
distance = 1 # dilate once to make touching edges overlap
scale = S_EXPANDED
if self.neighbors_are_objects:
neighbor_labels = labels.copy()
elif self.distance_method == D_WITHIN:
distance = self.distance.value
scale = str(distance)
elif self.distance_method == D_ADJACENT:
distance = 1
scale = S_ADJACENT
else:
raise ValueError("Unknown distance method: %s" % self.distance_method.value)
if nneighbors > (1 if self.neighbors_are_objects else 0):
first_objects = []
second_objects = []
object_indexes = numpy.arange(nobjects, dtype=numpy.int32) + 1
#
# First, compute the first and second nearest neighbors,
# and the angles between self and the first and second
# nearest neighbors
#
ocenters = centers_of_labels(objects.small_removed_segmented).transpose()
ncenters = centers_of_labels(
neighbor_objects.small_removed_segmented
).transpose()
areas = fix(
scipy.ndimage.sum(numpy.ones(labels.shape), labels, object_indexes)
)
perimeter_outlines = outline(labels)
perimeters = fix(
scipy.ndimage.sum(
numpy.ones(labels.shape), perimeter_outlines, object_indexes
)
)
i, j = numpy.mgrid[0:nobjects, 0:nneighbors]
distance_matrix = numpy.sqrt(
(ocenters[i, 0] - ncenters[j, 0]) ** 2
+ (ocenters[i, 1] - ncenters[j, 1]) ** 2
)
#
# order[:,0] should be arange(nobjects)
# order[:,1] should be the nearest neighbor
# order[:,2] should be the next nearest neighbor
#
if distance_matrix.shape[1] == 1:
# a little buggy, lexsort assumes that a 2-d array of
# second dimension = 1 is a 1-d array
order = numpy.zeros(distance_matrix.shape, int)
else:
order = numpy.lexsort([distance_matrix])
first_neighbor = 1 if self.neighbors_are_objects else 0
first_object_index = order[:, first_neighbor]
first_x_vector = ncenters[first_object_index, 1] - ocenters[:, 1]
first_y_vector = ncenters[first_object_index, 0] - ocenters[:, 0]
if nneighbors > first_neighbor + 1:
second_object_index = order[:, first_neighbor + 1]
second_x_vector = ncenters[second_object_index, 1] - ocenters[:, 1]
second_y_vector = ncenters[second_object_index, 0] - ocenters[:, 0]
v1 = numpy.array((first_x_vector, first_y_vector))
v2 = numpy.array((second_x_vector, second_y_vector))
#
# Project the unit vector v1 against the unit vector v2
#
dot = numpy.sum(v1 * v2, 0) / numpy.sqrt(
numpy.sum(v1 ** 2, 0) * numpy.sum(v2 ** 2, 0)
)
angle = numpy.arccos(dot) * 180.0 / numpy.pi
# Make the structuring element for dilation
if dimensions == 2:
strel = strel_disk(distance)
else:
strel = skimage.morphology.ball(distance)
#
# A little bigger one to enter into the border with a structure
# that mimics the one used to create the outline
#
if dimensions == 2:
strel_touching = strel_disk(distance + 0.5)
else:
strel_touching = skimage.morphology.ball(distance + 0.5)
#
# Get the extents for each object and calculate the patch
# that excises the part of the image that is "distance"
# away
if dimensions == 2:
i, j = numpy.mgrid[0 : labels.shape[0], 0 : labels.shape[1]]
minimums_i, maximums_i, _, _ = scipy.ndimage.extrema(
i, labels, object_indexes
)
minimums_j, maximums_j, _, _ = scipy.ndimage.extrema(
j, labels, object_indexes
)
minimums_i = numpy.maximum(fix(minimums_i) - distance, 0).astype(int)
maximums_i = numpy.minimum(
fix(maximums_i) + distance + 1, labels.shape[0]
).astype(int)
minimums_j = numpy.maximum(fix(minimums_j) - distance, 0).astype(int)
maximums_j = numpy.minimum(
fix(maximums_j) + distance + 1, labels.shape[1]
).astype(int)
else:
k, i, j = numpy.mgrid[
0 : labels.shape[0], 0 : labels.shape[1], 0 : labels.shape[2]
]
minimums_k, maximums_k, _, _ = scipy.ndimage.extrema(
k, labels, object_indexes
)
minimums_i, maximums_i, _, _ = scipy.ndimage.extrema(
i, labels, object_indexes
)
minimums_j, maximums_j, _, _ = scipy.ndimage.extrema(
j, labels, object_indexes
)
minimums_k = numpy.maximum(fix(minimums_k) - distance, 0).astype(int)
maximums_k = numpy.minimum(
fix(maximums_k) + distance + 1, labels.shape[0]
).astype(int)
minimums_i = numpy.maximum(fix(minimums_i) - distance, 0).astype(int)
maximums_i = numpy.minimum(
fix(maximums_i) + distance + 1, labels.shape[1]
).astype(int)
minimums_j = numpy.maximum(fix(minimums_j) - distance, 0).astype(int)
maximums_j = numpy.minimum(
fix(maximums_j) + distance + 1, labels.shape[2]
).astype(int)
#
# Loop over all objects
# Calculate which ones overlap "index"
# Calculate how much overlap there is of others to "index"
#
for object_number in object_numbers:
if object_number == 0:
#
# No corresponding object in small-removed. This means
# that the object has no pixels, e.g., not renumbered.
#
continue
index = object_number - 1
if dimensions == 2:
patch = labels[
minimums_i[index] : maximums_i[index],
minimums_j[index] : maximums_j[index],
]
npatch = neighbor_labels[
minimums_i[index] : maximums_i[index],
minimums_j[index] : maximums_j[index],
]
else:
patch = labels[
minimums_k[index] : maximums_k[index],
minimums_i[index] : maximums_i[index],
minimums_j[index] : maximums_j[index],
]
npatch = neighbor_labels[
minimums_k[index] : maximums_k[index],
minimums_i[index] : maximums_i[index],
minimums_j[index] : maximums_j[index],
]
#
# Find the neighbors
#
patch_mask = patch == (index + 1)
if distance <= 5:
extended = scipy.ndimage.binary_dilation(patch_mask, strel)
else:
extended = (
scipy.signal.fftconvolve(patch_mask, strel, mode="same") > 0.5
)
neighbors = numpy.unique(npatch[extended])
neighbors = neighbors[neighbors != 0]
if self.neighbors_are_objects:
neighbors = neighbors[neighbors != object_number]
nc = len(neighbors)
neighbor_count[index] = nc
if nc > 0:
first_objects.append(numpy.ones(nc, int) * object_number)
second_objects.append(neighbors)
#
# Find the # of overlapping pixels. Dilate the neighbors
# and see how many pixels overlap our image. Use a 3x3
# structuring element to expand the overlapping edge
# into the perimeter.
#
if dimensions == 2:
outline_patch = (
perimeter_outlines[
minimums_i[index] : maximums_i[index],
minimums_j[index] : maximums_j[index],
]
== object_number
)
else:
outline_patch = (
perimeter_outlines[
minimums_k[index] : maximums_k[index],
minimums_i[index] : maximums_i[index],
minimums_j[index] : maximums_j[index],
]
== object_number
)
if self.neighbors_are_objects:
extendme = (patch != 0) & (patch != object_number)
if distance <= 5:
extended = scipy.ndimage.binary_dilation(
extendme, strel_touching
)
else:
extended = (
scipy.signal.fftconvolve(
extendme, strel_touching, mode="same"
)
> 0.5
)
else:
if distance <= 5:
extended = scipy.ndimage.binary_dilation(
(npatch != 0), strel_touching
)
else:
extended = (
scipy.signal.fftconvolve(
(npatch != 0), strel_touching, mode="same"
)
> 0.5
)
overlap = numpy.sum(outline_patch & extended)
pixel_count[index] = overlap
if sum([len(x) for x in first_objects]) > 0:
first_objects = numpy.hstack(first_objects)
reverse_object_numbers = numpy.zeros(
max(numpy.max(object_numbers), numpy.max(first_objects)) + 1, int
)
reverse_object_numbers[object_numbers] = (
numpy.arange(len(object_numbers)) + 1
)
first_objects = reverse_object_numbers[first_objects]
second_objects = numpy.hstack(second_objects)
reverse_neighbor_numbers = numpy.zeros(
max(numpy.max(neighbor_numbers), numpy.max(second_objects)) + 1, int
)
reverse_neighbor_numbers[neighbor_numbers] = (
numpy.arange(len(neighbor_numbers)) + 1
)
second_objects = reverse_neighbor_numbers[second_objects]
to_keep = (first_objects > 0) & (second_objects > 0)
first_objects = first_objects[to_keep]
second_objects = second_objects[to_keep]
else:
first_objects = numpy.zeros(0, int)
second_objects = numpy.zeros(0, int)
percent_touching = pixel_count * 100 / perimeters
object_indexes = object_numbers - 1
neighbor_indexes = neighbor_numbers - 1
#
# Have to recompute nearest
#
first_object_number = numpy.zeros(nkept_objects, int)
second_object_number = numpy.zeros(nkept_objects, int)
if nkept_objects > (1 if self.neighbors_are_objects else 0):
di = (
ocenters[object_indexes[:, numpy.newaxis], 0]
- ncenters[neighbor_indexes[numpy.newaxis, :], 0]
)
dj = (
ocenters[object_indexes[:, numpy.newaxis], 1]
- ncenters[neighbor_indexes[numpy.newaxis, :], 1]
)
distance_matrix = numpy.sqrt(di * di + dj * dj)
distance_matrix[~has_pixels, :] = numpy.inf
distance_matrix[:, ~neighbor_has_pixels] = numpy.inf
#
# order[:,0] should be arange(nobjects)
# order[:,1] should be the nearest neighbor
# order[:,2] should be the next nearest neighbor
#
order = numpy.lexsort([distance_matrix]).astype(
first_object_number.dtype
)
if self.neighbors_are_objects:
first_object_number[has_pixels] = order[has_pixels, 1] + 1
if nkept_objects > 2:
second_object_number[has_pixels] = order[has_pixels, 2] + 1
else:
first_object_number[has_pixels] = order[has_pixels, 0] + 1
if order.shape[1] > 1:
second_object_number[has_pixels] = order[has_pixels, 1] + 1
else:
object_indexes = object_numbers - 1
neighbor_indexes = neighbor_numbers - 1
first_objects = numpy.zeros(0, int)
second_objects = numpy.zeros(0, int)
#
# Now convert all measurements from the small-removed to
# the final number set.
#
neighbor_count = neighbor_count[object_indexes]
neighbor_count[~has_pixels] = 0
percent_touching = percent_touching[object_indexes]
percent_touching[~has_pixels] = 0
first_x_vector = first_x_vector[object_indexes]
second_x_vector = second_x_vector[object_indexes]
first_y_vector = first_y_vector[object_indexes]
second_y_vector = second_y_vector[object_indexes]
angle = angle[object_indexes]
#
# Record the measurements
#
assert isinstance(workspace, Workspace)
m = workspace.measurements
assert isinstance(m, Measurements)
image_set = workspace.image_set
features_and_data = [
(M_NUMBER_OF_NEIGHBORS, neighbor_count),
(M_FIRST_CLOSEST_OBJECT_NUMBER, first_object_number),
(
M_FIRST_CLOSEST_DISTANCE,
numpy.sqrt(first_x_vector ** 2 + first_y_vector ** 2),
),
(M_SECOND_CLOSEST_OBJECT_NUMBER, second_object_number),
(
M_SECOND_CLOSEST_DISTANCE,
numpy.sqrt(second_x_vector ** 2 + second_y_vector ** 2),
),
(M_ANGLE_BETWEEN_NEIGHBORS, angle),
(M_PERCENT_TOUCHING, percent_touching),
]
for feature_name, data in features_and_data:
m.add_measurement(
self.object_name.value, self.get_measurement_name(feature_name), data
)
if len(first_objects) > 0:
m.add_relate_measurement(
self.module_num,
NEIGHBORS,
self.object_name.value,
self.object_name.value
if self.neighbors_are_objects
else self.neighbors_name.value,
m.image_set_number * numpy.ones(first_objects.shape, int),
first_objects,
m.image_set_number * numpy.ones(second_objects.shape, int),
second_objects,
)
labels = kept_labels
neighbor_count_image = numpy.zeros(labels.shape, int)
object_mask = objects.segmented != 0
object_indexes = objects.segmented[object_mask] - 1
neighbor_count_image[object_mask] = neighbor_count[object_indexes]
workspace.display_data.neighbor_count_image = neighbor_count_image
percent_touching_image = numpy.zeros(labels.shape)
percent_touching_image[object_mask] = percent_touching[object_indexes]
workspace.display_data.percent_touching_image = percent_touching_image
image_set = workspace.image_set
if self.wants_count_image.value:
neighbor_cm_name = self.count_colormap.value
neighbor_cm = get_colormap(neighbor_cm_name)
sm = matplotlib.cm.ScalarMappable(cmap=neighbor_cm)
img = sm.to_rgba(neighbor_count_image)[:, :, :3]
img[:, :, 0][~object_mask] = 0
img[:, :, 1][~object_mask] = 0
img[:, :, 2][~object_mask] = 0
count_image = Image(img, masking_objects=objects)
image_set.add(self.count_image_name.value, count_image)
else:
neighbor_cm_name = "Blues"
neighbor_cm = matplotlib.cm.get_cmap(neighbor_cm_name)
if self.wants_percent_touching_image:
percent_touching_cm_name = self.touching_colormap.value
percent_touching_cm = get_colormap(percent_touching_cm_name)
sm = matplotlib.cm.ScalarMappable(cmap=percent_touching_cm)
img = sm.to_rgba(percent_touching_image)[:, :, :3]
img[:, :, 0][~object_mask] = 0
img[:, :, 1][~object_mask] = 0
img[:, :, 2][~object_mask] = 0
touching_image = Image(img, masking_objects=objects)
image_set.add(self.touching_image_name.value, touching_image)
else:
percent_touching_cm_name = "Oranges"
percent_touching_cm = matplotlib.cm.get_cmap(percent_touching_cm_name)
if self.show_window:
workspace.display_data.neighbor_cm_name = neighbor_cm_name
workspace.display_data.percent_touching_cm_name = percent_touching_cm_name
workspace.display_data.orig_labels = objects.segmented
workspace.display_data.neighbor_labels = neighbor_labels
workspace.display_data.expanded_labels = expanded_labels
workspace.display_data.object_mask = object_mask
workspace.display_data.dimensions = dimensions
def display(self, workspace, figure):
dimensions = workspace.display_data.dimensions
figure.set_subplots((2, 2), dimensions=dimensions)
figure.subplot_imshow_labels(
0,
0,
workspace.display_data.orig_labels,
"Original: %s" % self.object_name.value,
)
object_mask = workspace.display_data.object_mask
expanded_labels = workspace.display_data.expanded_labels
neighbor_count_image = workspace.display_data.neighbor_count_image
neighbor_count_image[~object_mask] = -1
neighbor_cm = get_colormap(workspace.display_data.neighbor_cm_name)
neighbor_cm.set_under((0, 0, 0))
neighbor_cm = matplotlib.cm.ScalarMappable(cmap=neighbor_cm)
percent_touching_cm = get_colormap(
workspace.display_data.percent_touching_cm_name
)
percent_touching_cm.set_under((0, 0, 0))
percent_touching_image = workspace.display_data.percent_touching_image
percent_touching_image[~object_mask] = -1
percent_touching_cm = matplotlib.cm.ScalarMappable(cmap=percent_touching_cm)
expandplot_position = 0
if not self.neighbors_are_objects:
# Display the neighbor object set, move expanded objects plot out of the way
expandplot_position = 1
figure.subplot_imshow_labels(
1,
0,
workspace.display_data.neighbor_labels,
"Neighbors: %s" % self.neighbors_name.value,
)
if numpy.any(object_mask):
figure.subplot_imshow(
0,
1,
neighbor_count_image,
"%s colored by # of neighbors" % self.object_name.value,
colormap=neighbor_cm,
colorbar=True,
vmin=0,
vmax=max(neighbor_count_image.max(), 1),
normalize=False,
sharexy=figure.subplot(0, 0),
)
if self.neighbors_are_objects:
figure.subplot_imshow(
1,
1,
percent_touching_image,
"%s colored by pct touching" % self.object_name.value,
colormap=percent_touching_cm,
colorbar=True,
vmin=0,
vmax=max(percent_touching_image.max(), 1),
normalize=False,
sharexy=figure.subplot(0, 0),
)
else:
# No objects - colorbar blows up.
figure.subplot_imshow(
0,
1,
neighbor_count_image,
"%s colored by # of neighbors" % self.object_name.value,
colormap=neighbor_cm,
vmin=0,
vmax=max(neighbor_count_image.max(), 1),
sharexy=figure.subplot(0, 0),
)
if self.neighbors_are_objects:
figure.subplot_imshow(
1,
1,
percent_touching_image,
"%s colored by pct touching" % self.object_name.value,
colormap=percent_touching_cm,
vmin=0,
vmax=max(neighbor_count_image.max(), 1),
sharexy=figure.subplot(0, 0),
)
if self.distance_method == D_EXPAND:
figure.subplot_imshow_labels(
1,
expandplot_position,
expanded_labels,
"Expanded %s" % self.object_name.value,
sharexy=figure.subplot(0, 0),
)
@property
def all_features(self):
return M_ALL
def get_measurement_name(self, feature):
if self.distance_method == D_EXPAND:
scale = S_EXPANDED
elif self.distance_method == D_WITHIN:
scale = str(self.distance.value)
elif self.distance_method == D_ADJACENT:
scale = S_ADJACENT
if self.neighbors_are_objects:
return "_".join((C_NEIGHBORS, feature, scale))
else:
return "_".join((C_NEIGHBORS, feature, self.neighbors_name.value, scale))
def get_measurement_columns(self, pipeline):
"""Return column definitions for measurements made by this module"""
coltypes = dict(
[
(
feature,
COLTYPE_INTEGER
if feature
in (
M_NUMBER_OF_NEIGHBORS,
M_FIRST_CLOSEST_OBJECT_NUMBER,
M_SECOND_CLOSEST_OBJECT_NUMBER,
)
else COLTYPE_FLOAT,
)
for feature in self.all_features
]
)
return [
(
self.object_name.value,
self.get_measurement_name(feature_name),
coltypes[feature_name],
)
for feature_name in self.all_features
]
def get_object_relationships(self, pipeline):
"""Return column definitions for object relationships output by module"""
objects_name = self.object_name.value
if self.neighbors_are_objects:
neighbors_name = objects_name
else:
neighbors_name = self.neighbors_name.value
return [(NEIGHBORS, objects_name, neighbors_name, MCA_AVAILABLE_EACH_CYCLE,)]
def get_categories(self, pipeline, object_name):
if object_name == self.object_name:
return [C_NEIGHBORS]
return []
def get_measurements(self, pipeline, object_name, category):
if object_name == self.object_name and category == C_NEIGHBORS:
return list(M_ALL)
return []
def get_measurement_objects(self, pipeline, object_name, category, measurement):
if self.neighbors_are_objects or measurement not in self.get_measurements(
pipeline, object_name, category
):
return []
return [self.neighbors_name.value]
def get_measurement_scales(
self, pipeline, object_name, category, measurement, image_name
):
if measurement in self.get_measurements(pipeline, object_name, category):
if self.distance_method == D_EXPAND:
return [S_EXPANDED]
elif self.distance_method == D_ADJACENT:
return [S_ADJACENT]
elif self.distance_method == D_WITHIN:
return [str(self.distance.value)]
else:
raise ValueError(
"Unknown distance method: %s" % self.distance_method.value
)
return []
def upgrade_settings(self, setting_values, variable_revision_number, module_name):
if variable_revision_number == 1:
# Added neighbor objects
# To upgrade, repeat object_name twice
#
setting_values = setting_values[:1] * 2 + setting_values[1:]
variable_revision_number = 2
if variable_revision_number == 2:
# Added border object exclusion
setting_values = setting_values[:4] + [True] + setting_values[4:]
variable_revision_number = 3
return setting_values, variable_revision_number
def volumetric(self):
return True
def get_colormap(name):
"""Get colormap, accounting for possible request for default"""
if name == "Default":
name = get_default_colormap()
return matplotlib.cm.get_cmap(name)
| 41.589342
| 89
| 0.589005
|
5a1fa97f43d96720f902a56bc773db50dbaf67c4
| 5,310
|
py
|
Python
|
analysis.py
|
JiaChenwei/OpenTrafficSimulation
|
59ce3781a4264d2fd419261bfce2a8d1f7550976
|
[
"MIT"
] | 2
|
2020-05-27T10:18:08.000Z
|
2021-02-20T08:12:48.000Z
|
analysis.py
|
JiaChenwei/OpenTrafficSimulation
|
59ce3781a4264d2fd419261bfce2a8d1f7550976
|
[
"MIT"
] | null | null | null |
analysis.py
|
JiaChenwei/OpenTrafficSimulation
|
59ce3781a4264d2fd419261bfce2a8d1f7550976
|
[
"MIT"
] | null | null | null |
import re
import seaborn as sns
import sys
from example import *
from matplotlib import pyplot as plt
import os
from pprint import pprint as pp
mod = sys.argv[1]
path = sys.argv[2]
if path[-1] != '/':
path = path + '/'
pass
dirs = os.listdir(path)
def plot(mean_matrix, standard_deviation_matrix):
sns.set()
plt.clf()
plt.figure(figsize=(10, 8), dpi=80)
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
ax = sns.heatmap(mean_matrix, annot=True, fmt=".1f", vmin=0, vmax=MAX_SPEED,
cmap="jet_r") # , cmap="RdBu_r", center=MAX_SPEED/2
    ax.set_title('Mean speed')
    ax.set_xlabel('Permeability')
    ax.set_ylabel('Traffic density')
label_y = ax.get_yticklabels()
plt.setp(label_y, rotation=360, horizontalalignment='right')
label_x = ax.get_xticklabels()
plt.setp(label_x, rotation=45, horizontalalignment='right')
plt.tight_layout()
plt.savefig(path + 'heatmap_mean' + '.jpg')
plt.close()
plt.clf()
plt.figure(figsize=(10, 8), dpi=80)
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
ax = sns.heatmap(standard_deviation_matrix, annot=True, fmt=".2f", vmin=0, vmax=1, cmap="bwr", center=0.2) # cool
    ax.set_title('Standard deviation')
    ax.set_xlabel('Permeability')
    ax.set_ylabel('Traffic density')
label_y = ax.get_yticklabels()
plt.setp(label_y, rotation=360, horizontalalignment='right')
label_x = ax.get_xticklabels()
plt.setp(label_x, rotation=45, horizontalalignment='right')
plt.tight_layout()
plt.savefig(path + 'heatmap_standard_deviation' + '.jpg')
plt.close()
if mod == "-one":
dirs_csv = []
for _ in dirs:
if ('csv' in _) and ('data' in _):
dirs_csv.append(_)
pass
pass
tmp = []
tmp.extend(dirs_csv[88:99])
tmp.extend(dirs_csv[:88])
tmp.extend(dirs_csv[99:])
dirs_csv = tmp
pp(dirs_csv)
list_file = []
list_parameter = []
for d in dirs_csv:
k = d
v = re.findall(r"\d+\.?\d*", d)
for _ in range(len(v)):
v[_] = float(v[_])
pass
list_file.append(k)
list_parameter.append(v)
pass
ret = []
max_count = len(list_file)
count = 0
for k, v in zip(list_file, list_parameter):
count += 1
print("(" + str(count) + "/" + str(max_count) + ") " + "wait..." + time.strftime("%Y-%m-%d %H:%M:%S",
time.localtime()))
data = pd.read_csv(path + k,
sep=',',
header=0,
index_col=0,
dtype=np.float64)
data = np.array(data['v'][data['time'] > SAMPLING_TIME]) * 3.6
mean = np.mean(data)
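        # Twice the root-mean-square deviation from the mean, normalised by MAX_SPEED,
        # i.e. 2 * sqrt(mean(((v - mean) / MAX_SPEED) ** 2)).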
standard_deviation = 2 * ((np.sum(
np.power((data - mean) / MAX_SPEED, 2)
) / len(data)) ** 0.5)
v.extend([mean, standard_deviation])
ret.append(v)
pass
cols = ['traffic_density', 'permeability', 'mean', 'standard_deviation']
ret = pd.DataFrame(ret, columns=cols, dtype='double')
ret.to_csv(path + 'result' + '.csv', sep=',', index=False)
cols = ret['permeability'].unique()
index = ret['traffic_density'].unique()
mean_matrix = pd.DataFrame(np.array(ret['mean']).reshape(11, -1), index=index, columns=cols)
standard_deviation_matrix = pd.DataFrame(np.array(ret['standard_deviation']).reshape(11, -1), index=index,
columns=cols)
plot(mean_matrix, standard_deviation_matrix)
elif mod == "-multi":
for _ in dirs:
if '.' not in _ and _[:4] == 'data':
print(path+_)
os.system("python analysis.py -one %s" % (path+_))
elif mod == "-summary":
mean_matrix = None
standard_deviation_matrix = None
ret = None
dirs_csv = []
for _ in dirs:
file_name, file_type = os.path.splitext(_)
if file_type == '':
tmp = path + _ + '/' + 'result.csv'
print(tmp)
dirs_csv.append(tmp)
pass
pass
if not dirs_csv:
exit()
for _ in dirs_csv:
data = pd.read_csv(_,
sep=',',
header=0,
dtype=np.float64)
if ret is None:
ret = data
else:
ret['mean'] = ret['mean'] + data['mean']
ret['standard_deviation'] = ret['standard_deviation'] + data['standard_deviation']
pass
pass
ret['mean'] = ret['mean'] / len(dirs_csv)
ret['standard_deviation'] = ret['standard_deviation'] / len(dirs_csv)
ret = pd.DataFrame(ret, dtype='double')
print(ret)
ret.to_csv(path + 'result' + '.csv', sep=',', index=False)
cols = ret['permeability'].unique()
index = ret['traffic_density'].unique()
mean_matrix = pd.DataFrame(np.array(ret['mean']).reshape(11, -1), index=index, columns=cols)
standard_deviation_matrix = pd.DataFrame(np.array(ret['standard_deviation']).reshape(11, -1), index=index,
columns=cols)
plot(mean_matrix, standard_deviation_matrix)
else:
exit()
| 31.235294
| 118
| 0.552542
|
a453b8e2bc2951a5bc692f41d70ffec373092fa2
| 20,307
|
py
|
Python
|
sdk/python/pulumi_azure_native/network/v20180701/express_route_circuit_authorization.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20180701/express_route_circuit_authorization.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/network/v20180701/express_route_circuit_authorization.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = ['ExpressRouteCircuitAuthorizationArgs', 'ExpressRouteCircuitAuthorization']
@pulumi.input_type
class ExpressRouteCircuitAuthorizationArgs:
def __init__(__self__, *,
circuit_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
authorization_key: Optional[pulumi.Input[str]] = None,
authorization_name: Optional[pulumi.Input[str]] = None,
authorization_use_status: Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ExpressRouteCircuitAuthorization resource.
:param pulumi.Input[str] circuit_name: The name of the express route circuit.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] authorization_key: The authorization key.
:param pulumi.Input[str] authorization_name: The name of the authorization.
:param pulumi.Input[Union[str, 'AuthorizationUseStatus']] authorization_use_status: AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
pulumi.set(__self__, "circuit_name", circuit_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if authorization_key is not None:
pulumi.set(__self__, "authorization_key", authorization_key)
if authorization_name is not None:
pulumi.set(__self__, "authorization_name", authorization_name)
if authorization_use_status is not None:
pulumi.set(__self__, "authorization_use_status", authorization_use_status)
if id is not None:
pulumi.set(__self__, "id", id)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_state is not None:
pulumi.set(__self__, "provisioning_state", provisioning_state)
@property
@pulumi.getter(name="circuitName")
def circuit_name(self) -> pulumi.Input[str]:
"""
The name of the express route circuit.
"""
return pulumi.get(self, "circuit_name")
@circuit_name.setter
def circuit_name(self, value: pulumi.Input[str]):
pulumi.set(self, "circuit_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="authorizationKey")
def authorization_key(self) -> Optional[pulumi.Input[str]]:
"""
The authorization key.
"""
return pulumi.get(self, "authorization_key")
@authorization_key.setter
def authorization_key(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "authorization_key", value)
@property
@pulumi.getter(name="authorizationName")
def authorization_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the authorization.
"""
return pulumi.get(self, "authorization_name")
@authorization_name.setter
def authorization_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "authorization_name", value)
@property
@pulumi.getter(name="authorizationUseStatus")
def authorization_use_status(self) -> Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]]:
"""
AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
"""
return pulumi.get(self, "authorization_use_status")
@authorization_use_status.setter
def authorization_use_status(self, value: Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]]):
pulumi.set(self, "authorization_use_status", value)
@property
@pulumi.getter
def id(self) -> Optional[pulumi.Input[str]]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@id.setter
def id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "id", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> Optional[pulumi.Input[str]]:
"""
Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@provisioning_state.setter
def provisioning_state(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provisioning_state", value)
class ExpressRouteCircuitAuthorization(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
authorization_key: Optional[pulumi.Input[str]] = None,
authorization_name: Optional[pulumi.Input[str]] = None,
authorization_use_status: Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]] = None,
circuit_name: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Authorization in an ExpressRouteCircuit resource.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] authorization_key: The authorization key.
:param pulumi.Input[str] authorization_name: The name of the authorization.
:param pulumi.Input[Union[str, 'AuthorizationUseStatus']] authorization_use_status: AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
:param pulumi.Input[str] circuit_name: The name of the express route circuit.
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[str] name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ExpressRouteCircuitAuthorizationArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Authorization in an ExpressRouteCircuit resource.
:param str resource_name: The name of the resource.
:param ExpressRouteCircuitAuthorizationArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ExpressRouteCircuitAuthorizationArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
authorization_key: Optional[pulumi.Input[str]] = None,
authorization_name: Optional[pulumi.Input[str]] = None,
authorization_use_status: Optional[pulumi.Input[Union[str, 'AuthorizationUseStatus']]] = None,
circuit_name: Optional[pulumi.Input[str]] = None,
id: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ExpressRouteCircuitAuthorizationArgs.__new__(ExpressRouteCircuitAuthorizationArgs)
__props__.__dict__["authorization_key"] = authorization_key
__props__.__dict__["authorization_name"] = authorization_name
__props__.__dict__["authorization_use_status"] = authorization_use_status
if circuit_name is None and not opts.urn:
raise TypeError("Missing required property 'circuit_name'")
__props__.__dict__["circuit_name"] = circuit_name
__props__.__dict__["id"] = id
__props__.__dict__["name"] = name
__props__.__dict__["provisioning_state"] = provisioning_state
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["etag"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/v20180701:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20150501preview:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20150615:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20150615:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20160330:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20160330:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20160601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20160601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20160901:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20160901:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20161201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20161201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20170301:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20170301:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20170601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20170601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20170801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20170801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20170901:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20170901:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20171001:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20171001:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20171101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20171101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20180101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20180101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20180201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20180201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20180401:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20180401:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20180601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20180601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20180801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20180801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20181001:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20181001:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20181101:ExpressRouteCircuitAuthorization"), 
pulumi.Alias(type_="azure-nextgen:network/v20181101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20181201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20181201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20190201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20190201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20190401:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20190401:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20190601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20190601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20190701:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20190701:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20190801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20190801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20190901:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20190901:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20191101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20191101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20191201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20191201:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20200301:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20200301:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20200401:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20200401:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20200501:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20200501:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20200601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20200601:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20200701:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20200701:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20200801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20200801:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-native:network/v20201101:ExpressRouteCircuitAuthorization"), pulumi.Alias(type_="azure-nextgen:network/v20201101:ExpressRouteCircuitAuthorization")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ExpressRouteCircuitAuthorization, __self__).__init__(
'azure-native:network/v20180701:ExpressRouteCircuitAuthorization',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ExpressRouteCircuitAuthorization':
"""
Get an existing ExpressRouteCircuitAuthorization resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ExpressRouteCircuitAuthorizationArgs.__new__(ExpressRouteCircuitAuthorizationArgs)
__props__.__dict__["authorization_key"] = None
__props__.__dict__["authorization_use_status"] = None
__props__.__dict__["etag"] = None
__props__.__dict__["name"] = None
__props__.__dict__["provisioning_state"] = None
return ExpressRouteCircuitAuthorization(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="authorizationKey")
def authorization_key(self) -> pulumi.Output[Optional[str]]:
"""
The authorization key.
"""
return pulumi.get(self, "authorization_key")
@property
@pulumi.getter(name="authorizationUseStatus")
def authorization_use_status(self) -> pulumi.Output[Optional[str]]:
"""
AuthorizationUseStatus. Possible values are: 'Available' and 'InUse'.
"""
return pulumi.get(self, "authorization_use_status")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[str]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter
def name(self) -> pulumi.Output[Optional[str]]:
"""
Gets name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
Gets the provisioning state of the public IP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
| 66.799342
| 6,435
| 0.725464
|
d3e58cb3ed32af962dda238a632caff912d015c7
| 843
|
py
|
Python
|
api_basebone/drf/pagination.py
|
git-men/bsm-django
|
46d1fcbd8ca379d20a3396fd7ea529ccf998f59d
|
[
"MIT"
] | 90
|
2020-12-07T04:49:43.000Z
|
2022-03-31T08:24:35.000Z
|
api_basebone/drf/pagination.py
|
flyowl/lightning
|
946c98986c1c42bf8c28f203cdf8512262283c25
|
[
"MIT"
] | 4
|
2021-01-11T16:10:55.000Z
|
2022-02-18T12:13:23.000Z
|
api_basebone/drf/pagination.py
|
flyowl/lightning
|
946c98986c1c42bf8c28f203cdf8512262283c25
|
[
"MIT"
] | 16
|
2020-12-07T12:32:05.000Z
|
2022-01-30T05:36:51.000Z
|
from rest_framework.pagination import (
_positive_int,
PageNumberPagination as OriginPageNumberPagination,
)
from rest_framework.response import Response
class PageNumberPagination(OriginPageNumberPagination):
max_page_size = 1000
page_size = 100
page_query_param = 'page'
page_size_query_param = 'size'
def get_page_size(self, request):
"""重写此方法是为了支持以下场景
- 当传入的数据包含分页参数时,返回对应的分页数据结构
- 当传入的数据不包含分页参数时,直接返回非分页数据的数据结构
"""
if self.page_size_query_param:
try:
return _positive_int(
request.query_params[self.page_size_query_param],
strict=True,
cutoff=self.max_page_size,
)
except (KeyError, ValueError):
return
return self.page_size
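# --- Minimal usage sketch (not part of the original module) ------------------
# Demonstrates the behaviour documented in get_page_size; _FakeRequest is an
# illustrative stand-in for a DRF Request object, and the snippet assumes a
# configured Django settings module (as the imports above already require).
if __name__ == "__main__":
    class _FakeRequest:
        def __init__(self, **params):
            self.query_params = params

    paginator = PageNumberPagination()
    print(paginator.get_page_size(_FakeRequest(size="20")))  # -> 20, capped at max_page_size
    print(paginator.get_page_size(_FakeRequest()))           # -> None, so the view can skip pagination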
| 27.193548
| 69
| 0.626335
|
dd9e996b07572d56d1a97647204b5a4bc9c7c14e
| 282
|
py
|
Python
|
tests/artificial/transf_RelativeDifference/trend_MovingAverage/cycle_7/ar_12/test_artificial_32_RelativeDifference_MovingAverage_7_12_20.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
tests/artificial/transf_RelativeDifference/trend_MovingAverage/cycle_7/ar_12/test_artificial_32_RelativeDifference_MovingAverage_7_12_20.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | 1
|
2019-11-30T23:39:38.000Z
|
2019-12-01T04:34:35.000Z
|
tests/artificial/transf_RelativeDifference/trend_MovingAverage/cycle_7/ar_12/test_artificial_32_RelativeDifference_MovingAverage_7_12_20.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 7, transform = "RelativeDifference", sigma = 0.0, exog_count = 20, ar_order = 12);
| 40.285714
| 177
| 0.744681
|
31e2193969e30c6bf94a4cf818ae4c3e562fbb27
| 5,133
|
py
|
Python
|
DRP_Backend/settings.py
|
leonardodalinky/device-rental-platform-backend
|
9ca9137ebce2ec241c6aecd73128c28941e3d6c4
|
[
"MIT"
] | null | null | null |
DRP_Backend/settings.py
|
leonardodalinky/device-rental-platform-backend
|
9ca9137ebce2ec241c6aecd73128c28941e3d6c4
|
[
"MIT"
] | null | null | null |
DRP_Backend/settings.py
|
leonardodalinky/device-rental-platform-backend
|
9ca9137ebce2ec241c6aecd73128c28941e3d6c4
|
[
"MIT"
] | null | null | null |
"""
Django settings for DRP_Backend project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
# TODO: absolutely do not delete this import
from .mail_settings import *
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'er$uhr6bw-5@ic&9(n1pwhilcb6^20^p6dr4zp2=o6(q+ye5n1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
ALLOWED_HOSTS = ['ayajike.xyz', '127.0.0.1']
# Application definition
INSTALLED_APPS = [
'webservice.apps.WebserviceConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
# 'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'webservice.apps.LoginRequireMiddleware',
'webservice.apps.MethodValidateMiddleware',
'webservice.apps.PermissionValidateMiddleware',
'webservice.apps.UserLogMiddleware',
]
ROOT_URLCONF = 'DRP_Backend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'DRP_Backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# Session settings
SESSION_ENGINE = 'django.contrib.sessions.backends.db'  # engine (default)
SESSION_COOKIE_NAME = "session_id"  # cookie key under which the session id is stored in the browser, i.e. session_id=<random string> (default)
SESSION_COOKIE_PATH = "/"  # path the session cookie is valid for (default)
SESSION_COOKIE_DOMAIN = None  # domain the session cookie is valid for (default)
SESSION_COOKIE_SECURE = False  # whether the cookie is sent over HTTPS only (default)
SESSION_COOKIE_HTTPONLY = True  # whether the session cookie is HTTP-only (default)
SESSION_COOKIE_AGE = 604800  # session cookie lifetime (1 week)
SESSION_EXPIRE_AT_BROWSER_CLOSE = False  # whether the session expires when the browser is closed (default)
SESSION_SAVE_EVERY_REQUEST = False  # whether to save the session on every request; by default only after modification (default)
AUTH_USER_MODEL = 'webservice.User'
"""
发件设置
EMAIL_HOST = ''
EMAIL_PORT = 465
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_SSL = True
"""
# 日志打印
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '{asctime} {module}.{funcName} {lineno:3} {levelname:7} => {message}',
'style': '{',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'file': {
'class': 'logging.handlers.RotatingFileHandler',
'formatter': 'verbose',
'filename': './server.log',
'maxBytes': 4194304, # 4 MB
'backupCount': 10,
'level': 'DEBUG',
'encoding': 'utf-8',
},
},
'loggers': {
'': {
'handlers': ['console', 'file'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
},
'django': {
'handlers': ['console', 'file'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
'propagate': False,
},
},
}
| 27.745946
| 92
| 0.661991
|
8ec8cab2296fc86d72f2234d5cf794bc1fbfabc8
| 3,000
|
py
|
Python
|
examples/arrays_simple_vs_complex.py
|
infoxchange/spyne
|
60ed622b088c13f4f84c81f1f43302edbc7f6027
|
[
"BSD-3-Clause"
] | null | null | null |
examples/arrays_simple_vs_complex.py
|
infoxchange/spyne
|
60ed622b088c13f4f84c81f1f43302edbc7f6027
|
[
"BSD-3-Clause"
] | null | null | null |
examples/arrays_simple_vs_complex.py
|
infoxchange/spyne
|
60ed622b088c13f4f84c81f1f43302edbc7f6027
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# encoding: utf8
#
# Copyright © Burak Arslan <burak at arskom dot com dot tr>,
# Arskom Ltd. http://www.arskom.com.tr
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the owner nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
For testing different kinds of arrays with different protocols.
"""
import logging
from spyne import Application, srpc, ServiceBase, Unicode, ComplexModel, Array
from spyne.protocol.xml import XmlDocument
from spyne.protocol.json import JsonDocument
from spyne.protocol.http import HttpRpc
from spyne.server.wsgi import WsgiApplication
class Permission(ComplexModel):
__namespace__ = 'some_ns'
application = Unicode
feature = Unicode
v = [
Permission(application='app', feature='f1'),
Permission(application='app', feature='f2'),
]
class HelloWorldService(ServiceBase):
@srpc(_returns=Array(Permission))
def simple():
return v
@srpc(_returns=Permission.customize(max_occurs=float('inf')))
def complex():
return v
if __name__=='__main__':
from wsgiref.simple_server import make_server
logging.basicConfig(level=logging.DEBUG)
application = Application([HelloWorldService], 'spyne.examples.hello.http',
in_protocol=HttpRpc(validator='soft'),
out_protocol=XmlDocument(),
)
wsgi_application = WsgiApplication(application)
server = make_server('127.0.0.1', 8000, wsgi_application)
logging.info("listening to http://127.0.0.1:8000")
logging.info("wsdl is at: http://localhost:8000/?wsdl")
server.serve_forever()
| 34.482759
| 80
| 0.739667
|
b1cbb88959b1b5270b4d0c5fabde20462dc02a10
| 588
|
py
|
Python
|
lib/notification_service/notification_service/__init__.py
|
flink-extended/ai-flow
|
d1427a243097d94d77fedbe1966500ae26975a13
|
[
"Apache-2.0"
] | 79
|
2021-10-15T07:32:27.000Z
|
2022-03-28T04:10:19.000Z
|
ai_flow/graph/__init__.py
|
flink-extended/ai-flow
|
d1427a243097d94d77fedbe1966500ae26975a13
|
[
"Apache-2.0"
] | 153
|
2021-10-15T05:23:46.000Z
|
2022-02-23T06:07:10.000Z
|
ai_flow/protobuf/__init__.py
|
flink-extended/ai-flow
|
d1427a243097d94d77fedbe1966500ae26975a13
|
[
"Apache-2.0"
] | 23
|
2021-10-15T02:36:37.000Z
|
2022-03-17T02:59:27.000Z
|
#
# Copyright 2022 The AI Flow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
| 34.588235
| 66
| 0.756803
|
cae4e94b46cc6877397daef82f8d8c93f4d93918
| 2,164
|
py
|
Python
|
plugins/active_directory_ldap/komand_active_directory_ldap/actions/add_user/action.py
|
JaredAllen13/insightconnect-plugins
|
f68ce8c60ad20439284228dfcbcd9f8c1c0c7d31
|
[
"MIT"
] | null | null | null |
plugins/active_directory_ldap/komand_active_directory_ldap/actions/add_user/action.py
|
JaredAllen13/insightconnect-plugins
|
f68ce8c60ad20439284228dfcbcd9f8c1c0c7d31
|
[
"MIT"
] | null | null | null |
plugins/active_directory_ldap/komand_active_directory_ldap/actions/add_user/action.py
|
JaredAllen13/insightconnect-plugins
|
f68ce8c60ad20439284228dfcbcd9f8c1c0c7d31
|
[
"MIT"
] | null | null | null |
import insightconnect_plugin_runtime
# Custom imports below
from .schema import AddUserInput, AddUserOutput, Output, Input
class AddUser(insightconnect_plugin_runtime.Action):
def __init__(self):
super(self.__class__, self).__init__(
name="add_user",
description="Adds the AD User specified",
input=AddUserInput(),
output=AddUserOutput(),
)
def run(self, params={}):
use_ssl = self.connection.use_ssl
domain_name = params.get(Input.DOMAIN_NAME)
first_name = params.get(Input.FIRST_NAME)
last_name = params.get(Input.LAST_NAME)
logon_name = params.get(Input.LOGON_NAME)
user_ou = params.get(Input.USER_OU)
account_disabled = params.get(Input.ACCOUNT_DISABLED)
password = params.get(Input.PASSWORD)
additional_parameters = params.get(Input.ADDITIONAL_PARAMETERS)
user_principal_name = params.get(Input.USER_PRINCIPAL_NAME)
if account_disabled or not use_ssl:
user_account_control = 514
else:
user_account_control = 512
full_name = first_name + " " + last_name
domain_dn = domain_name.replace(".", ",DC=")
if user_ou == "Users":
user_ou = user_ou.replace(",", ",CN=")
else:
user_ou = user_ou.replace(",", ",OU=")
if user_ou == "Users":
dn = f"CN={full_name},CN={user_ou},DC={domain_dn}"
else:
dn = f"CN={full_name},OU={user_ou},DC={domain_dn}"
self.logger.info("User DN=" + dn)
parameters = {
"givenName": first_name,
"sn": last_name,
"sAMAccountName": logon_name,
"userPassword": password,
"userPrincipalName": user_principal_name,
}
if additional_parameters:
parameters.update(additional_parameters)
        # Note: log_parameters aliases the same dict, so this pop also strips the
        # userPassword attribute from the parameters sent to add_user below (the
        # password itself is still passed to add_user as a separate argument).
        log_parameters = parameters
        log_parameters.pop("userPassword")
        self.logger.info(log_parameters)
return {
Output.SUCCESS: self.connection.client.add_user(dn, user_account_control, use_ssl, password, parameters)
}
| 34.903226
| 116
| 0.616451
|
57eb891a503f868e14859f7f55a5acf0c1fe712d
| 5,800
|
py
|
Python
|
tensorflow_datasets/question_answering/tydi_qa.py
|
gijswijnholds/datasets
|
a58756db17aeacc15254a0e106ad1207d0ac01cc
|
[
"Apache-2.0"
] | 1
|
2021-02-04T10:07:18.000Z
|
2021-02-04T10:07:18.000Z
|
tensorflow_datasets/question_answering/tydi_qa.py
|
gijswijnholds/datasets
|
a58756db17aeacc15254a0e106ad1207d0ac01cc
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_datasets/question_answering/tydi_qa.py
|
gijswijnholds/datasets
|
a58756db17aeacc15254a0e106ad1207d0ac01cc
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""TyDi QA: Information-Seeking QA in Typologically Diverse Languages."""
# TODO(adarob): Add primary tasks (SelectP and MinSpan).
import os
import tensorflow_datasets.public_api as tfds
from tensorflow_datasets.question_answering import qa_utils
_CITATION = """\
@article{tydiqa,
title = {TyDi QA: A Benchmark for Information-Seeking Question Answering in Typologically Diverse Languages},
  author = {Jonathan H. Clark and Eunsol Choi and Michael Collins and Dan Garrette and Tom Kwiatkowski and Vitaly Nikolaev and Jennimaria Palomaki},
year = {2020},
journal = {Transactions of the Association for Computational Linguistics}
}
"""
_DESCRIPTION = """\
TyDi QA is a question answering dataset covering 11 typologically diverse \
languages with 204K question-answer pairs. The languages of TyDi QA are \
diverse with regard to their typology -- the set of linguistic features that \
each language expresses -- such that we expect models performing well on this \
set to generalize across a large number of the languages in the world. It \
contains language phenomena that would not be found in English-only corpora. \
To provide a realistic information-seeking task and avoid priming effects, \
questions are written by people who want to know the answer, but don’t know \
the answer yet, (unlike SQuAD and its descendents) and the data is collected \
directly in each language without the use of translation (unlike MLQA and \
XQuAD).
IMPORTANT: Please choose your training split carefully.
Training splits:
'train': This is the GoldP task from the original TyDi QA paper \
[https://arxiv.org/abs/2003.05002] that has original-language labeled \
training data.
'translate-train-*': These splits are the automatic translations from English \
to each target language used in the translate-train baselines in the XTREME \
paper [https://arxiv.org/abs/2003.11080]. This purposefully ignores the \
non-English TyDiQA-GoldP training data to simulate the transfer learning \
scenario where original-language data is not available and system builders \
must rely on labeled English data plus existing machine translation systems.
Typically, you should use EITHER the train or translate-train split, but not both.
"""
LANGUAGES = {
"ar": "arabic",
"bn": "bengali",
"en": "english",
"fi": "finnish",
"id": "indonesian",
"ko": "korean",
"ru": "russian",
"sw": "swahili",
"te": "telugu",
}
_GOLD_URL_PREFIX = "https://storage.googleapis.com/tydiqa/v1.1/tydiqa-goldp-v1.1-"
_GOLD_TRANSLATE_URL_FORMAT = "https://storage.googleapis.com/xtreme_translations/TyDiQA-GoldP/translate-train/tydiqa.translate.train.en-{lang_iso}.json"
class TydiQAConfig(tfds.core.BuilderConfig):
"""BuilderConfig for TydiQa."""
class TydiQA(tfds.core.GeneratorBasedBuilder):
"""TyDi QA: Information-Seeking QA in Typologically Diverse Languages."""
BUILDER_CONFIGS = [
TydiQAConfig(
name="goldp",
description="Gold passage (GoldP) task (https://github.com/google-research-datasets/tydiqa/tree/master/gold_passage_baseline).",
version=tfds.core.Version("2.0.0"),
),
]
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=qa_utils.SQUADLIKE_FEATURES,
# No default supervised_keys (as we have to pass both question
# and context as input).
supervised_keys=None,
homepage="https://github.com/google-research-datasets/tydiqa",
citation=_CITATION,
)
def _split_generators(self, dl_manager):
urls_to_download = {
"train": _GOLD_URL_PREFIX + "train.json",
"validation": _GOLD_URL_PREFIX + "dev.json",
"lang-validation": _GOLD_URL_PREFIX + "dev.tgz",
}
for lang_iso in LANGUAGES:
if lang_iso == "en":
continue
urls_to_download[
f"translate-train-{lang_iso}"] = _GOLD_TRANSLATE_URL_FORMAT.format(
lang_iso=lang_iso)
downloaded_files = dl_manager.download_and_extract(urls_to_download)
return [
tfds.core.SplitGenerator(
name=tfds.Split.TRAIN,
gen_kwargs={"filepath": downloaded_files["train"]}),
tfds.core.SplitGenerator(
name=tfds.Split.VALIDATION,
gen_kwargs={"filepath": downloaded_files["validation"]}),
] + [
tfds.core.SplitGenerator( # pylint:disable=g-complex-comprehension
name=f"validation-{lang_iso}",
gen_kwargs={
"filepath":
os.path.join(downloaded_files["lang-validation"],
f"tydiqa-goldp-v1.1-dev/tydiqa-goldp-dev-{lang_name}.json")
})
for lang_iso, lang_name in LANGUAGES.items()
] + [
tfds.core.SplitGenerator( # pylint:disable=g-complex-comprehension
name=f"translate-train-{lang_iso}",
gen_kwargs={
"filepath": downloaded_files[f"translate-train-{lang_iso}"]
})
for lang_iso, lang_name in LANGUAGES.items() if lang_iso != "en"
]
def _generate_examples(self, filepath):
return qa_utils.generate_squadlike_examples(filepath)
| 38.666667
| 152
| 0.704138
|
8d518409557e684da19870f518a31a16387b21d0
| 838
|
py
|
Python
|
rl_coach/tests/architectures/mxnet_components/embedders/test_vector_embedder.py
|
jl45621/coach
|
9a895a1ac73aff44b2e6eb8e4d01e8ec35ceb084
|
[
"Apache-2.0"
] | 1,960
|
2017-10-19T10:31:24.000Z
|
2020-11-07T18:19:23.000Z
|
rl_coach/tests/architectures/mxnet_components/embedders/test_vector_embedder.py
|
jl45621/coach
|
9a895a1ac73aff44b2e6eb8e4d01e8ec35ceb084
|
[
"Apache-2.0"
] | 349
|
2017-10-21T17:17:18.000Z
|
2020-10-17T13:39:56.000Z
|
rl_coach/tests/architectures/mxnet_components/embedders/test_vector_embedder.py
|
jl45621/coach
|
9a895a1ac73aff44b2e6eb8e4d01e8ec35ceb084
|
[
"Apache-2.0"
] | 428
|
2017-10-21T01:32:58.000Z
|
2020-11-07T13:49:49.000Z
|
import mxnet as mx
import os
import pytest
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
from rl_coach.architectures.embedder_parameters import InputEmbedderParameters
from rl_coach.architectures.mxnet_components.embedders.vector_embedder import VectorEmbedder
from rl_coach.base_parameters import EmbedderScheme
@pytest.mark.unit_test
def test_vector_embedder():
params = InputEmbedderParameters(scheme=EmbedderScheme.Medium)
emb = VectorEmbedder(params=params)
emb.initialize()
input_data = mx.nd.random.uniform(low=0, high=255, shape=(10, 100))
output = emb(input_data)
assert len(output.shape) == 2 # since last block was flatten
assert output.shape[0] == 10 # since batch_size is 10
assert output.shape[1] == 256 # since last dense layer has 256 units
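# To run just this test (assuming a standard pytest setup for the repo):
#   pytest -m unit_test rl_coach/tests/architectures/mxnet_components/embedders/test_vector_embedder.py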
| 36.434783
| 92
| 0.775656
|
7bbfa9207b5966f94b1422ef1737323620d466e3
| 1,910
|
py
|
Python
|
CCC 2002/Junior/J1.py
|
hand-burger/CCC-Solutions
|
7136c7bb3d0333a4658825d686b5c97e03860032
|
[
"MIT"
] | 2
|
2021-07-20T18:33:05.000Z
|
2021-07-20T18:33:08.000Z
|
CCC 2002/Junior/J1.py
|
hand-burger/CCC-Solutions
|
7136c7bb3d0333a4658825d686b5c97e03860032
|
[
"MIT"
] | null | null | null |
CCC 2002/Junior/J1.py
|
hand-burger/CCC-Solutions
|
7136c7bb3d0333a4658825d686b5c97e03860032
|
[
"MIT"
] | null | null | null |
num = int(input())
if num == 0:
print(' * * *')
print('* *')
print('* *')
print('* *')
print()
print('* *')
print('* *')
print('* *')
print(' * * *')
elif num == 1:
print()
print(' *')
print(' *')
print(' *')
print()
print(' *')
print(' *')
print(' *')
print()
elif num == 2:
print(' * * *')
print(' *')
print(' *')
print(' *')
print(' * * *')
print('*')
print('*')
print('*')
print(' * * *')
elif num == 3:
print(' * * *')
print(' *')
print(' *')
print(' *')
print(' * * *')
print(' *')
print(' *')
print(' *')
print(' * * *')
elif num == 4:
print()
print('* *')
print('* *')
print('* *')
print(' * * *')
print(' *')
print(' *')
print(' *')
print()
elif num == 5:
print(' * * *')
print('*')
print('*')
print('*')
print(' * * *')
print(' *')
print(' *')
print(' *')
print(' * * *')
elif num == 6:
print(' * * *')
print('*')
print('*')
print('*')
print(' * * *')
print('* *')
print('* *')
print('* *')
print(' * * *')
elif num == 7:
print(' * * *')
print(' *')
print(' *')
print(' *')
print()
print(' *')
print(' *')
print(' *')
print()
elif num == 8:
print(' * * *')
print('* *')
print('* *')
print('* *')
print(' * * *')
print('* *')
print('* *')
print('* *')
print(' * * *')
elif num == 9:
print(' * * *')
print('* *')
print('* *')
print('* *')
print(' * * *')
print(' *')
print(' *')
print(' *')
print(' * * *')
| 18.543689
| 20
| 0.282199
|
02fec4799e97b0380881c62389e53243d6fa225d
| 1,569
|
py
|
Python
|
HackerRank/Two_Strings.py
|
RafayAK/CodingPrep
|
718eccb439db0f6e727806964766a40e8234c8a9
|
[
"MIT"
] | 5
|
2019-09-07T17:31:17.000Z
|
2022-03-05T09:59:46.000Z
|
HackerRank/Two_Strings.py
|
RafayAK/CodingPrep
|
718eccb439db0f6e727806964766a40e8234c8a9
|
[
"MIT"
] | null | null | null |
HackerRank/Two_Strings.py
|
RafayAK/CodingPrep
|
718eccb439db0f6e727806964766a40e8234c8a9
|
[
"MIT"
] | 2
|
2019-09-07T17:31:24.000Z
|
2019-10-28T16:10:52.000Z
|
'''
Given two strings, determine if they share a common substring. A substring may be as small as one character.
For example, the words "a", "and", "art" share the common substring "a". The words "be" and "cat" do not share a substring.
Function Description
Complete the function twoStrings in the editor below. It should return a string, either YES or NO, based on whether the strings share a common substring.
twoStrings has the following parameter(s):
s1, s2: two strings to analyze.
Input Format
The first line contains a single integer q, the number of test cases.
The following q pairs of lines are as follows:
The first line contains string s1.
The second line contains string s2.
Constraints
s1 and s2 consist of characters in the range ascii[a-z].
Output Format
For each pair of strings, return YES or NO.
Sample Input
2
hello
world
hi
world
Sample Output
YES
NO
'''
# !/bin/python3
import math
import os
import random
import re
import sys
from collections import Counter
# Complete the twoStrings function below.
def twoStrings(s1, s2):
# only need to satisfy if has any matching letter
dicty = Counter(s1)
found = False
for letter in s2:
if dicty[letter] != 0:
found=True
break
if found:
print('YES')
else:
print('NO')
if __name__ == '__main__':
#fptr = open(os.environ['OUTPUT_PATH'], 'w')
q = int(input())
for q_itr in range(q):
s1 = input()
s2 = input()
result = twoStrings(s1, s2)
#fptr.write(result + '\n')
#fptr.close()
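# --- Added illustrative sketch (not part of the original submission) ---
# Since only shared letters matter, the same check can be written with a set
# intersection; this helper mirrors the Counter-based loop above.
def two_strings_set(s1, s2):
    """Return 'YES' if s1 and s2 share at least one character, else 'NO'."""
    return 'YES' if set(s1) & set(s2) else 'NO'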
| 19.37037
| 152
| 0.678139
|
81ed1bf36b67478db37001c0a026b99d145708eb
| 1,006
|
py
|
Python
|
configs/selfsup/mae/mae_vit-base-p16_8xb512-coslr-400e_severstal.py
|
nolaurence/mmselfsup
|
76b85cffaa9b8c00a57f4426ce0a936cba4b98ff
|
[
"Apache-2.0"
] | null | null | null |
configs/selfsup/mae/mae_vit-base-p16_8xb512-coslr-400e_severstal.py
|
nolaurence/mmselfsup
|
76b85cffaa9b8c00a57f4426ce0a936cba4b98ff
|
[
"Apache-2.0"
] | null | null | null |
configs/selfsup/mae/mae_vit-base-p16_8xb512-coslr-400e_severstal.py
|
nolaurence/mmselfsup
|
76b85cffaa9b8c00a57f4426ce0a936cba4b98ff
|
[
"Apache-2.0"
] | null | null | null |
_base_ = [
'../_base_/models/mae_vit-base-p16.py',
'../_base_/datasets/severstal.py',
'../_base_/schedules/adamw_coslr-200e_in1k.py',
'../_base_/default_runtime.py',
]
# dataset
data = dict(samples_per_gpu=64, workers_per_gpu=1)
# optimizer
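# Note (added): the lr below appears to follow the MAE linear lr scaling rule,
# i.e. base lr 1.5e-4 scaled by batch size / 256; the factor 64 matches
# samples_per_gpu above, and any multi-GPU scaling is not reflected in it.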
optimizer = dict(
lr=1.5e-4 * 64 / 256,
paramwise_options={
'norm': dict(weight_decay=0.),
'bias': dict(weight_decay=0.),
'pos_embed': dict(weight_decay=0.),
'mask_token': dict(weight_decay=0.),
'cls_token': dict(weight_decay=0.)
})
optimizer_config = dict()
# learning policy
lr_config = dict(
policy='StepFixCosineAnnealing',
min_lr=0.0,
warmup='linear',
warmup_iters=40,
warmup_ratio=1e-4,
warmup_by_epoch=True,
by_epoch=False)
# schedule
runner = dict(max_epochs=400)
# runtime
checkpoint_config = dict(interval=1, max_keep_ckpts=3, out_dir='')
persistent_workers = True
log_config = dict(
interval=100, hooks=[
dict(type='TextLoggerHook'),
])
| 23.395349
| 66
| 0.654076
|
ac98e5a5e5ccff0743074a21e3862b56f278c443
| 3,947
|
py
|
Python
|
q2_coordinates/tests/test_stats.py
|
antgonza/q2-coordinates
|
fe1cce8eb14ff4ca7f305010c69d5747878cb084
|
[
"BSD-3-Clause"
] | null | null | null |
q2_coordinates/tests/test_stats.py
|
antgonza/q2-coordinates
|
fe1cce8eb14ff4ca7f305010c69d5747878cb084
|
[
"BSD-3-Clause"
] | null | null | null |
q2_coordinates/tests/test_stats.py
|
antgonza/q2-coordinates
|
fe1cce8eb14ff4ca7f305010c69d5747878cb084
|
[
"BSD-3-Clause"
] | null | null | null |
# ----------------------------------------------------------------------------
# Copyright (c) 2017--, QIIME 2 development team.
#
# Distributed under the terms of the Lesser GPL 3.0 licence.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from .test_coordinates import CoordinatesTestPluginBase
from qiime2.plugins import coordinates
import qiime2
import pandas as pd
import numpy as np
from skbio import DistanceMatrix
import pandas.util.testing as pdt
from q2_coordinates.stats import autocorr_from_dm, match_ids
from q2_coordinates._utilities import _load_and_validate
# these tests make sure the actions run and accept appropriate inputs
class TestStats(CoordinatesTestPluginBase):
def setUp(self):
super().setUp()
self.tmpd = self.temp_dir.name
dm_fp = self.get_data_path('geodesic_distance_matrix.qza')
self.dm = qiime2.Artifact.load(dm_fp)
alpha_fp = self.get_data_path('alpha_diversity.qza')
alpha = qiime2.Artifact.load(alpha_fp)
self.alpha = alpha.view(qiime2.Metadata).get_column('observed_otus')
# does it run
def test_autocorr(self):
coordinates.actions.autocorr(
distance_matrix=self.dm,
metadata=self.alpha,
intersect_ids=True)
def test_autocorr_nonintersecting_ids_warning(self):
with self.assertRaisesRegex(ValueError, "matrix are missing"):
coordinates.actions.autocorr(
distance_matrix=self.dm,
metadata=self.alpha,
intersect_ids=False)
def test_match_ids(self):
md = pd.Series({'peanuts': [1, 2, 3, 4]})
distances = [[0, 1, 1, 1], [1, 0, 1, 1], [1, 1, 0, 1], [1, 1, 1, 0]]
dm = DistanceMatrix(distances, ids=['a', 'b', 'c', 'd'])
with self.assertRaisesRegex(ValueError, "No samples match"):
match_ids(md, dm, intersect_ids=True)
def test_autocorr_from_dm(self):
np.random.seed(124)
exp = pd.DataFrame(
{'Moran\'s I':
[-0.00975936992946, -0.0909090909091, 1.08994430817,
0.275737677144],
'Geary\'s C':
[0.715863556271, 1.0, -1.32713928249, 0.0922313064718]},
index=['Test Statistic', 'Expected Value', 'Z norm', 'p norm'])
distance_matrix = self.dm.view(DistanceMatrix)
metadata = self.alpha.to_series()
metadata, distance_matrix = match_ids(
metadata, distance_matrix, intersect_ids=True)
results, weights = autocorr_from_dm(
metadata, distance_matrix, permutations=0,
two_tailed=True, transformation='R')
pdt.assert_frame_equal(results, exp)
class TestUtilities(CoordinatesTestPluginBase):
def test_load_and_validate(self):
md = pd.DataFrame(
{'a': [1, 2, 3], 'b': [2, 3, np.nan], 'c': [0, 0, 0]},
index=['a', 'b', 'c'])
md.index.name = 'sampleid'
md = qiime2.Metadata(md)
# pass: select only valid columns
md2 = _load_and_validate(
md, ['a', 'c'], ['a', 'c'], missing_data="error")
exp = pd.DataFrame({'a': [1, 2, 3], 'c': [0, 0, 0]},
index=['a', 'b', 'c'])
pdt.assert_frame_equal(md2, exp, check_dtype=False, check_names=False)
# pass: ignore nans
md2 = _load_and_validate(
md, ['a', 'b', 'c'], ['a', 'b', 'c'], missing_data="ignore")
exp = pd.DataFrame({'a': [1, 2], 'b': [2, 3], 'c': [0, 0]},
index=['a', 'b'])
pdt.assert_frame_equal(md2, exp, check_dtype=False, check_names=False)
# error: catch nans
with self.assertRaisesRegex(ValueError, "missing metadata"):
md2 = _load_and_validate(
md, ['a', 'b', 'c'], ['a', 'b', 'c'], missing_data="error")
| 40.690722
| 78
| 0.578921
|
50d4db3758221ff2f38349cdc736995119435779
| 7,172
|
py
|
Python
|
pywb/ftwhelper.py
|
Pterosaur/WAFBench
|
235438fe106445de375e01f3b0c9a7c43c1dc223
|
[
"MIT"
] | 67
|
2019-05-12T06:46:30.000Z
|
2022-02-11T16:30:50.000Z
|
pywb/ftwhelper.py
|
Pterosaur/WAFBench
|
235438fe106445de375e01f3b0c9a7c43c1dc223
|
[
"MIT"
] | 4
|
2019-09-17T09:29:47.000Z
|
2021-02-24T11:49:51.000Z
|
pywb/ftwhelper.py
|
Pterosaur/WAFBench
|
235438fe106445de375e01f3b0c9a7c43c1dc223
|
[
"MIT"
] | 19
|
2019-07-01T05:08:36.000Z
|
2021-06-20T03:36:55.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
""" FTW helper
This module exports:
    - FTW_TYPE: an enum that contains RULE, TEST, STAGE, PACKETS.
    - FtwDict: a subclass of dict that stores data coming from ftw.
    - FtwStr: a subclass of str that stores data coming from ftw.
    - get: a function to get a target_type generator from sources.
This is a wrapper that provides access to FTW (https://github.com/fastly/ftw).
"""
import os
import yaml
import types
import ftw
import pywbutil
yaml.warnings({'YAMLLoadWarning': False})
__all__ = [
"FTW_TYPE",
"FtwDict",
"FtwStr",
"get",
]
class FTW_TYPE(object):
""" FTW_TYPE
RULE is a FtwDict
TEST is a FtwDict
STAGE is a FtwDict
PACKETS is a FtwStr
"""
RULE = 0
TEST = 1
STAGE = 2
PACKETS = 3
INVALID = 4
class FtwDict(dict):
""" Store the data from ftw
Argument:
- ftw_type: a value of FTW_TYPE.
    - original_file: a string path specifying
        where this dict comes from
- original_data: an internal type of ftw.
- *args, **kw: arguments to initialize a dict.
It's the dict format of original data.
"""
def __new__(cls, ftw_type, original_file, original_data, *args, **kw):
obj = dict.__new__(cls, *args, **kw)
return obj
def __init__(self, ftw_type, original_file, original_data, *args, **kw):
self.update(*args, **kw)
self.FTW_TYPE = ftw_type
self.ORIGINAL_FILE = original_file
self.ORIGINAL_DATA = original_data
class FtwStr(str):
""" Store the data from ftw
Argument:
- ftw_type: a value of FTW_TYPE.
    - original_file: a string path specifying
        where this string comes from
    - original_data: a string.
"""
def __new__(cls, ftw_type, original_file, original_data):
obj = str.__new__(cls, original_data)
return obj
def __init__(self, ftw_type, original_file, original_data):
self.FTW_TYPE = ftw_type
self.ORIGINAL_FILE = original_file
@pywbutil.accept_iterable
@pywbutil.expand_nest_generator
def _load_ftw_rules_from_strings(strings):
for string_ in strings:
ftw_rule = ftw.ruleset.Ruleset(yaml.load(string_, Loader=yaml.FullLoader))
rule = FtwDict(
FTW_TYPE.RULE,
None,
ftw_rule,
ftw_rule.yaml_file)
yield rule
@pywbutil.accept_iterable
@pywbutil.expand_nest_generator
def _load_ftw_rules_from_files(files):
for file_ in files:
file_ = os.path.abspath(os.path.expanduser(file_))
if os.path.splitext(file_)[-1].lower() != ".yaml":
raise ValueError(file_ + "is not a .yaml file")
rules = ftw.util.get_rulesets(file_, False)
for ftw_rule in rules:
rule = FtwDict(
FTW_TYPE.RULE,
file_,
ftw_rule,
ftw_rule.yaml_file)
yield rule
@pywbutil.accept_iterable
@pywbutil.expand_nest_generator
def _load_ftw_rules_from_paths(paths):
for path_ in paths:
path_ = os.path.abspath(os.path.expanduser(path_))
if os.path.isdir(path_):
for root, _, files in os.walk(path_):
for file_ in files:
file_ext = os.path.splitext(file_)[-1].lower()
if file_ext != ".yaml":
continue
yield _load_ftw_rules_from_files(
os.path.join(root, file_))
elif os.path.isfile(path_):
file_ext = os.path.splitext(path_)[-1].lower()
if file_ext != ".yaml":
raise ValueError(path_ + " is not YAML file with .yaml")
yield _load_ftw_rules_from_files((path_))
else:
raise IOError("No such file or path: '%s'" % (path_, ))
def _convert(source, target_type):
if not hasattr(source, "FTW_TYPE") \
or source.FTW_TYPE == FTW_TYPE.INVALID \
or target_type == FTW_TYPE.INVALID:
raise ValueError("%s is invalid type" % (source, ))
    if source.FTW_TYPE > target_type:
        raise ValueError(
            "Cannot do this upper convert from %s to %s"
            % (source.FTW_TYPE, target_type))
    if source.FTW_TYPE == target_type:
yield source
# ftw.stage => pkt
elif source.FTW_TYPE == FTW_TYPE.STAGE \
and target_type == FTW_TYPE.PACKETS:
http_ua = ftw.http.HttpUA()
http_ua.request_object = source.ORIGINAL_DATA.input
http_ua.build_request()
packet = FtwStr(
FTW_TYPE.PACKETS,
source.ORIGINAL_FILE,
http_ua.request)
yield packet
# ftw.test => ftw.stage
elif source.FTW_TYPE == FTW_TYPE.TEST \
and target_type == FTW_TYPE.STAGE:
for ftw_stage in source.ORIGINAL_DATA.stages:
stage = FtwDict(
FTW_TYPE.STAGE,
source.ORIGINAL_FILE,
ftw_stage,
ftw_stage.stage_dict)
yield stage
# ftw.rule => ftw.test
elif source.FTW_TYPE == FTW_TYPE.RULE \
and target_type == FTW_TYPE.TEST:
for ftw_test in source.ORIGINAL_DATA.tests:
test = FtwDict(
FTW_TYPE.TEST,
source.ORIGINAL_FILE,
ftw_test,
ftw_test.test_dict)
yield test
# ftw.* => ftw.*
else:
internal_type = source.FTW_TYPE + 1
source = _convert(source, internal_type)
visitor = source.__iter__()
visit_stack = [visitor]
while visit_stack:
visitor = visit_stack[-1]
try:
visitor = next(visitor)
if visitor.FTW_TYPE < target_type:
visitor = _convert(visitor, visitor.FTW_TYPE + 1)
visit_stack.append(visitor)
else:
yield visitor
except StopIteration:
visit_stack.pop()
def get(source, target_type):
""" Get a target_type generator from sources.
Arguments:
source:
            a set of paths to YAML (.yaml) rule files for ftw,
            strings in YAML format for ftw,
            or
            objects that come from ftwhelper.get
        target_type: an enum value of FTW_TYPE to specify the generator type
    Return a generator that generates items of target_type
"""
if hasattr(source, "FTW_TYPE"):
for item in _convert(source, target_type):
yield item
else:
if not hasattr(source, "__iter__"):
sources = [source]
else:
sources = source
for source in sources:
path_ = os.path.abspath(os.path.expanduser(source))
if os.path.exists(path_):
rules = _load_ftw_rules_from_paths(path_)
else:
rules = _load_ftw_rules_from_strings(source)
for rule in rules:
for destination in _convert(rule, target_type):
yield destination
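# --- Added illustrative usage sketch (not part of the original module) ---
# The YAML path below is an assumption made for this example, not a file that
# ships with pywb; it shows how `get` expands a rule file down to raw packets.
if __name__ == "__main__":
    for packet in get("rules/example_rule.yaml", FTW_TYPE.PACKETS):
        print(packet)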
| 30.519149
| 82
| 0.585192
|
612112030590e1d710e6f0301ddd6586ea2709cb
| 807
|
py
|
Python
|
QandAProj/manage.py
|
Olek300/QandA
|
b740ece3b7dca8e0bd3c4f62e1a4d91e44b2b868
|
[
"BSD-3-Clause"
] | null | null | null |
QandAProj/manage.py
|
Olek300/QandA
|
b740ece3b7dca8e0bd3c4f62e1a4d91e44b2b868
|
[
"BSD-3-Clause"
] | null | null | null |
QandAProj/manage.py
|
Olek300/QandA
|
b740ece3b7dca8e0bd3c4f62e1a4d91e44b2b868
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "QandAProj.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| 35.086957
| 77
| 0.643123
|
af668bff702327b51b72819d8beab3bc7df642e3
| 6,091
|
py
|
Python
|
alistock/settings.py
|
taojy123/AliStock
|
1bedb6fbc985b1e062a7a9c04ea7a23e56f7821c
|
[
"MIT"
] | null | null | null |
alistock/settings.py
|
taojy123/AliStock
|
1bedb6fbc985b1e062a7a9c04ea7a23e56f7821c
|
[
"MIT"
] | null | null | null |
alistock/settings.py
|
taojy123/AliStock
|
1bedb6fbc985b1e062a7a9c04ea7a23e56f7821c
|
[
"MIT"
] | null | null | null |
# Django settings for alistock project.
import os
import uuid
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
LOGIN_URL = '/loginpage/'
if 'SERVER_SOFTWARE' in os.environ:
from sae.const import (
MYSQL_HOST, MYSQL_PORT, MYSQL_USER, MYSQL_PASS, MYSQL_DB
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': MYSQL_DB,
'USER': MYSQL_USER,
'PASSWORD': MYSQL_PASS,
'HOST': MYSQL_HOST,
'PORT': MYSQL_PORT,
}
}
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'data.db', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.getcwd(), 'static').replace('\\','/').decode("gbk"),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'mc8iwu&l9l**d-qcu5)l02woe^7@44t#(&2p85bw)+mrp#y6zn-b350b0cf-a22c-4a5d-9459-2b865590a8d4'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'alistock.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'alistock.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.getcwd(), 'templates').replace('\\','/').decode("gbk"),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'alistock',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| 34.027933
| 112
| 0.669348
|
d642bbb441ec8865ebf2d698779faee2c0fc47f9
| 4,062
|
py
|
Python
|
test/integration/ggrc/proposal/test_proposal_email.py
|
j0gurt/ggrc-core
|
84662dc85aa8864c907eabe70b8efccf92298a1f
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-01-04T10:55:14.000Z
|
2019-01-04T10:55:14.000Z
|
test/integration/ggrc/proposal/test_proposal_email.py
|
farcry4998/ggrc-core
|
c469039dabb55033c1b379850feb19e8dda2e2a1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/integration/ggrc/proposal/test_proposal_email.py
|
farcry4998/ggrc-core
|
c469039dabb55033c1b379850feb19e8dda2e2a1
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2018 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""This module contains test about sending emails for proposals."""
import ddt
import mock
from ggrc.notifications import fast_digest
from integration.ggrc import TestCase
from integration.ggrc.api_helper import Api
from integration.ggrc.models import factories
@ddt.ddt
class TestProposalEmail(TestCase):
"""Test case about email sending and email presenting for proposals."""
def setUp(self):
super(TestProposalEmail, self).setUp()
self.api = Api()
self.client.get("/login")
@ddt.data(True, False)
def test_email_presentation(self, is_admin):
"""Test presentation of proposal digest email if is_admin is {0}."""
person = factories.PersonFactory()
self.api.set_user(person=person)
with mock.patch("ggrc.rbac.permissions.is_admin", return_value=is_admin):
resp = self.client.get("/_notifications/show_fast_digest")
if is_admin:
self.assert200(resp)
else:
self.assert403(resp)
def test_email_sending(self):
"""Test sending emails about proposals."""
role_1 = factories.AccessControlRoleFactory(object_type="Control",
notify_about_proposal=True)
role_2 = factories.AccessControlRoleFactory(object_type="Control",
notify_about_proposal=True)
role_3 = factories.AccessControlRoleFactory(object_type="Control",
notify_about_proposal=False)
with factories.single_commit():
control = factories.ControlFactory()
person_1 = factories.PersonFactory() # has 1 role
person_2 = factories.PersonFactory() # has no roles
person_3 = factories.PersonFactory() # has 2 roles
factories.PersonFactory() # not related to control at all
factories.AccessControlPersonFactory(
ac_list=control.acr_acl_map[role_1],
person=person_1
)
factories.AccessControlPersonFactory(
ac_list=control.acr_acl_map[role_1],
person=person_3
)
factories.AccessControlPersonFactory(
ac_list=control.acr_acl_map[role_2],
person=person_3
)
factories.AccessControlPersonFactory(
ac_list=control.acr_acl_map[role_3],
person=person_2
)
proposal_1 = factories.ProposalFactory(
instance=control,
content={
"fields": {"title": "a"},
"access_control_list": {},
"custom_attribute_values": {},
"mapping_fields": {},
"mapping_list_fields": {},
},
agenda="agenda 1")
proposal_2 = factories.ProposalFactory(
instance=control,
content={
"fields": {"title": "b"},
"access_control_list": {},
"custom_attribute_values": {},
"mapping_fields": {},
"mapping_list_fields": {},
},
agenda="agenda 2")
self.assertIsNone(proposal_1.proposed_notified_datetime)
self.assertIsNone(proposal_2.proposed_notified_datetime)
with mock.patch("google.appengine.api.mail.send_mail") as mailer_mock:
with mock.patch.object(fast_digest.DIGEST_TMPL,
"render") as bodybuilder_mock:
fast_digest.send_notification()
self.assertIsNotNone(proposal_1.proposed_notified_datetime)
self.assertIsNotNone(proposal_2.proposed_notified_datetime)
self.assertEqual(2, len(bodybuilder_mock.call_args_list))
self.assertEqual(2, len(mailer_mock.call_args_list))
# email to each required person
self.assertListEqual(
sorted([person_1.email, person_3.email]),
sorted([a[1]["to"] for a in mailer_mock.call_args_list]))
    # no matter how many roles, each proposal should be notified
# only once for that person
self.assertListEqual(
[2] * 2,
[len(a[1]["proposals"]) for a in bodybuilder_mock.call_args_list])
| 39.436893
| 78
| 0.651403
|
a2bd5a1e86a8bf866a1d15e264c2dcdd299df47c
| 3,998
|
py
|
Python
|
catalyst/dl/scripts/run.py
|
ssktotoro/catalyst
|
2ff687e802250772f8614583af933d6613f87788
|
[
"Apache-2.0"
] | 1
|
2021-03-02T12:06:32.000Z
|
2021-03-02T12:06:32.000Z
|
catalyst/dl/scripts/run.py
|
ssktotoro/catalyst
|
2ff687e802250772f8614583af933d6613f87788
|
[
"Apache-2.0"
] | null | null | null |
catalyst/dl/scripts/run.py
|
ssktotoro/catalyst
|
2ff687e802250772f8614583af933d6613f87788
|
[
"Apache-2.0"
] | 1
|
2021-06-11T16:33:30.000Z
|
2021-06-11T16:33:30.000Z
|
#!/usr/bin/env python
import argparse
from argparse import ArgumentParser
import os
from pathlib import Path
import sys
from catalyst.dl.scripts.misc import parse_args_uargs
from catalyst.runners.config import ConfigRunner
from catalyst.settings import SETTINGS
from catalyst.utils.distributed import get_rank
from catalyst.utils.misc import boolean_flag, set_global_seed
from catalyst.utils.sys import dump_code, dump_environment, get_config_runner
from catalyst.utils.torch import prepare_cudnn
if SETTINGS.hydra_required:
from catalyst.dl.scripts.hydra_run import main as hydra_main
def build_args(parser: ArgumentParser):
"""Constructs the command-line arguments for ``catalyst-dl run``."""
parser.add_argument(
"--config",
"--configs",
"-C",
nargs="+",
help="path to config/configs",
metavar="CONFIG_PATH",
dest="configs",
required=False,
)
parser.add_argument("--expdir", type=str, default=None)
parser.add_argument("--logdir", type=str, default=None)
parser.add_argument("--baselogdir", type=str, default=None)
# parser.add_argument(
# "--resume", default=None, type=str, metavar="PATH", help="path to latest checkpoint",
# )
# parser.add_argument(
# "--autoresume",
# type=str,
# help=(
# "try automatically resume from logdir//{best,last}_full.pth " "if --resume is empty"
# ),
# required=False,
# choices=["best", "last"],
# default=None,
# )
parser.add_argument("--seed", type=int, default=42)
boolean_flag(
parser,
"apex",
default=os.getenv("USE_APEX", "0") == "1",
help="Enable/disable using of Nvidia Apex extension",
)
boolean_flag(
parser,
"amp",
default=os.getenv("USE_AMP", "0") == "1",
help="Enable/disable using of PyTorch AMP extension",
)
boolean_flag(
parser,
"fp16",
default=os.getenv("USE_FP16", "0") == "1",
help="Run in half-precision mode",
)
boolean_flag(
parser, "ddp", default=os.getenv("USE_DDP", "0") == "1", help="Run in distributed mode",
)
boolean_flag(parser, "verbose", default=None)
boolean_flag(parser, "timeit", default=None)
boolean_flag(parser, "check", default=None)
boolean_flag(parser, "overfit", default=None)
boolean_flag(
parser,
"deterministic",
default=None,
help="Deterministic mode if running in CuDNN backend",
)
boolean_flag(parser, "benchmark", default=None, help="Use CuDNN benchmark")
boolean_flag(parser, "hydra", default=None, help="Use Hydra")
return parser
def parse_args():
"""Parses the command line arguments and returns arguments and config."""
parser = argparse.ArgumentParser()
build_args(parser)
args, unknown_args = parser.parse_known_args()
return args, unknown_args
def config_main(args, unknown_args):
"""Yaml config catalyst-dl run entry point."""
args, config = parse_args_uargs(args, unknown_args)
set_global_seed(args.seed)
prepare_cudnn(args.deterministic, args.benchmark)
runner: ConfigRunner = get_config_runner(expdir=Path(args.expdir), config=config)
if get_rank() <= 0:
dump_environment(logdir=runner.logdir, config=config, configs_path=args.configs)
dump_code(expdir=args.expdir, logdir=runner.logdir)
runner.run()
def main(args, unknown_args):
"""Runs the ``catalyst-dl run`` script."""
if args.hydra:
assert SETTINGS.hydra_required, (
"catalyst[hydra] requirements are not available, to install them,"
" run `pip install catalyst[hydra]`."
)
if args.hydra:
sys.argv.remove("run")
sys.argv.remove("--hydra")
hydra_main()
else:
config_main(args, unknown_args)
if __name__ == "__main__":
args, unknown_args = parse_args()
main(args, unknown_args)
| 31.480315
| 98
| 0.650575
|
bccd083939925234f864dc919969cb6235ed7684
| 1,579
|
py
|
Python
|
aliyun-python-sdk-csb/aliyunsdkcsb/request/v20171118/ImportCredentialsRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 1,001
|
2015-07-24T01:32:41.000Z
|
2022-03-25T01:28:18.000Z
|
aliyun-python-sdk-csb/aliyunsdkcsb/request/v20171118/ImportCredentialsRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 363
|
2015-10-20T03:15:00.000Z
|
2022-03-08T12:26:19.000Z
|
aliyun-python-sdk-csb/aliyunsdkcsb/request/v20171118/ImportCredentialsRequest.py
|
yndu13/aliyun-openapi-python-sdk
|
12ace4fb39fe2fb0e3927a4b1b43ee4872da43f5
|
[
"Apache-2.0"
] | 682
|
2015-09-22T07:19:02.000Z
|
2022-03-22T09:51:46.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkcsb.endpoint import endpoint_data
class ImportCredentialsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'CSB', '2017-11-18', 'ImportCredentials')
self.set_protocol_type('https')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_Data(self):
return self.get_body_params().get('Data')
def set_Data(self,Data):
self.add_body_params('Data', Data)
def get_CsbId(self):
return self.get_query_params().get('CsbId')
def set_CsbId(self,CsbId):
self.add_query_param('CsbId',CsbId)
| 35.088889
| 74
| 0.757441
|
87fc2f58b1169f3f9946ace22ba4416a624c325e
| 3,706
|
py
|
Python
|
experiments/src/models/recognition/mtl.py
|
cricketclub/gridspace-stanford-harper-valley
|
0bd721e877c4a85d8c13ff837e68661ea6200a98
|
[
"CC-BY-4.0"
] | 10
|
2021-01-09T00:52:28.000Z
|
2022-03-29T09:16:32.000Z
|
experiments/src/models/recognition/mtl.py
|
cricketclub/gridspace-stanford-harper-valley
|
0bd721e877c4a85d8c13ff837e68661ea6200a98
|
[
"CC-BY-4.0"
] | null | null | null |
experiments/src/models/recognition/mtl.py
|
cricketclub/gridspace-stanford-harper-valley
|
0bd721e877c4a85d8c13ff837e68661ea6200a98
|
[
"CC-BY-4.0"
] | 7
|
2020-08-04T17:22:19.000Z
|
2022-02-15T06:03:49.000Z
|
import os
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.utils.rnn as rnn_utils
from src.models.recognition.las import LASEncoderDecoder
class MTLEncoderDecoder(LASEncoderDecoder):
def __init__(
self,
input_dim,
num_class,
label_maxlen,
listener_hidden_dim=256,
listener_num_layers=2,
listener_bidirectional=True,
speller_num_layers=1,
mlp_hidden_dim=128,
multi_head=1,
sos_index=0,
):
super().__init__(
input_dim,
num_class,
label_maxlen,
listener_hidden_dim=listener_hidden_dim,
listener_num_layers=listener_num_layers,
listener_bidirectional=listener_bidirectional,
speller_num_layers=speller_num_layers,
mlp_hidden_dim=mlp_hidden_dim,
multi_head=multi_head,
sos_index=sos_index,
)
self.ctc_decoder = CTCDecoder(listener_hidden_dim * 2, num_class)
def forward(
self,
inputs,
input_lengths,
ground_truth=None,
teacher_force_prob=0.9,
):
listener_feats, (listener_h, listener_c) = self.listener(
inputs, input_lengths)
listener_hc = self.combine_h_and_c(listener_h, listener_c)
las_log_probs = self.speller(
listener_feats,
ground_truth=ground_truth,
teacher_force_prob=teacher_force_prob,
)
ctc_log_probs = self.ctc_decoder(listener_feats)
return ctc_log_probs, las_log_probs, listener_hc
def get_loss(
self,
ctc_log_probs,
las_log_probs,
input_lengths,
labels,
label_lengths,
num_labels,
pad_index=0,
blank_index=0,
label_smooth=0.1,
):
ctc_loss = self.ctc_decoder.get_loss(
ctc_log_probs,
input_lengths // 4,
labels,
label_lengths,
blank=blank_index,
)
las_loss = super().get_loss(
las_log_probs,
labels,
num_labels,
pad_index=pad_index,
label_smooth=label_smooth,
)
return ctc_loss, las_loss
class CTCDecoder(nn.Module):
"""
    This is a small decoder (just one linear layer) that takes
the listener embedding from LAS and imposes a CTC
objective on the decoding.
    NOTE: This is only to be used for the Joint CTC-Attention model.
"""
def __init__(self, listener_hidden_dim, num_class):
super().__init__()
self.fc = nn.Linear(listener_hidden_dim, num_class)
self.dropout = nn.Dropout()
self.listener_hidden_dim = listener_hidden_dim
self.num_class = num_class
def forward(self, listener_outputs):
batch_size, maxlen, _ = listener_outputs.size()
logits = self.fc(self.dropout(listener_outputs))
logits = logits.view(batch_size, maxlen, self.num_class)
log_probs = F.log_softmax(logits, dim=2)
return log_probs
def get_loss(
self,
log_probs,
input_lengths,
labels,
label_lengths,
blank=0,
):
log_probs = log_probs.permute(1, 0, 2)
ctc_loss = F.ctc_loss(
log_probs.contiguous(),
labels.long(),
input_lengths.long(),
label_lengths.long(),
blank=blank,
zero_infinity=True,
)
return ctc_loss
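# --- Added illustrative shape check (not part of the original module) ---
# The sizes below are arbitrary assumptions chosen only to demonstrate the
# tensor shapes CTCDecoder expects; class index 0 is treated as the CTC blank.
if __name__ == "__main__":
    decoder = CTCDecoder(listener_hidden_dim=512, num_class=30)
    feats = torch.randn(4, 50, 512)                     # (batch, time, hidden)
    log_probs = decoder(feats)                          # -> (4, 50, 30)
    input_lengths = torch.full((4,), 50, dtype=torch.long)
    labels = torch.randint(1, 30, (4, 20))              # targets avoid the blank index
    label_lengths = torch.full((4,), 20, dtype=torch.long)
    print(decoder.get_loss(log_probs, input_lengths, labels, label_lengths).item())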
| 28.953125
| 73
| 0.58041
|
569f838a93a807f16f574f124b142c38351b272c
| 12,981
|
py
|
Python
|
toughsat/random_ksat.py
|
3cnf/descriptor-solver
|
f76a795c16c8b024841600402da2f3f7ee6fdee1
|
[
"MIT"
] | null | null | null |
toughsat/random_ksat.py
|
3cnf/descriptor-solver
|
f76a795c16c8b024841600402da2f3f7ee6fdee1
|
[
"MIT"
] | null | null | null |
toughsat/random_ksat.py
|
3cnf/descriptor-solver
|
f76a795c16c8b024841600402da2f3f7ee6fdee1
|
[
"MIT"
] | null | null | null |
# Copyright (C) 2011 by Henry Yuen, Joseph Bebel
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Random kSat Routine
# written by Henry Yuen
# for ToughSat Project
import math
import copy
import sys
import shutil
import gc
import random
verbose = 0
testing = 0
assignment = {}
n = 5
formula = []
vars = {}
postfix_counter = 0
def make_var():
global vars
global postfix_counter
postfix_counter = postfix_counter + 1
if postfix_counter%10000 == 0:
print (postfix_counter)
return "X" + str(postfix_counter)
def make_conj(exprs):
conj = ["&"]
for e in exprs:
conj.append(copy.copy(e))
return conj
def make_disj(exprs):
conj = ["V"]
for e in exprs:
conj.append(copy.copy(e))
return conj
def make_neg(expr):
conj = ["neg",copy.copy(expr)]
return conj
#def make_val(v):
# return ["val",v]
def make_iff(e1,e2):
#same as equals, essentially
return ["<->",copy.copy(e1),copy.copy(e2)]
def make_xor(e1,e2):
#pos = make_conj([e1,make_neg(e2)])
#neg = make_conj([e2,make_neg(e1)])
#return make_disj([pos,neg])
return ["+",copy.copy(e1),copy.copy(e2)]
def allocate_var(name,num):
global vars
vars[name] = []
for i in range(num):
varname = make_var()
vars[name].append(["var",varname])
def measure_formula(formula):
count = 0
if formula[0] != "var" and formula[0] != "val":
for i in range(1,len(formula)):
count += measure_formula(formula[i])
else:
return 1
return count
def print_formula(formula):
s = ""
t = formula[0]
if t == "val":
if formula[1] == 1:
s += "T"
else:
s += "F"
if t == "neg":
s += "~"
if formula[1][0] != "var":
s += "("
s += print_formula(formula[1])
if formula[1][0] != "var":
s += ")"
if t == "<->": #iff
s += "("
s += print_formula(formula[1])
s += " <--> "
s += print_formula(formula[2])
s += ")"
if t == "+": #iff
s += "("
s += print_formula(formula[1])
s += " + "
s += print_formula(formula[2])
s += ")"
if t == "var":
s += formula[1]
if t == "V":
s += "("
for j in range(1,len(formula)-1):
s += print_formula(formula[j])
s += " V "
s += print_formula(formula[len(formula)-1])
s += ")"
if t == "&":
s += "("
for j in range(1,len(formula)-1):
s += print_formula(formula[j])
s += " & "
s += print_formula(formula[len(formula)-1])
s += ")"
return s
def evaluate_formula(formula,assignment):
#print formula
t = formula[0]
if t == "val":
return formula[1]
if t == "neg":
return (evaluate_formula(formula[1],assignment) + 1) % 2
if t == "var":
return assignment[formula[1]]
if t == "V":
for j in range(1,len(formula)):
v = evaluate_formula(formula[j],assignment)
if v == 1:
return 1
return 0
if t == "&":
for j in range(1,len(formula)):
v = evaluate_formula(formula[j],assignment)
if v == 0:
return 0
return 1
if t == "+":
v1 = evaluate_formula(formula[1],assignment)
v2 = evaluate_formula(formula[2],assignment)
return (v1 + v2) % 2
if t == "<->":
v1 = evaluate_formula(formula[1],assignment)
v2 = evaluate_formula(formula[2],assignment)
return (1 + v1 + v2) % 2
return 0
#convert to CNF
def distribute_negs(formula):
#print formula
t = formula[0]
if t == "neg":
if formula[1][0] == "val":
formula[1][1] = (formula[1][1]+1)%2 #negate the value
formula = formula[1]
elif formula[1][0] == "neg":
#undo negation
formula = formula[1][1]
elif formula[1][0] in ["&","V"]:
#distribute over
if formula[1][0] == "&":
formula[1][0] = "V"
else:
formula[1][0] = "&"
for i in range(1,len(formula[1])):
formula[1][i] = make_neg(formula[1][i])
formula = formula[1]
elif formula[1][0] in ["<->"]:
#change it to xor
formula[1][0] = "+"
formula = formula[1]
elif formula[1][0] in ["+"]:
#change it to xor
formula[1][0] = "<->"
formula = formula[1]
#it may have changed
t = formula[0]
if t == "val":
return formula
if t == "var":
return formula
for i in range(1,len(formula)):
formula[i] = distribute_negs(formula[i])
return formula
def variabilize_values(formula):
t = formula[0]
if t == "var":
return formula
if t == "val":
return vars["constants"][formula[1]]
for i in range(1,len(formula)):
formula[i] = variabilize_values(formula[i])
return formula
def associatize(formula):
threshold = 3
t = formula[0]
if t in ["&","V"]:
if len(formula) > threshold:
sub_formula = [t]
sub_formula.extend(formula[threshold-1:])
#formula = [t,formula[1],sub_formula]
temp_formula = [t]
temp_formula.extend(formula[1:threshold-1])
temp_formula.append(sub_formula)
formula = temp_formula
if t not in ["val","var"]:
for i in range(1,len(formula)):
formula[i] = associatize(formula[i])
return formula
#auxiliary helper function
#to take a formula in a tree structure (consisting of AND and OR and IFF and XOR operations only)
#and assign every internal node a dummy variable
def flatten_formula_tree(formula,nodevar):
t = formula[0]
flattened_subtree = []
flattened_clause = []
if t in ["&","V","<->","+"]:
flattened_clause = [t]
for i in range(1,len(formula)):
e = formula[i]
#check if we have to create new variables (we have encountered a leaf or an internal node)
if e[0] in ["&","V","<->","+"]:
e_nodevar = ["var",make_var()]
flattened_clause.append(e_nodevar)
#now we flatten this branch of the tree
flattened_subtree.extend(flatten_formula_tree(e,e_nodevar))
else:
flattened_clause.append(e) #e1 is either neg or var
else:
return []
#so now our clause looks like: v1 <-> (v2 & v3 & ...)
flattened_subtree.append(["<->",nodevar,flattened_clause])
return flattened_subtree
def convert_1_to_3(expr):
#create auxiliary variables
v1 = ["var",make_var()]
v2 = ["var",make_var()]
v1_neg = make_neg(v1)
v2_neg = make_neg(v2)
return [make_disj([expr,v1,v2]), \
make_disj([expr,v1,v2_neg]), \
make_disj([expr,v1_neg,v2]), \
make_disj([expr,v1_neg,v2_neg])]
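# Added note: the four clauses returned above encode the equivalence
#   x  <=>  (x V a V b) & (x V a V ~b) & (x V ~a V b) & (x V ~a V ~b)
# for fresh auxiliary variables a and b, so a 1-literal clause becomes an
# equivalent set of 3-literal clauses.
# convert_to_3cnf_efficient further down calls convert_2_to_3, which is not
# defined in this excerpt; a definition consistent with convert_1_to_3
# (an assumption, not necessarily the project's original code) could be:
def convert_2_to_3(e1, e2):
    # pad a 2-literal clause with one fresh auxiliary variable
    v = ["var", make_var()]
    return [make_disj([e1, e2, v]),
            make_disj([e1, e2, make_neg(v)])]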
#extract all the variables present in a clause
#assuming all we have are <->, &, V, negs, and vars
def extract_variables(formula):
if formula[0] == "var":
return [formula[1]]
v = []
for i in range(1,len(formula)):
v2 = extract_variables(formula[i])
for u in v2:
if u not in v:
v.append(u)
return v
def write_cnf_clauses_to_dimacs(clauses):
output = ""
for clause in clauses:
s = ""
t = clause[0]
if t in ["&","V"]:
for i in range(1,len(clause)):
t = clause[i][0]
if t == "neg":
s += "-" + str(clause[i][1][1][1: ]) + " "
else: #it's a var
s += str(clause[i][1][1:]) + " "
elif t in ["neg"]:
s += "-" + str(clause[1][1][1: ]) + " "
elif t in ["var"]:
s += str(clause[1][1:]) + " "
s += "0\n"
output += s
return output
def convert_clause_to_cnf(clause):
#otherwise, make truth table!
#extract the variables in this clause
vs = extract_variables(clause)
#create all possible assignments for the v's
cnf_clauses = []
for j in range(2**len(vs)):
temp_assgn = {}
v = []
for k in range(len(vs)):
bit = (j >> k) % 2
temp_assgn[vs[k]] = bit
if bit == 0:
v.append(["var",vs[k]])
else:
v.append(make_neg(["var",vs[k]]))
#test the truth assignment
val = evaluate_formula(clause,temp_assgn)
#if we have a 0, we have winner winner chicken dinner
if val == 0:
cnf_clauses.append(make_disj(v))
return cnf_clauses
def convert_4cnf_to_3cnf_efficient(formula):
#takes a 4cnf clause and converts it to 3cnf
#print print_formula(formula)
dummyvar = ["var",make_var()]
cnf_clauses = []
part1 = formula[0:3]
part1.append(dummyvar)
#print print_formula(part1)
cnf_clauses.append(part1)
part2 = ["<->",dummyvar,["V"] + formula[3:5]]
#print print_formula(part2)
cnf_clauses.extend(convert_clause_to_cnf(part2))
return cnf_clauses
def convert_to_3cnf_canonical(formula):
#formula = distribute_negs(formula)
#print print_formula(formula)
formula = associatize(formula)
#now that we've variabilized the values
#and we've distributed the negs
#and we've associatized
#we're ready to rock and roll - convert to 3CNF baby!
#print print_formula(formula)
#our input formula is in a tree data structure now
#give dummy variables to all the internal nodes
root_nodevar = ["var",make_var()]
clauses = flatten_formula_tree(formula,root_nodevar)
#print print_formula(make_conj(clauses))
#now, we can convert each clause
#to CNF
#add the root nodevar
cnf_clauses = convert_1_to_3(root_nodevar)
#cnf_clauses = [root_nodevar]
for i in range(len(clauses)):
clause = clauses[i]
#if the clause is already disjunctive then we're fine
if clause[0] == "V":
cnf_clauses.append(clause)
continue
cnf_clauses.extend(convert_clause_to_cnf(clause))
#write_cnf_clauses_to_file(fh,cnf_clauses)
return cnf_clauses
def convert_to_3cnf_efficient(formula):
t = formula[0]
#print print_formula(formula)
if t in ["var","neg"]:
return convert_1_to_3(formula)
if t in ["&"]:
return convert_to_3cnf_canonical(formula)
#we're of the "V" type now
l = len(formula)
if l == 2:
return convert_1_to_3(formula[1])
if l == 3:
return convert_2_to_3(formula[1],formula[2])
if l == 4:
return [formula] #is already in 3CNF form
if l == 5:
return convert_4cnf_to_3cnf_efficient(formula)
return convert_to_3cnf_canonical(formula)
#=============================================================================================================
#
#
# MAIN FACTORING CODE
#
#
#
#=============================================================================================================
def halt():
a = 0
b = 3/a
def generate_instance(NUM_CLAUSES,NUM_VARIABLES,LIT_PER_CLAUSE,op_3cnf):
global formula
global vars
global postfix_counter
global num_clauses
formula = []
vars = {}
postfix_counter = 0
num_clauses = 0
allocate_var("v",NUM_VARIABLES)
formula = []
for i in range(NUM_CLAUSES):
lits = []
        vs = random.sample(range(NUM_VARIABLES), LIT_PER_CLAUSE)
for j in range(LIT_PER_CLAUSE):
if random.random() > 0.5:
lits.append(vars["v"][vs[j]])
else:
lits.append(make_neg(vars["v"][vs[j]]))
formula.append(make_disj(lits))
if op_3cnf:
cnf_clauses = []
for f in formula:
g = convert_to_3cnf_efficient(f)
#g = convert_4cnf_to_3cnf_canonical(f)
cnf_clauses.extend(g)
#cnf_clauses.append(f)
#print print_formula(make_conj(g))
#break
formula = cnf_clauses
#halt()
#print print_formula(make_conj(cnf_clauses))
num_clauses = len(formula)
num_variables = postfix_counter
output = "c A SAT instance generated from a " + str(LIT_PER_CLAUSE) + "-CNF formula that had " + str(NUM_CLAUSES) + " clauses and " + str(NUM_VARIABLES) + " variables\n"
output += "p cnf " + str(num_variables) + " " + str(num_clauses) + "\n"
output += write_cnf_clauses_to_dimacs(formula)
return output
def main():
#generate partial product sums
args = sys.argv
if len(args) != 6:
print ("Usage: random_ksat.py <k> <numvars> <numclauses> <op_3cnf> <outputfile>")
return
k = int(args[1])
numvars = int(args[2])
numclauses = int(args[3])
op_3cnf = args[4] == "1"
output = generate_instance(numclauses,numvars,k,op_3cnf)
f = open(args[5],"w")
f.write(output)
f.close()
if __name__ == '__main__':
main()
| 23.097865
| 171
| 0.612202
|
14a439072a0bd82fb2688001935dd365727d2f5e
| 49,415
|
py
|
Python
|
abcpy/distances.py
|
vishalbelsare/abcpy
|
72d0d31ae3fa531b69ea3fef39c96af6628ee76f
|
[
"BSD-3-Clause-Clear"
] | 89
|
2017-02-23T23:34:52.000Z
|
2022-03-25T20:35:17.000Z
|
abcpy/distances.py
|
vishalbelsare/abcpy
|
72d0d31ae3fa531b69ea3fef39c96af6628ee76f
|
[
"BSD-3-Clause-Clear"
] | 35
|
2017-03-31T13:24:52.000Z
|
2022-01-09T11:31:38.000Z
|
abcpy/distances.py
|
vishalbelsare/abcpy
|
72d0d31ae3fa531b69ea3fef39c96af6628ee76f
|
[
"BSD-3-Clause-Clear"
] | 32
|
2017-03-22T06:27:43.000Z
|
2021-09-17T15:50:42.000Z
|
import numpy as np
import warnings
from abc import ABCMeta, abstractmethod
from glmnet import LogitNet
from sklearn import linear_model
from sklearn.neighbors import NearestNeighbors
from abcpy.utils import wass_dist
class Distance(metaclass=ABCMeta):
"""This abstract base class defines how the distance between the observed and
simulated data should be implemented.
"""
def __init__(self, statistics_calc):
"""The constructor of a sub-class must accept a non-optional statistics
calculator as a parameter; then, it must call the __init__ method of the parent class. This ensures that the
object is initialized correctly so that the _calculate_summary_stat private method can be called when computing
the distances.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
"""
self.statistics_calc = statistics_calc
# Since the observations do always stay the same, we can save the
# summary statistics of them and not recalculate it each time
self.s1 = None
self.data_set = None
self.dataSame = False
@abstractmethod
def distance(self, d1, d2):
"""To be overwritten by any sub-class: should calculate the distance between two
sets of data d1 and d2 using their respective statistics.
Usually, calling the _calculate_summary_stat private method to obtain statistics from the datasets is handy;
that also keeps track of the first provided dataset (which is the observation in ABCpy inference schemes) and
avoids computing the statistics for that multiple times.
Notes
-----
The data sets d1 and d2 are array-like structures that contain n1 and n2 data
points each. An implementation of the distance function should work along
the following steps:
1. Transform both input sets dX = [ dX1, dX2, ..., dXn ] to sX = [sX1, sX2,
..., sXn] using the statistics object. See _calculate_summary_stat method.
        2. Calculate the desired mutual distance, here denoted by -, between the
        statistics; for instance, dist = [s11 - s21, s12 - s22, ..., s1n - s2n] (in some cases, however, you
        may want to compute all pairwise distances between statistics elements).
Important: any sub-class must not calculate the distance between data sets
d1 and d2 directly. This is the reason why any sub-class must be
initialized with a statistics object.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
raise NotImplementedError
@abstractmethod
def dist_max(self):
"""To be overwritten by sub-class: should return maximum possible value of the
desired distance function.
Examples
--------
If the desired distance maps to :math:`\mathbb{R}`, this method should return numpy.inf.
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
raise NotImplementedError
def _calculate_summary_stat(self, d1, d2):
"""Helper function that extracts the summary statistics s1 and s2 from d1 and
d2 using the statistics object stored in self.statistics_calc. This stores s1 for the purpose of checking
        whether that is repeated in subsequent calls to the function, and avoiding computing the statistics for the same
dataset several times.
Parameters
----------
d1 : array-like
d1 contains n data sets.
d2 : array-like
d2 contains m data sets.
Returns
-------
tuple
Tuple containing numpy.ndarray's with the summary statistics extracted from d1 and d2.
"""
if not isinstance(d1, list):
raise TypeError('Data is not of allowed types')
if not isinstance(d2, list):
raise TypeError('Data is not of allowed types')
# Check whether d1 is same as self.data_set
if self.data_set is not None:
            # check that the observations have the same length; if not, they can't be the same:
if len(d1) != len(self.data_set):
self.dataSame = False
elif len(np.array(d1[0]).reshape(-1, )) == 1:
self.dataSame = self.data_set == d1
else:
self.dataSame = all([(np.array(self.data_set[i]) == np.array(d1[i])).all() for i in range(len(d1))])
# Extract summary statistics from the dataset
if self.s1 is None or self.dataSame is False:
self.s1 = self.statistics_calc.statistics(d1)
self.data_set = d1
s2 = self.statistics_calc.statistics(d2)
if self.s1.shape[1] != s2.shape[1]:
raise ValueError("The dimension of summaries in the two datasets is different; check the dimension of the"
" provided observations and simulations.")
return self.s1, s2
class Divergence(Distance, metaclass=ABCMeta):
"""This is an abstract class which subclasses Distance, and is used as a parent class for all divergence
estimators; more specifically, it is used for all Distances which compare the empirical distribution of simulations
and observations."""
@abstractmethod
def _estimate_always_positive(self):
"""This returns whether the implemented divergence always returns positive values or not. In fact, some
estimators may return negative values, which may break some inference algorithms"""
raise NotImplementedError
class Euclidean(Distance):
"""
This class implements the Euclidean distance between two vectors.
The maximum value of the distance is np.inf.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
"""
def __init__(self, statistics_calc):
super(Euclidean, self).__init__(statistics_calc)
def distance(self, d1, d2):
"""Calculates the distance between two datasets, by computing Euclidean distance between each element of d1 and
d2 and taking their average.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
# compute distance between the statistics
dist = np.zeros(shape=(s1.shape[0], s2.shape[0]))
for ind1 in range(0, s1.shape[0]):
for ind2 in range(0, s2.shape[0]):
dist[ind1, ind2] = np.sqrt(np.sum(pow(s1[ind1, :] - s2[ind2, :], 2)))
return dist.mean()
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
return np.inf
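# --- Added illustrative usage sketch (not part of the original module) ---
# It assumes the Identity statistics extractor from abcpy.statistics and uses
# toy observation/simulation lists; it is an example, not library behaviour.
def _euclidean_usage_example():  # pragma: no cover
    from abcpy.statistics import Identity
    dist_calc = Euclidean(Identity(degree=1, cross=False))
    observations = [np.array([1.0, 2.0])]
    simulations = [np.array([1.5, 2.5])]
    return dist_calc.distance(observations, simulations)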
class PenLogReg(Divergence):
"""
This class implements a distance measure based on the classification accuracy.
The classification accuracy is calculated between two dataset d1 and d2 using
lasso penalized logistics regression and return it as a distance. The lasso
penalized logistic regression is done using glmnet package of Friedman et. al.
[2]. While computing the distance, the algorithm automatically chooses
the most relevant summary statistics as explained in Gutmann et. al. [1].
The maximum value of the distance is 1.0.
[1] Gutmann, M. U., Dutta, R., Kaski, S., & Corander, J. (2018). Likelihood-free inference via classification.
Statistics and Computing, 28(2), 411-425.
[2] Friedman, J., Hastie, T., and Tibshirani, R. (2010). Regularization
paths for generalized linear models via coordinate descent. Journal of Statistical
Software, 33(1), 1–22.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
"""
def __init__(self, statistics_calc):
super(PenLogReg, self).__init__(statistics_calc)
self.n_folds = 10 # for cross validation in PenLogReg
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
self.n_simulate = s1.shape[0]
if not s2.shape[0] == self.n_simulate:
raise RuntimeError("The number of simulations in the two data sets should be the same in order for "
"the classification accuracy implemented in PenLogReg to be a proper distance. Please "
"check that `n_samples` in the `sample()` method for the sampler is equal to "
"the number of datasets in the observations.")
# compute distance between the statistics
training_set_features = np.concatenate((s1, s2), axis=0)
label_s1 = np.zeros(shape=(len(s1), 1))
label_s2 = np.ones(shape=(len(s2), 1))
training_set_labels = np.concatenate((label_s1, label_s2), axis=0).ravel()
groups = np.repeat(np.arange(self.n_folds), np.int(np.ceil(self.n_simulate / self.n_folds)))
groups = groups[:self.n_simulate].tolist()
groups += groups # duplicate it as groups need to be defined for both datasets
m = LogitNet(alpha=1, n_splits=self.n_folds) # note we are not using random seed here!
m = m.fit(training_set_features, training_set_labels, groups=groups)
distance = 2.0 * (m.cv_mean_score_[np.where(m.lambda_path_ == m.lambda_max_)[0][0]] - 0.5)
return distance
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
return 1.0
def _estimate_always_positive(self):
return False
class LogReg(Divergence):
"""This class implements a distance measure based on the classification
accuracy [1]. The classification accuracy is calculated between two dataset d1 and d2 using
logistics regression and return it as a distance. The maximum value of the distance is 1.0.
The logistic regression may not converge when using one single sample in each dataset (as for instance by putting
n_samples_per_param=1 in an inference routine).
[1] Gutmann, M. U., Dutta, R., Kaski, S., & Corander, J. (2018). Likelihood-free inference via classification.
Statistics and Computing, 28(2), 411-425.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
    seed : integer, optional
Seed used to initialize the Random Numbers Generator used to determine the (random) cross validation split
in the Logistic Regression classifier.
"""
def __init__(self, statistics_calc, seed=None):
super(LogReg, self).__init__(statistics_calc)
# seed is used for a RandomState for the random split in the LogisticRegression classifier:
self.rng = np.random.RandomState(seed=seed)
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
# compute distance between the statistics
training_set_features = np.concatenate((s1, s2), axis=0)
label_s1 = np.zeros(shape=(len(s1), 1))
label_s2 = np.ones(shape=(len(s2), 1))
training_set_labels = np.concatenate((label_s1, label_s2), axis=0).ravel()
reg_inv = 1e5
log_reg_model = linear_model.LogisticRegression(C=reg_inv, penalty='l1', max_iter=1000, solver='liblinear',
random_state=self.rng.randint(0, np.iinfo(np.uint32).max))
log_reg_model.fit(training_set_features, training_set_labels)
score = log_reg_model.score(training_set_features, training_set_labels)
distance = 2.0 * (score - 0.5)
return distance
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
return 1.0
def _estimate_always_positive(self):
return False
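# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal, hypothetical example of how a classifier-based distance such as LogReg is
# typically used: a statistics extractor is passed at construction time and `distance` is
# then called on two lists of data points. `Identity` is assumed to be the statistics
# extractor from abcpy.statistics, and the toy data below are placeholders.
def _example_logreg_usage():  # hypothetical helper, for illustration only
    import numpy as np
    from abcpy.statistics import Identity  # assumed import path
    stat_calc = Identity(degree=1, cross=False)
    dist_calc = LogReg(stat_calc, seed=42)
    obs = [np.random.normal(0, 1, 10) for _ in range(20)]
    sim = [np.random.normal(1, 1, 10) for _ in range(20)]
    # at most dist_max() == 1.0; larger values mean the two datasets are easier to tell apart
    return dist_calc.distance(obs, sim)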
class Wasserstein(Divergence):
"""This class implements a distance measure based on the 2-Wasserstein distance, as used in [1]. This considers the
several simulations/observations in the datasets as iid samples from the model for a fixed parameter value/from the
data generating model, and computes the 2-Wasserstein distance between the empirical distributions those
simulations/observations define.
[1] Bernton, E., Jacob, P.E., Gerber, M. and Robert, C.P. (2019), Approximate Bayesian computation with the
Wasserstein distance. J. R. Stat. Soc. B, 81: 235-269. doi:10.1111/rssb.12312
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
num_iter_max : integer, optional
The maximum number of iterations in the linear programming algorithm to estimate the Wasserstein distance.
        Defaults to 100000.
"""
def __init__(self, statistics_calc, num_iter_max=100000):
super(Wasserstein, self).__init__(statistics_calc)
self.num_iter_max = num_iter_max
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
# compute the Wasserstein distance between the empirical distributions:
return wass_dist(samples_1=s1, samples_2=s2, num_iter_max=self.num_iter_max)
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
        # The Wasserstein distance is unbounded from above
return np.inf
def _estimate_always_positive(self):
return True
class SlicedWasserstein(Divergence):
"""This class implements a distance measure based on the sliced 2-Wasserstein distance, as used in [1].
This considers the several simulations/observations in the datasets as iid samples from the model for a fixed
parameter value/from the data generating model, and computes the sliced 2-Wasserstein distance between the
empirical distributions those simulations/observations define. Specifically, the sliced Wasserstein distance
is a cheaper version of the Wasserstein distance which consists of projecting the multivariate data on 1d directions
and computing the 1d Wasserstein distance, which is computationally cheap. The resulting sliced Wasserstein
distance is obtained by averaging over a given number of projections.
[1] Nadjahi, K., De Bortoli, V., Durmus, A., Badeau, R., & Şimşekli, U. (2020, May). Approximate bayesian
computation with the sliced-wasserstein distance. In ICASSP 2020-2020 IEEE International Conference on Acoustics,
Speech and Signal Processing (ICASSP) (pp. 5470-5474). IEEE.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
n_projections : int, optional
Number of 1d projections used for estimating the sliced Wasserstein distance. Default value is 50.
rng : np.random.RandomState, optional
        Random number generator used to generate the projections. If not provided, a new one is instantiated.
"""
def __init__(self, statistics_calc, n_projections=50, rng=np.random.RandomState()):
super(SlicedWasserstein, self).__init__(statistics_calc)
self.n_projections = n_projections
self.rng = rng
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
# compute the Wasserstein distance between the empirical distributions:
return self.sliced_wasserstein_distance(X_s=s1, X_t=s2, n_projections=self.n_projections, seed=self.rng)
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
        # The sliced Wasserstein distance is unbounded from above
return np.inf
def _estimate_always_positive(self):
return True
# the following two functions are taken from
# https://github.com/PythonOT/POT/blob/78b44af2434f494c8f9e4c8c91003fbc0e1d4415/ot/sliced.py
# Author: Adrien Corenflos <adrien.corenflos@aalto.fi>
#
# License: MIT License
@staticmethod
def get_random_projections(n_projections, d, seed=None):
r"""
Taken from
https://github.com/PythonOT/POT/blob/78b44af2434f494c8f9e4c8c91003fbc0e1d4415/ot/sliced.py
Author: Adrien Corenflos <adrien.corenflos@aalto.fi>
License: MIT License
Generates n_projections samples from the uniform on the unit sphere of dimension d-1: :math:`\mathcal{U}(\mathcal{S}^{d-1})`
Parameters
----------
n_projections : int
number of samples requested
d : int
dimension of the space
seed: int or RandomState, optional
Seed used for numpy random number generator
Returns
-------
out: ndarray, shape (n_projections, d)
The uniform unit vectors on the sphere
Examples
--------
>>> n_projections = 100
>>> d = 5
>>> projs = get_random_projections(n_projections, d)
>>> np.allclose(np.sum(np.square(projs), 1), 1.) # doctest: +NORMALIZE_WHITESPACE
True
"""
if not isinstance(seed, np.random.RandomState):
random_state = np.random.RandomState(seed)
else:
random_state = seed
projections = random_state.normal(0., 1., [n_projections, d])
norm = np.linalg.norm(projections, ord=2, axis=1, keepdims=True)
projections = projections / norm
return projections
def sliced_wasserstein_distance(self, X_s, X_t, a=None, b=None, n_projections=50, seed=None, log=False):
r"""
Taken from
https://github.com/PythonOT/POT/blob/78b44af2434f494c8f9e4c8c91003fbc0e1d4415/ot/sliced.py
Author: Adrien Corenflos <adrien.corenflos@aalto.fi>
License: MIT License
Computes a Monte-Carlo approximation of the 2-Sliced Wasserstein distance
:math:`\mathcal{SWD}_2(\mu, \nu) = \underset{\theta \sim \mathcal{U}(\mathbb{S}^{d-1})}{\mathbb{E}}[\mathcal{W}_2^2(\theta_\# \mu, \theta_\# \nu)]^{\frac{1}{2}}`
where
        :math:`\theta_\# \mu` stands for the pushforward of the projection :math:`\mathbb{R}^d \ni X \mapsto \langle \theta, X \rangle`
Parameters
----------
X_s : ndarray, shape (n_samples_a, dim)
samples in the source domain
X_t : ndarray, shape (n_samples_b, dim)
samples in the target domain
a : ndarray, shape (n_samples_a,), optional
samples weights in the source domain
b : ndarray, shape (n_samples_b,), optional
samples weights in the target domain
n_projections : int, optional
Number of projections used for the Monte-Carlo approximation
seed: int or RandomState or None, optional
Seed used for numpy random number generator
log: bool, optional
if True, sliced_wasserstein_distance returns the projections used and their associated EMD.
Returns
-------
cost: float
Sliced Wasserstein Cost
log : dict, optional
log dictionary return only if log==True in parameters
Examples
--------
>>> n_samples_a = 20
>>> reg = 0.1
>>> X = np.random.normal(0., 1., (n_samples_a, 5))
>>> sliced_wasserstein_distance(X, X, seed=0) # doctest: +NORMALIZE_WHITESPACE
0.0
References
----------
Bonneel, Nicolas, et al. "Sliced and radon wasserstein barycenters of measures." Journal of Mathematical Imaging and Vision 51.1 (2015): 22-45
"""
from ot.lp import emd2_1d
X_s = np.asanyarray(X_s)
X_t = np.asanyarray(X_t)
n = X_s.shape[0]
m = X_t.shape[0]
if X_s.shape[1] != X_t.shape[1]:
raise ValueError(
"X_s and X_t must have the same number of dimensions {} and {} respectively given".format(X_s.shape[1],
X_t.shape[1]))
if a is None:
a = np.full(n, 1 / n)
if b is None:
b = np.full(m, 1 / m)
d = X_s.shape[1]
projections = self.get_random_projections(n_projections, d, seed)
X_s_projections = np.dot(projections, X_s.T)
X_t_projections = np.dot(projections, X_t.T)
if log:
projected_emd = np.empty(n_projections)
else:
projected_emd = None
res = 0.
for i, (X_s_proj, X_t_proj) in enumerate(zip(X_s_projections, X_t_projections)):
emd = emd2_1d(X_s_proj, X_t_proj, a, b, log=False, dense=False)
if projected_emd is not None:
projected_emd[i] = emd
res += emd
res = (res / n_projections) ** 0.5
if log:
return res, {"projections": projections, "projected_emds": projected_emd}
return res
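# --- Usage sketch (added for illustration; not part of the original module) ---
# A hypothetical comparison of the full and the sliced 2-Wasserstein distances on the same
# toy data. The sliced version replaces the full optimal-transport problem with an average
# of cheap 1d Wasserstein distances over `n_projections` random directions. `Identity` is
# assumed to be the statistics extractor from abcpy.statistics; the data are placeholders.
def _example_wasserstein_vs_sliced():  # hypothetical helper, for illustration only
    import numpy as np
    from abcpy.statistics import Identity  # assumed import path
    stat_calc = Identity(degree=1, cross=False)
    d1 = [np.random.normal(0.0, 1.0, 5) for _ in range(30)]
    d2 = [np.random.normal(0.5, 1.0, 5) for _ in range(30)]
    full = Wasserstein(stat_calc).distance(d1, d2)
    sliced = SlicedWasserstein(stat_calc, n_projections=100).distance(d1, d2)
    return full, sliced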
class GammaDivergence(Divergence):
"""
This implements an empirical estimator of the gamma-divergence for ABC as suggested in [1]. In [1], the
gamma-divergence was proposed as a divergence which is robust to outliers. The estimator is based on a nearest
neighbor density estimate.
Specifically, this considers the
several simulations/observations in the datasets as iid samples from the model for a fixed parameter value/from the
data generating model, and estimates the divergence between the empirical distributions those
simulations/observations define.
[1] Fujisawa, M., Teshima, T., Sato, I., & Sugiyama, M.
γ-ABC: Outlier-robust approximate Bayesian computation based on a
robust divergence estimator.
In A. Banerjee and K. Fukumizu (Eds.), Proceedings of 24th
International Conference on Artificial Intelligence and Statistics
(AISTATS2021), Proceedings of Machine Learning Research, vol.130,
pp.1783-1791, online, Apr. 13-15, 2021.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
k : int, optional
nearest neighbor number for the density estimate. Default value is 1
gam : float, optional
the gamma parameter in the definition of the divergence. Default value is 0.1
"""
def __init__(self, statistics_calc, k=1, gam=0.1):
super(GammaDivergence, self).__init__(statistics_calc)
self.k = k # number of nearest neighbors used in the estimation algorithm
self.gam = gam
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
        if s1.shape[0] <= self.k or s2.shape[0] <= self.k:
            raise ValueError(f"The provided value of k ({self.k}) is larger than or equal to the number of samples "
                             f"in one of the two datasets; k should instead be smaller than both sample sizes.")
# estimate the gamma divergence using the empirical distributions
return self.skl_estimator_gamma_q(s1=s1, s2=s2, k=self.k, gam=self.gam)
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
        # The gamma divergence is unbounded from above
return np.inf
@staticmethod
def skl_estimator_gamma_q(s1, s2, k=1, gam=0.1):
""" Gamma-Divergence estimator using scikit-learn's NearestNeighbours
s1: (N_1,D) Sample drawn from distribution P
s2: (N_2,D) Sample drawn from distribution Q
k: Number of neighbours considered (default 1)
return: estimated D(P|Q)
Adapted from code provided by Masahiro Fujisawa (University of Tokyo / RIKEN AIP)
"""
n, m = len(s1), len(s2) # NOTE: here different convention of n, m wrt MMD and EnergyDistance
d = float(s1.shape[1])
        radius = 10  # the radius parameter is not used by the kneighbors queries below; any value works
s1_neighbourhood = NearestNeighbors(n_neighbors=k + 1, radius=radius, algorithm='kd_tree').fit(s1)
s2_neighbourhood = NearestNeighbors(n_neighbors=k, radius=radius, algorithm='kd_tree').fit(s2)
s3_neighbourhood = NearestNeighbors(n_neighbors=k + 1, radius=radius, algorithm='kd_tree').fit(s2)
d_gam = d * gam
s1_distances, indices = s1_neighbourhood.kneighbors(s1, k + 1)
s2_distances, indices = s2_neighbourhood.kneighbors(s1, k)
rho = s1_distances[:, -1]
nu = s2_distances[:, -1]
if np.any(rho == 0):
warnings.warn(
f"The distance between an element of the first dataset and its {k}-th NN in the same dataset "
f"is 0; this causes divergences in the code, and it is due to elements which are repeated "
f"{k + 1} times in the first dataset. Increasing the value of k usually solves this.",
RuntimeWarning)
# notice: the one below becomes 0 when one element in the s1 dataset is equal to one in the s2 dataset
# and k=1 (as the distance between those two would be 0, which gives infinity when dividing)
if np.any(nu == 0):
warnings.warn(f"The distance between an element of the first dataset and its {k}-th NN in the second "
f"dataset is 0; this causes divergences in the code, and it is usually due to equal "
f"elements"
f" in the two datasets. Increasing the value of k usually solves this.", RuntimeWarning)
second_term = np.sum(1 / (rho ** d_gam)) / (n * (n - 1) ** gam)
fourth_term = np.sum(1 / (nu ** d_gam)) / (n * m ** gam)
s3_distances, indices = s3_neighbourhood.kneighbors(s2, k + 1)
rho_q = s3_distances[:, -1]
if np.any(rho_q == 0):
warnings.warn(
f"The distance between an element of the second dataset and its {k}-th NN in the same dataset "
f"is 0; this causes divergences in the code, and it is due to elements which are repeated "
f"{k + 1} times in the second dataset. Increasing the value of k usually solves this.",
RuntimeWarning)
third_term = np.sum(1 / (rho_q ** d_gam))
# third_term /= m * (m ** gam) # original code: I think the second term here should be m - 1
third_term /= m * (m - 1) ** gam # corrected version
third_term = third_term ** gam
fourth_term = fourth_term ** (1 + gam)
D = (1 / (gam * (gam + 1))) * (np.log((second_term * third_term) / fourth_term))
return D
def _estimate_always_positive(self):
return False
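# --- Usage sketch (added for illustration; not part of the original module) ---
# The kNN-based estimator can also be called directly on two arrays of summary statistics,
# bypassing the statistics extractor. The toy Gaussian samples below are placeholders;
# note that k must be smaller than the number of samples in each array.
def _example_gamma_estimator():  # hypothetical helper, for illustration only
    import numpy as np
    rng = np.random.RandomState(0)
    s1 = rng.normal(0.0, 1.0, size=(100, 2))
    s2 = rng.normal(0.5, 1.0, size=(100, 2))
    return GammaDivergence.skl_estimator_gamma_q(s1, s2, k=5, gam=0.1)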
class KLDivergence(Divergence):
"""
This implements an empirical estimator of the KL divergence for ABC as suggested in [1]. The estimator is based
on a nearest neighbor density estimate.
Specifically, this considers the
several simulations/observations in the datasets as iid samples from the model for a fixed parameter value/from the
data generating model, and estimates the divergence between the empirical distributions those
simulations/observations define.
[1] Jiang, B. (2018, March). Approximate Bayesian computation with Kullback-Leibler divergence as data discrepancy.
In International Conference on Artificial Intelligence and Statistics (pp. 1711-1721). PMLR.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
k : int, optional
nearest neighbor number for the density estimate. Default value is 1
"""
def __init__(self, statistics_calc, k=1):
super(KLDivergence, self).__init__(statistics_calc)
self.k = k # number of nearest neighbors used in the estimation algorithm
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
        if s1.shape[0] <= self.k or s2.shape[0] <= self.k:
            raise ValueError(f"The provided value of k ({self.k}) is larger than or equal to the number of samples "
                             f"in one of the two datasets; k should instead be smaller than both sample sizes.")
# estimate the KL divergence using the empirical distributions
return self.skl_estimator_KL_div(s1=s1, s2=s2, k=self.k)
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
        # The Kullback-Leibler divergence is unbounded from above
return np.inf
@staticmethod
def skl_estimator_KL_div(s1, s2, k=1):
"""
Adapted from https://github.com/nhartland/KL-divergence-estimators/blob/5473a23f5f13d7557100504611c57c9225b1a6eb/src/knn_divergence.py
MIT license
KL-Divergence estimator using scikit-learn's NearestNeighbours
s1: (N_1,D) Sample drawn from distribution P
s2: (N_2,D) Sample drawn from distribution Q
k: Number of neighbours considered (default 1)
return: estimated D(P|Q)
"""
n, m = len(s1), len(s2) # NOTE: here different convention of n, m wrt MMD and EnergyDistance
d = float(s1.shape[1])
        radius = 10  # the radius parameter is not used by the kneighbors queries below; any value works
s1_neighbourhood = NearestNeighbors(n_neighbors=k + 1, radius=radius, algorithm='kd_tree').fit(s1)
s2_neighbourhood = NearestNeighbors(n_neighbors=k, radius=radius, algorithm='kd_tree').fit(s2)
s1_distances, indices = s1_neighbourhood.kneighbors(s1, k + 1)
s2_distances, indices = s2_neighbourhood.kneighbors(s1, k)
rho = s1_distances[:, -1]
nu = s2_distances[:, -1]
if np.any(rho == 0):
warnings.warn(
f"The distance between an element of the first dataset and its {k}-th NN in the same dataset "
f"is 0; this causes divergences in the code, and it is due to elements which are repeated "
f"{k + 1} times in the first dataset. Increasing the value of k usually solves this.",
RuntimeWarning)
D = np.sum(np.log(nu / rho))
return (d / n) * D + np.log(m / (n - 1)) # this second term should be enough for it to be valid for m \neq n
def _estimate_always_positive(self):
return False
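# --- Usage sketch (added for illustration; not part of the original module) ---
# Direct call of the kNN Kullback-Leibler estimator on two toy samples; for two univariate
# Gaussians N(0,1) and N(1,1) the analytical KL is 0.5, and the estimate fluctuates around
# that value for large sample sizes. The samples below are placeholders.
def _example_kl_estimator():  # hypothetical helper, for illustration only
    import numpy as np
    rng = np.random.RandomState(0)
    s1 = rng.normal(0.0, 1.0, size=(500, 1))
    s2 = rng.normal(1.0, 1.0, size=(500, 1))
    return KLDivergence.skl_estimator_KL_div(s1, s2, k=1)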
class MMD(Divergence):
"""
This implements an empirical estimator of the MMD for ABC as suggested in [1]. This class implements a gaussian
kernel by default but allows specifying different kernel functions. Notice that the original version in [1]
suggested an unbiased estimate, which however can return negative values. We also provide a biased but provably
positive estimator following the remarks in [2].
Specifically, this considers the
several simulations/observations in the datasets as iid samples from the model for a fixed parameter value/from the
data generating model, and estimates the MMD between the empirical distributions those
simulations/observations define.
[1] Park, M., Jitkrittum, W., & Sejdinovic, D. (2016, May). K2-ABC: Approximate Bayesian computation with
kernel embeddings. In Artificial Intelligence and Statistics (pp. 398-407). PMLR.
[2] Nguyen, H. D., Arbel, J., Lü, H., & Forbes, F. (2020). Approximate Bayesian computation via the energy
statistic. IEEE Access, 8, 131683-131698.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
kernel : str or callable
Can be a string denoting the kernel, or a function. If a string, only gaussian is implemented for now; in
that case, you can also provide an additional keyword parameter 'sigma' which is used as the sigma in the
kernel. Default is the gaussian kernel.
biased_estimator : boolean, optional
Whether to use the biased (but always positive) or unbiased estimator; by default, it uses the biased one.
kernel_kwargs
        Additional keyword arguments to be passed to the kernel (e.g. 'sigma' for the gaussian kernel).
"""
def __init__(self, statistics_calc, kernel="gaussian", biased_estimator=False, **kernel_kwargs):
super(MMD, self).__init__(statistics_calc)
self.kernel_vectorized = False
if not isinstance(kernel, str) and not callable(kernel):
raise RuntimeError("'kernel' must be either a string or a function.")
if isinstance(kernel, str):
if kernel == "gaussian":
self.kernel = self.def_gaussian_kernel(**kernel_kwargs)
self.kernel_vectorized = True # the gaussian kernel is vectorized
else:
raise NotImplementedError("The required kernel is not implemented.")
else:
self.kernel = kernel # if kernel is a callable already
self.biased_estimator = biased_estimator
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
# compute the Gram matrix
K11, K22, K12 = self.compute_Gram_matrix(s1, s2)
# Estimate MMD
if self.biased_estimator:
return self.MMD_V_estimator(K11, K22, K12)
else:
return self.MMD_unbiased(K11, K22, K12)
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
        # No finite upper bound is assumed here for a generic kernel
return np.inf
@staticmethod
def def_gaussian_kernel(sigma=1):
        # note: in the MMD paper [1], sigma is set to a median distance over the observations (median heuristic)
sigma_2 = 2 * sigma ** 2
# def Gaussian_kernel(x, y):
# xy = x - y
# # assert np.allclose(np.dot(xy, xy), np.linalg.norm(xy) ** 2)
# return np.exp(- np.dot(xy, xy) / sigma_2)
def Gaussian_kernel_vectorized(X, Y):
"""Here X and Y have shape (n_samples_x, n_features) and (n_samples_y, n_features);
this directly computes the kernel for all pairwise components"""
XY = X.reshape(X.shape[0], 1, -1) - Y.reshape(1, Y.shape[0], -1) # pairwise differences
return np.exp(- np.einsum('xyi,xyi->xy', XY, XY) / sigma_2)
return Gaussian_kernel_vectorized
def compute_Gram_matrix(self, s1, s2):
if self.kernel_vectorized:
K11 = self.kernel(s1, s1)
K22 = self.kernel(s2, s2)
K12 = self.kernel(s1, s2)
else:
m = s1.shape[0]
n = s2.shape[0]
K11 = np.zeros((m, m))
K22 = np.zeros((n, n))
K12 = np.zeros((m, n))
for i in range(m):
# we assume the function to be symmetric; this saves some steps:
for j in range(i, m):
K11[j, i] = K11[i, j] = self.kernel(s1[i], s1[j])
for i in range(n):
# we assume the function to be symmetric; this saves some steps:
for j in range(i, n):
K22[j, i] = K22[i, j] = self.kernel(s2[i], s2[j])
for i in range(m):
for j in range(n):
K12[i, j] = self.kernel(s1[i], s2[j])
            # note: these loops could be vectorized, but for moderate sample sizes the gain would likely be small
return K11, K22, K12
@staticmethod
def MMD_unbiased(Kxx, Kyy, Kxy):
# from https://github.com/eugenium/MMD/blob/2fe67cbc7378f10f3b273cfd8d8bbd2135db5798/mmd.py
# The estimate when distribution of x is not equal to y
m = Kxx.shape[0]
n = Kyy.shape[0]
t1 = (1. / (m * (m - 1))) * np.sum(Kxx - np.diag(np.diagonal(Kxx)))
t2 = (2. / (m * n)) * np.sum(Kxy)
t3 = (1. / (n * (n - 1))) * np.sum(Kyy - np.diag(np.diagonal(Kyy)))
MMDsquared = (t1 - t2 + t3)
return MMDsquared
@staticmethod
def MMD_V_estimator(Kxx, Kyy, Kxy):
# The estimate when distribution of x is not equal to y
m = Kxx.shape[0]
n = Kyy.shape[0]
t1 = (1. / (m * m)) * np.sum(Kxx)
t2 = (2. / (m * n)) * np.sum(Kxy)
t3 = (1. / (n * n)) * np.sum(Kyy)
MMDsquared = (t1 - t2 + t3)
return MMDsquared
def _estimate_always_positive(self):
return self.biased_estimator
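# --- Usage sketch (added for illustration; not part of the original module) ---
# A hypothetical example contrasting the two MMD estimators: the unbiased one (default) can
# be slightly negative when the two datasets come from the same distribution, while the
# biased (V-statistic) one is always non-negative. `Identity` is assumed to be the
# statistics extractor from abcpy.statistics; the data and kernel bandwidth are placeholders.
def _example_mmd_estimators():  # hypothetical helper, for illustration only
    import numpy as np
    from abcpy.statistics import Identity  # assumed import path
    stat_calc = Identity(degree=1, cross=False)
    d1 = [np.random.normal(0, 1, 3) for _ in range(50)]
    d2 = [np.random.normal(0, 1, 3) for _ in range(50)]
    unbiased = MMD(stat_calc, kernel="gaussian", biased_estimator=False, sigma=1.0).distance(d1, d2)
    biased = MMD(stat_calc, kernel="gaussian", biased_estimator=True, sigma=1.0).distance(d1, d2)
    return unbiased, biased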
class EnergyDistance(MMD):
"""
This implements an empirical estimator of the Energy Distance for ABC as suggested in [1].
    This class uses the Euclidean distance as a base distance by default, but allows passing a different distance.
Moreover, when the Euclidean distance is specified, it is possible to pass an additional keyword argument `beta`
which denotes the power of the distance to consider.
    In [1], the authors suggest using a biased but provably positive estimator; we also provide an unbiased estimate,
which however can return negative values.
Specifically, this considers the
several simulations/observations in the datasets as iid samples from the model for a fixed parameter value/from the
    data generating model, and estimates the energy distance between the empirical distributions those
simulations/observations define.
[1] Nguyen, H. D., Arbel, J., Lü, H., & Forbes, F. (2020). Approximate Bayesian computation via the energy
statistic. IEEE Access, 8, 131683-131698.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
base_distance : str or callable
        Can be a string denoting the base distance, or a function. If a string, only the Euclidean distance is implemented
for now; in that case, you can also provide an additional keyword parameter 'beta' which is the power
of the distance to consider. By default, this uses the Euclidean distance.
biased_estimator : boolean, optional
Whether to use the biased (but always positive) or unbiased estimator; by default, it uses the biased one.
base_distance_kwargs
        Additional keyword arguments to be passed to the base distance (e.g. 'beta' for the Euclidean distance).
"""
def __init__(self, statistics_calc, base_distance="Euclidean", biased_estimator=True, **base_distance_kwargs):
if not isinstance(base_distance, str) and not callable(base_distance):
raise RuntimeError("'base_distance' must be either a string or a function.")
if isinstance(base_distance, str):
if base_distance == "Euclidean":
self.base_distance = self.def_Euclidean_distance(**base_distance_kwargs)
else:
                raise NotImplementedError("The required base distance is not implemented.")
else:
self.base_distance = base_distance # if base_distance is a callable already
self.biased_estimator = biased_estimator
def negative_distance(*args):
return - self.base_distance(*args)
super(EnergyDistance, self).__init__(statistics_calc, kernel=negative_distance,
biased_estimator=self.biased_estimator)
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
        # The energy distance is unbounded from above
return np.inf
@staticmethod
def def_Euclidean_distance(beta=1):
if beta <= 0 or beta > 2:
raise RuntimeError("'beta' not in the right range (0,2]")
if beta == 1:
def Euclidean_distance(x, y):
return np.linalg.norm(x - y)
else:
def Euclidean_distance(x, y):
return np.linalg.norm(x - y) ** beta
return Euclidean_distance
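# --- Usage sketch (added for illustration; not part of the original module) ---
# EnergyDistance reuses the MMD machinery with the kernel k(x, y) = -||x - y||**beta, so its
# usage mirrors MMD; `beta` is forwarded to the Euclidean base distance. `Identity` is
# assumed to be the statistics extractor from abcpy.statistics; the data are placeholders.
def _example_energy_distance():  # hypothetical helper, for illustration only
    import numpy as np
    from abcpy.statistics import Identity  # assumed import path
    stat_calc = Identity(degree=1, cross=False)
    d1 = [np.random.normal(0.0, 1.0, 4) for _ in range(40)]
    d2 = [np.random.normal(1.0, 1.0, 4) for _ in range(40)]
    dist_calc = EnergyDistance(stat_calc, base_distance="Euclidean", biased_estimator=True, beta=1)
    return dist_calc.distance(d1, d2)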
class SquaredHellingerDistance(Divergence):
"""
    This implements an empirical estimator of the squared Hellinger distance for ABC. Using the Hellinger distance
    was originally suggested in [1]; as that work did not provide implementation details, this implementation is
    original. The estimator is based on a nearest neighbor density estimate.
Specifically, this considers the
several simulations/observations in the datasets as iid samples from the model for a fixed parameter value/from the
data generating model, and estimates the divergence between the empirical distributions those
simulations/observations define.
[1] Frazier, D. T. (2020). Robust and Efficient Approximate Bayesian Computation: A Minimum Distance Approach.
arXiv preprint arXiv:2006.14126.
Parameters
----------
statistics_calc : abcpy.statistics.Statistics
Statistics extractor object that conforms to the Statistics class.
k : int, optional
nearest neighbor number for the density estimate. Default value is 1
"""
def __init__(self, statistics_calc, k=1):
super(SquaredHellingerDistance, self).__init__(statistics_calc)
self.k = k # number of nearest neighbors used in the estimation algorithm
def distance(self, d1, d2):
"""Calculates the distance between two datasets.
Parameters
----------
d1: Python list
Contains n1 data points.
d2: Python list
Contains n2 data points.
Returns
-------
numpy.float
The distance between the two input data sets.
"""
s1, s2 = self._calculate_summary_stat(d1, d2)
        if s1.shape[0] <= self.k or s2.shape[0] <= self.k:
            raise ValueError(f"The provided value of k ({self.k}) is larger than or equal to the number of samples "
                             f"in one of the two datasets; k should instead be smaller than both sample sizes.")
        # estimate the squared Hellinger distance using the empirical distributions
return self.skl_estimator_squared_Hellinger_distance(s1=s1, s2=s2, k=self.k)
def dist_max(self):
"""
Returns
-------
numpy.float
The maximal possible value of the desired distance function.
"""
return 2
@staticmethod
def skl_estimator_squared_Hellinger_distance(s1, s2, k=1):
""" Squared Hellinger distance estimator using scikit-learn's NearestNeighbours
s1: (N_1,D) Sample drawn from distribution P
s2: (N_2,D) Sample drawn from distribution Q
k: Number of neighbours considered (default 1)
        return: estimated squared Hellinger distance between P and Q
"""
n, m = len(s1), len(s2) # NOTE: here different convention of n, m wrt MMD and EnergyDistance
d = float(s1.shape[1])
d_2 = d / 2
        radius = 10  # the radius parameter is not used by the kneighbors queries below; any value works
s1_neighbourhood_k1 = NearestNeighbors(n_neighbors=k + 1, radius=radius, algorithm='kd_tree').fit(s1)
s1_neighbourhood_k = NearestNeighbors(n_neighbors=k, radius=radius, algorithm='kd_tree').fit(s1)
s2_neighbourhood_k1 = NearestNeighbors(n_neighbors=k + 1, radius=radius, algorithm='kd_tree').fit(s2)
s2_neighbourhood_k = NearestNeighbors(n_neighbors=k, radius=radius, algorithm='kd_tree').fit(s2)
s1_distances, indices = s1_neighbourhood_k1.kneighbors(s1, k + 1)
s2_distances, indices = s2_neighbourhood_k.kneighbors(s1, k)
rho = s1_distances[:, -1]
nu = s2_distances[:, -1]
if np.any(rho == 0):
warnings.warn(
f"The distance between an element of the first dataset and its {k}-th NN in the same dataset "
f"is 0; this is due to elements which are repeated "
f"{k + 1} times in the first dataset, and may lead to a poor estimate of the distance. "
f"Increasing the value of k usually solves this.",
RuntimeWarning)
if np.any(nu == 0):
warnings.warn(f"The distance between an element of the first dataset and its {k}-th NN in the second "
f"dataset is 0; this causes divergences in the code, and it is usually due to equal "
f"elements"
f" in the two datasets. Increasing the value of k usually solves this.", RuntimeWarning)
        first_estimator = np.sum((rho / nu) ** d_2) / n  # empirical mean over the n points of s1
        first_estimator = 2 - 2 * np.sqrt((n - 1) / m) * first_estimator
s2_distances, indices = s2_neighbourhood_k1.kneighbors(s2, k + 1)
s1_distances, indices = s1_neighbourhood_k.kneighbors(s2, k)
rho = s2_distances[:, -1]
nu = s1_distances[:, -1]
if np.any(rho == 0):
warnings.warn(
f"The distance between an element of the second dataset and its {k}-th NN in the same dataset "
f"is 0; this is due to elements which are repeated "
f"{k + 1} times in the second dataset, and may lead to a poor estimate of the distance. "
f"Increasing the value of k usually solves this.",
RuntimeWarning)
# notice: the one below becomes 0 when one element in the s1 dataset is equal to one in the s2 dataset
# and k=1 (as the distance between those two would be 0, which gives infinity when dividing)
if np.any(nu == 0):
warnings.warn(f"The distance between an element of the second dataset and its {k}-th NN in the first "
f"dataset is 0; this causes divergences in the code, and it is usually due to equal "
f"elements"
f" in the two datasets. Increasing the value of k usually solves this.", RuntimeWarning)
        second_estimator = np.sum((rho / nu) ** d_2) / m  # empirical mean over the m points of s2
        second_estimator = 2 - 2 * np.sqrt((m - 1) / n) * second_estimator
# average the two estimators:
final_estimator = 0.5 * (first_estimator + second_estimator)
return final_estimator
def _estimate_always_positive(self):
return True
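# --- Usage sketch (added for illustration; not part of the original module) ---
# Direct call of the kNN squared Hellinger estimator on two toy samples; the result should
# lie close to the [0, 2] range of the squared Hellinger distance (the kNN estimate is
# noisy and not strictly clipped). The samples below are placeholders.
def _example_squared_hellinger():  # hypothetical helper, for illustration only
    import numpy as np
    rng = np.random.RandomState(0)
    s1 = rng.normal(0.0, 1.0, size=(200, 2))
    s2 = rng.normal(1.0, 1.0, size=(200, 2))
    return SquaredHellingerDistance.skl_estimator_squared_Hellinger_distance(s1, s2, k=5)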
| 40.504098
| 169
| 0.632966
|
e14b5c9dd45f2c43c23a5fb2942fbea318a80437
| 552
|
py
|
Python
|
tests/strict_after_load_test.py
|
jnice-81/dace
|
5211794a2d17b7189037ac485ab0b292fb02aa0d
|
[
"BSD-3-Clause"
] | 227
|
2019-03-15T23:39:06.000Z
|
2022-03-30T07:49:08.000Z
|
tests/strict_after_load_test.py
|
jnice-81/dace
|
5211794a2d17b7189037ac485ab0b292fb02aa0d
|
[
"BSD-3-Clause"
] | 834
|
2019-07-31T22:49:31.000Z
|
2022-03-28T14:01:32.000Z
|
tests/strict_after_load_test.py
|
jnice-81/dace
|
5211794a2d17b7189037ac485ab0b292fb02aa0d
|
[
"BSD-3-Clause"
] | 64
|
2019-03-19T05:40:37.000Z
|
2022-03-11T15:02:42.000Z
|
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
import dace
import os
@dace.program
def strict_after_load(A: dace.float32[10, 20], B: dace.float32[10, 20]):
for i, j in dace.map[0:10, 0:20]:
B[i, j] = A[i, j] + 1
def test():
sdfg = strict_after_load.to_sdfg(strict=False)
sdfg.save(os.path.join('_dacegraphs', 'before.sdfg'))
sdfg = dace.SDFG.from_file(os.path.join('_dacegraphs', 'before.sdfg'))
sdfg.apply_strict_transformations()
sdfg.compile()
if __name__ == "__main__":
test()
| 25.090909
| 75
| 0.67029
|
cb48ce5a48aaa9a1d60e850035f2e98a10c0651b
| 13,194
|
py
|
Python
|
datasets/wikipedia/wikipedia.py
|
hemildesai/datasets
|
69bc2ca01d3a4f599c5b56175046f21e88aaab07
|
[
"Apache-2.0"
] | 1
|
2021-04-15T17:59:46.000Z
|
2021-04-15T17:59:46.000Z
|
datasets/wikipedia/wikipedia.py
|
hemildesai/datasets
|
69bc2ca01d3a4f599c5b56175046f21e88aaab07
|
[
"Apache-2.0"
] | null | null | null |
datasets/wikipedia/wikipedia.py
|
hemildesai/datasets
|
69bc2ca01d3a4f599c5b56175046f21e88aaab07
|
[
"Apache-2.0"
] | 1
|
2021-03-24T18:33:32.000Z
|
2021-03-24T18:33:32.000Z
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Wikipedia dataset containing cleaned articles of all languages."""
from __future__ import absolute_import, division, print_function
import codecs
import json
import re
import xml.etree.cElementTree as etree
import six
import datasets
logger = datasets.logging.get_logger(__name__)
if six.PY3:
import bz2 # pylint:disable=g-import-not-at-top
else:
# py2's built-in bz2 package does not support reading from file objects.
import bz2file as bz2 # pylint:disable=g-import-not-at-top
_CITATION = """\
@ONLINE {wikidump,
author = {Wikimedia Foundation},
title = {Wikimedia Downloads},
url = {https://dumps.wikimedia.org}
}
"""
_DESCRIPTION = """\
Wikipedia dataset containing cleaned articles of all languages.
The datasets are built from the Wikipedia dump
(https://dumps.wikimedia.org/) with one split per language. Each example
contains the content of one full Wikipedia article with cleaning to strip
markdown and unwanted sections (references, etc.).
"""
_LICENSE = (
"This work is licensed under the Creative Commons Attribution-ShareAlike "
"3.0 Unported License. To view a copy of this license, visit "
"http://creativecommons.org/licenses/by-sa/3.0/ or send a letter to "
"Creative Commons, PO Box 1866, Mountain View, CA 94042, USA."
)
# Source: https://en.wikipedia.org/wiki/List_of_Wikipedias (accessed 3/1/2019)
# Removed because no articles: hz.
WIKIPEDIA_LANGUAGES = [
"aa",
"ab",
"ace",
"ady",
"af",
"ak",
"als",
"am",
"an",
"ang",
"ar",
"arc",
"arz",
"as",
"ast",
"atj",
"av",
"ay",
"az",
"azb",
"ba",
"bar",
"bat-smg",
"bcl",
"be",
"be-x-old",
"bg",
"bh",
"bi",
"bjn",
"bm",
"bn",
"bo",
"bpy",
"br",
"bs",
"bug",
"bxr",
"ca",
"cbk-zam",
"cdo",
"ce",
"ceb",
"ch",
"cho",
"chr",
"chy",
"ckb",
"co",
"cr",
"crh",
"cs",
"csb",
"cu",
"cv",
"cy",
"da",
"de",
"din",
"diq",
"dsb",
"dty",
"dv",
"dz",
"ee",
"el",
"eml",
"en",
"eo",
"es",
"et",
"eu",
"ext",
"fa",
"ff",
"fi",
"fiu-vro",
"fj",
"fo",
"fr",
"frp",
"frr",
"fur",
"fy",
"ga",
"gag",
"gan",
"gd",
"gl",
"glk",
"gn",
"gom",
"gor",
"got",
"gu",
"gv",
"ha",
"hak",
"haw",
"he",
"hi",
"hif",
"ho",
"hr",
"hsb",
"ht",
"hu",
"hy",
"ia",
"id",
"ie",
"ig",
"ii",
"ik",
"ilo",
"inh",
"io",
"is",
"it",
"iu",
"ja",
"jam",
"jbo",
"jv",
"ka",
"kaa",
"kab",
"kbd",
"kbp",
"kg",
"ki",
"kj",
"kk",
"kl",
"km",
"kn",
"ko",
"koi",
"krc",
"ks",
"ksh",
"ku",
"kv",
"kw",
"ky",
"la",
"lad",
"lb",
"lbe",
"lez",
"lfn",
"lg",
"li",
"lij",
"lmo",
"ln",
"lo",
"lrc",
"lt",
"ltg",
"lv",
"mai",
"map-bms",
"mdf",
"mg",
"mh",
"mhr",
"mi",
"min",
"mk",
"ml",
"mn",
"mr",
"mrj",
"ms",
"mt",
"mus",
"mwl",
"my",
"myv",
"mzn",
"na",
"nah",
"nap",
"nds",
"nds-nl",
"ne",
"new",
"ng",
"nl",
"nn",
"no",
"nov",
"nrm",
"nso",
"nv",
"ny",
"oc",
"olo",
"om",
"or",
"os",
"pa",
"pag",
"pam",
"pap",
"pcd",
"pdc",
"pfl",
"pi",
"pih",
"pl",
"pms",
"pnb",
"pnt",
"ps",
"pt",
"qu",
"rm",
"rmy",
"rn",
"ro",
"roa-rup",
"roa-tara",
"ru",
"rue",
"rw",
"sa",
"sah",
"sat",
"sc",
"scn",
"sco",
"sd",
"se",
"sg",
"sh",
"si",
"simple",
"sk",
"sl",
"sm",
"sn",
"so",
"sq",
"sr",
"srn",
"ss",
"st",
"stq",
"su",
"sv",
"sw",
"szl",
"ta",
"tcy",
"te",
"tet",
"tg",
"th",
"ti",
"tk",
"tl",
"tn",
"to",
"tpi",
"tr",
"ts",
"tt",
"tum",
"tw",
"ty",
"tyv",
"udm",
"ug",
"uk",
"ur",
"uz",
"ve",
"vec",
"vep",
"vi",
"vls",
"vo",
"wa",
"war",
"wo",
"wuu",
"xal",
"xh",
"xmf",
"yi",
"yo",
"za",
"zea",
"zh",
"zh-classical",
"zh-min-nan",
"zh-yue",
"zu",
]
_BASE_URL_TMPL = "https://dumps.wikimedia.org/{lang}wiki/{date}/"
_INFO_FILE = "dumpstatus.json"
class WikipediaConfig(datasets.BuilderConfig):
"""BuilderConfig for Wikipedia."""
def __init__(self, language=None, date=None, **kwargs):
"""BuilderConfig for Wikipedia.
Args:
language: string, the language code for the Wikipedia dump to use.
date: string, date of the Wikipedia dump in YYYYMMDD format. A list of
available dates can be found at https://dumps.wikimedia.org/enwiki/.
**kwargs: keyword arguments forwarded to super.
"""
super(WikipediaConfig, self).__init__(
name="{0}.{1}".format(date, language),
description="Wikipedia dataset for {0}, parsed from {1} dump.".format(language, date),
**kwargs,
)
self.date = date
self.language = language
_VERSION = datasets.Version("1.0.0", "")
class Wikipedia(datasets.BeamBasedBuilder):
"""Wikipedia dataset."""
# Use mirror (your.org) to avoid download caps.
BUILDER_CONFIG_CLASS = WikipediaConfig
BUILDER_CONFIGS = [
WikipediaConfig(
version=_VERSION,
language=lang,
date="20200501",
) # pylint:disable=g-complex-comprehension
for lang in WIKIPEDIA_LANGUAGES
]
def _info(self):
return datasets.DatasetInfo(
description=_DESCRIPTION,
features=datasets.Features({"title": datasets.Value("string"), "text": datasets.Value("string")}),
# No default supervised_keys.
supervised_keys=None,
homepage="https://dumps.wikimedia.org",
citation=_CITATION,
)
def _split_generators(self, dl_manager, pipeline):
def _base_url(lang):
return _BASE_URL_TMPL.format(lang=lang.replace("-", "_"), date=self.config.date)
lang = self.config.language
info_url = _base_url(lang) + _INFO_FILE
# Use dictionary since testing mock always returns the same result.
downloaded_files = dl_manager.download_and_extract({"info": info_url})
xml_urls = []
total_bytes = 0
with open(downloaded_files["info"], encoding="utf-8") as f:
dump_info = json.load(f)
multistream_dump_info = dump_info["jobs"]["articlesmultistreamdump"]
assert (
multistream_dump_info["status"] == "done"
), "Specified dump (%s) multistream status is not 'done': %s" % (
_base_url(lang),
multistream_dump_info["status"],
)
for fname, info in multistream_dump_info["files"].items():
if ".xml" not in fname:
continue
total_bytes += info["size"]
xml_urls.append(_base_url(lang) + fname)
# Use dictionary since testing mock always returns the same result.
downloaded_files = dl_manager.download({"xml": xml_urls})
if not pipeline.is_local():
downloaded_files = dl_manager.ship_files_with_pipeline(downloaded_files, pipeline)
return [
datasets.SplitGenerator( # pylint:disable=g-complex-comprehension
name=datasets.Split.TRAIN, gen_kwargs={"filepaths": downloaded_files["xml"], "language": lang}
)
]
def _build_pcollection(self, pipeline, filepaths, language):
"""Build PCollection of examples in the raw (text) form."""
import apache_beam as beam
import mwparserfromhell
def _extract_content(filepath):
"""Extracts article content from a single WikiMedia XML file."""
logger.info("generating examples from = %s", filepath)
with beam.io.filesystems.FileSystems.open(filepath) as f:
f = bz2.BZ2File(filename=f)
if six.PY3:
# Workaround due to:
# https://github.com/tensorflow/tensorflow/issues/33563
utf_f = codecs.getreader("utf-8")(f)
else:
utf_f = f
                # Also clear the root element, to free up more memory than `elem.clear()` alone.
context = etree.iterparse(utf_f, events=("end",))
context = iter(context)
unused_event, root = next(context)
for unused_event, elem in context:
if not elem.tag.endswith("page"):
continue
namespace = elem.tag[:-4]
title = elem.find("./{0}title".format(namespace)).text
ns = elem.find("./{0}ns".format(namespace)).text
id_ = elem.find("./{0}id".format(namespace)).text
# Filter pages that are not in the "main" namespace.
if ns != "0":
root.clear()
continue
raw_content = elem.find("./{0}revision/{0}text".format(namespace)).text
root.clear()
# Filter redirects.
if raw_content is None or raw_content.lower().startswith("#redirect"):
beam.metrics.Metrics.counter(language, "filtered-redirects").inc()
continue
beam.metrics.Metrics.counter(language, "extracted-examples").inc()
yield (id_, title, raw_content)
def _clean_content(inputs):
"""Cleans raw wikicode to extract text."""
id_, title, raw_content = inputs
try:
text = _parse_and_clean_wikicode(raw_content, parser=mwparserfromhell)
except (mwparserfromhell.parser.ParserError) as e:
beam.metrics.Metrics.counter(language, "parser-error").inc()
logger.error("mwparserfromhell ParseError: %s", e)
return
if not text:
beam.metrics.Metrics.counter(language, "empty-clean-examples").inc()
return
beam.metrics.Metrics.counter(language, "cleaned-examples").inc()
yield id_, {"title": title, "text": text}
return (
pipeline
| "Initialize" >> beam.Create(filepaths)
| "Extract content" >> beam.FlatMap(_extract_content)
| "Distribute" >> beam.transforms.Reshuffle()
| "Clean content" >> beam.FlatMap(_clean_content)
)
def _parse_and_clean_wikicode(raw_content, parser):
"""Strips formatting and unwanted sections from raw page content."""
wikicode = parser.parse(raw_content)
# Filters for references, tables, and file/image links.
re_rm_wikilink = re.compile("^(?:File|Image|Media):", flags=re.IGNORECASE | re.UNICODE)
def rm_wikilink(obj):
return bool(re_rm_wikilink.match(six.text_type(obj.title)))
def rm_tag(obj):
return six.text_type(obj.tag) in {"ref", "table"}
def rm_template(obj):
return obj.name.lower() in {"reflist", "notelist", "notelist-ua", "notelist-lr", "notelist-ur", "notelist-lg"}
def try_remove_obj(obj, section):
try:
section.remove(obj)
except ValueError:
# For unknown reasons, objects are sometimes not found.
pass
section_text = []
# Filter individual sections to clean.
for section in wikicode.get_sections(flat=True, include_lead=True, include_headings=True):
for obj in section.ifilter_wikilinks(matches=rm_wikilink, recursive=True):
try_remove_obj(obj, section)
for obj in section.ifilter_templates(matches=rm_template, recursive=True):
try_remove_obj(obj, section)
for obj in section.ifilter_tags(matches=rm_tag, recursive=True):
try_remove_obj(obj, section)
section_text.append(section.strip_code().strip())
return "\n\n".join(section_text)
| 23.310954
| 118
| 0.53206
|
e44d6b441a13aade1146752ad07c858edb02b71b
| 7,210
|
py
|
Python
|
ci/infra/testrunner/platforms/terraform.py
|
stefannica/skuba
|
6b2bb427946eeeae97bb7c723e77b29f8a3679f9
|
[
"Apache-2.0"
] | null | null | null |
ci/infra/testrunner/platforms/terraform.py
|
stefannica/skuba
|
6b2bb427946eeeae97bb7c723e77b29f8a3679f9
|
[
"Apache-2.0"
] | null | null | null |
ci/infra/testrunner/platforms/terraform.py
|
stefannica/skuba
|
6b2bb427946eeeae97bb7c723e77b29f8a3679f9
|
[
"Apache-2.0"
] | null | null | null |
import json
import logging
import os
from urllib.parse import urlparse
import hcl
from platforms.platform import Platform
from utils import (Format, step)
logger = logging.getLogger('testrunner')
class Terraform(Platform):
def __init__(self, conf, platform):
super().__init__(conf)
self.tfdir = os.path.join(self.conf.terraform.tfdir, platform)
self.tfjson_path = os.path.join(conf.workspace, "tfout.json")
self.tfout_path = os.path.join(self.conf.workspace, "tfout")
self.state = None
self.logs["files"] += ["/var/run/cloud-init/status.json",
"/var/log/cloud-init-output.log",
"/var/log/cloud-init.log"]
self.tmp_files = [self.tfout_path,
self.tfjson_path]
    def destroy(self, variables=()):
cmd = "destroy -auto-approve"
for var in variables:
cmd += f" -var {var}"
self._run_terraform_command(cmd)
def _provision_platform(self):
""" Create and apply terraform plan"""
exception = None
self._check_tf_deployed()
init_cmd = "init"
if self.conf.terraform.plugin_dir:
logger.info(f"Installing plugins from {self.conf.terraform.plugin_dir}")
init_cmd += f" -plugin-dir={self.conf.terraform.plugin_dir}"
self._run_terraform_command(init_cmd)
self._run_terraform_command("version")
self._generate_tfvars_file()
plan_cmd = f"plan -out {self.tfout_path}"
apply_cmd = f"apply -auto-approve {self.tfout_path}"
self._run_terraform_command(plan_cmd)
try:
self._run_terraform_command(apply_cmd)
except Exception as ex:
exception = ex
finally:
try:
self._fetch_terraform_output()
except Exception as inner_ex:
# don't override original exception if any
if not exception:
exception = inner_ex
if exception:
raise exception
def _load_tfstate(self):
if self.state is None:
fn = os.path.join(self.tfdir, "terraform.tfstate")
logger.debug("Reading configuration from {}".format(fn))
with open(fn) as f:
self.state = json.load(f)
def get_lb_ipaddr(self):
self._load_tfstate()
if self.state["version"] == 3:
return self.state["modules"][0]["outputs"]["ip_load_balancer"]["value"]["{}-lb".format(self.stack_name())]
elif self.state["version"] == 4:
return self.state["outputs"]["ip_load_balancer"]["value"]["{}-lb".format(self.stack_name())]
def get_num_nodes(self, role):
return len(self.get_nodes_ipaddrs(role))
def get_nodes_names(self, role):
stack_name = self.stack_name()
return [f'caasp-{role}-{stack_name}-{i}' for i in range(self.get_num_nodes(role))]
def get_nodes_ipaddrs(self, role):
self._load_tfstate()
if role not in ("master", "worker"):
raise ValueError("Invalid role: {}".format(role))
role_key = "ip_" + role + "s"
if self.state["version"] == 3:
return list(self.state["modules"][0]["outputs"][role_key]["value"].values())
elif self.state["version"] == 4:
return list(self.state["outputs"][role_key]["value"].values())
@step
def _fetch_terraform_output(self):
cmd = f"output -json >{self.tfjson_path}"
self._run_terraform_command(cmd)
def _generate_tfvars_file(self):
"""Generate terraform tfvars file"""
tfvars_template = os.path.join(self.tfdir, self.conf.terraform.tfvars)
tfvars_final = os.path.join(self.tfdir, "terraform.tfvars.json")
with open(tfvars_template) as f:
if '.json' in os.path.basename(tfvars_template).lower():
tfvars = json.load(f)
else:
tfvars = hcl.load(f)
self._update_tfvars(tfvars)
with open(tfvars_final, "w") as f:
json.dump(tfvars, f)
# take up to 45 characters from stackname to give room to the fixed part
# in the node name: caasp-[master|worker]-<stack name>-xxx (total length
# must be <= 63).
# Also ensure that only valid character are present and that the string
# starts and ends with alphanumeric characters and all lowercase.
def stack_name(self):
stack_name = self.conf.terraform.stack_name[:45]
        stack_name = stack_name.replace("_", "-").replace("/", "-")
stack_name = stack_name.strip("-.")
stack_name = stack_name.lower()
return stack_name
def _update_tfvars(self, tfvars):
new_vars = {
"internal_net": self.conf.terraform.internal_net,
"stack_name": self.stack_name(),
"username": self.conf.terraform.nodeuser,
"masters": self.conf.terraform.master.count,
"workers": self.conf.terraform.worker.count,
"authorized_keys": [self.utils.authorized_keys()]
}
for k, v in new_vars.items():
if tfvars.get(k) is not None:
if isinstance(v, list):
tfvars[k] = tfvars[k] + v
elif isinstance(v, dict):
tfvars[k].update(v)
else:
tfvars[k] = v
# if registry code specified, repositories are not needed
if self.conf.packages.registry_code:
tfvars["caasp_registry_code"] = self.conf.packages.registry_code
tfvars["repositories"] = {}
repos = tfvars.get("repositories", {})
if self.conf.packages.additional_repos:
for name, url in self.conf.packages.additional_repos.items():
repos[name] = url
# Update mirror urls
if self.conf.packages.mirror and repos:
for name, url in repos.items():
url_parsed = urlparse(url)
url_updated = url_parsed._replace(netloc=self.conf.packages.mirror)
tfvars["repositories"][name] = url_updated.geturl()
if self.conf.packages.additional_pkgs:
tfvars["packages"].extend(self.conf.packages.additional_pkgs)
    def _run_terraform_command(self, cmd, env=None):
        """Running terraform command in {terraform.tfdir}/{platform}"""
        # copy the caller's env (if any) instead of mutating a shared default argument
        env = dict(env) if env else {}
        cmd = f'{self._env_setup_cmd()}; terraform {cmd}'
# Terraform needs PATH and SSH_AUTH_SOCK
sock_fn = self.utils.ssh_sock_fn()
env["SSH_AUTH_SOCK"] = sock_fn
env["PATH"] = os.environ['PATH']
self.utils.runshellcommand(cmd, cwd=self.tfdir, env=env)
def _check_tf_deployed(self):
if os.path.exists(self.tfjson_path):
raise Exception(Format.alert(f"tf file found. Please run cleanup and try again {self.tfjson_path}"))
# TODO: this function is currently not used. Identify points where it should
# be invoked
def _verify_tf_dependency(self):
if not os.path.exists(self.tfjson_path):
            raise Exception(Format.alert("tf file not found. Please run terraform and try again"))
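# --- Usage sketch (added for illustration; not part of the original module) ---
# The sanitization rules applied by Terraform.stack_name() above, shown in isolation on a
# placeholder input: truncate to 45 characters, map '_' and '/' to '-', strip leading and
# trailing '-' or '.', and lower-case the result.
def _example_stack_name_rules():  # hypothetical helper, for illustration only
    raw = "My_CI/Stack.Name."
    name = raw[:45].replace("_", "-").replace("/", "-").strip("-.").lower()
    return name  # -> 'my-ci-stack.name' (inner dots are kept; only the edges are stripped)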
| 36.785714
| 118
| 0.602497
|
b1426534af4edc5b5f178e8ea96707d265f39aef
| 4,766
|
py
|
Python
|
ckanext/datapusher/plugin.py
|
RabiaSajjad/ckan
|
ada55a5b32916f07ce04a6e1b51efeca848f68da
|
[
"BSD-3-Clause"
] | null | null | null |
ckanext/datapusher/plugin.py
|
RabiaSajjad/ckan
|
ada55a5b32916f07ce04a6e1b51efeca848f68da
|
[
"BSD-3-Clause"
] | null | null | null |
ckanext/datapusher/plugin.py
|
RabiaSajjad/ckan
|
ada55a5b32916f07ce04a6e1b51efeca848f68da
|
[
"BSD-3-Clause"
] | null | null | null |
# encoding: utf-8
from __future__ import annotations
from ckan.common import CKANConfig
from ckan.types import Action, AuthFunction, Context
import logging
from typing import Any, Callable, cast
import ckan.model as model
import ckan.plugins as p
import ckanext.datapusher.views as views
import ckanext.datapusher.helpers as helpers
import ckanext.datapusher.logic.action as action
import ckanext.datapusher.logic.auth as auth
log = logging.getLogger(__name__)
class DatastoreException(Exception):
pass
@p.toolkit.blanket.config_declarations
class DatapusherPlugin(p.SingletonPlugin):
p.implements(p.IConfigurer, inherit=True)
p.implements(p.IConfigurable, inherit=True)
p.implements(p.IActions)
p.implements(p.IAuthFunctions)
p.implements(p.IResourceUrlChange)
p.implements(p.IResourceController, inherit=True)
p.implements(p.ITemplateHelpers)
p.implements(p.IBlueprint)
legacy_mode = False
resource_show_action = None
def update_config(self, config: CKANConfig):
templates_base = config.get_value(u'ckan.base_templates_folder')
p.toolkit.add_template_directory(config, templates_base)
def configure(self, config: CKANConfig):
self.config = config
for config_option in (
u'ckan.site_url',
u'ckan.datapusher.url',
):
if not config.get_value(config_option):
raise Exception(
u'Config option `{0}` must be set to use the DataPusher.'.
format(config_option)
)
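    # A minimal sketch (not part of this plugin) of the CKAN .ini options referenced here;
    # the values are placeholders, and only `ckan.site_url` and `ckan.datapusher.url` are
    # strictly required by the check in `configure` above:
    #
    #   ckan.site_url = http://localhost:5000
    #   ckan.datapusher.url = http://127.0.0.1:8800
    #   ckan.datapusher.formats = csv xls xlsx tsv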
# IResourceUrlChange
def notify(self, resource: model.Resource):
context = cast(Context, {
u'model': model,
u'ignore_auth': True,
})
resource_dict = p.toolkit.get_action(u'resource_show')(
context, {
u'id': resource.id,
}
)
self._submit_to_datapusher(resource_dict)
# IResourceController
def after_resource_create(
self, context: Context, resource_dict: dict[str, Any]):
self._submit_to_datapusher(resource_dict)
def _submit_to_datapusher(self, resource_dict: dict[str, Any]):
context = cast(Context, {
u'model': model,
u'ignore_auth': True,
u'defer_commit': True
})
resource_format = resource_dict.get('format')
supported_formats = p.toolkit.config.get_value(
'ckan.datapusher.formats'
)
submit = (
resource_format
and resource_format.lower() in supported_formats
and resource_dict.get('url_type') != u'datapusher'
)
if not submit:
return
try:
task = p.toolkit.get_action(u'task_status_show')(
context, {
u'entity_id': resource_dict['id'],
u'task_type': u'datapusher',
u'key': u'datapusher'
}
)
if task.get(u'state') in (u'pending', u'submitting'):
# There already is a pending DataPusher submission,
# skip this one ...
log.debug(
u'Skipping DataPusher submission for '
u'resource {0}'.format(resource_dict['id'])
)
return
except p.toolkit.ObjectNotFound:
pass
try:
log.debug(
u'Submitting resource {0}'.format(resource_dict['id']) +
u' to DataPusher'
)
p.toolkit.get_action(u'datapusher_submit')(
context, {
u'resource_id': resource_dict['id']
}
)
        except p.toolkit.ValidationError as e:
            # If the DataPusher is offline we want to catch the error instead
            # of raising; otherwise the resource save would fail with a 500.
            log.critical(e)
def get_actions(self) -> dict[str, Action]:
return {
u'datapusher_submit': action.datapusher_submit,
u'datapusher_hook': action.datapusher_hook,
u'datapusher_status': action.datapusher_status
}
def get_auth_functions(self) -> dict[str, AuthFunction]:
return {
u'datapusher_submit': auth.datapusher_submit,
u'datapusher_status': auth.datapusher_status
}
def get_helpers(self) -> dict[str, Callable[..., Any]]:
return {
u'datapusher_status': helpers.datapusher_status,
u'datapusher_status_description': helpers.
datapusher_status_description,
}
# IBlueprint
def get_blueprint(self):
return views.get_blueprints()
| 30.551282
| 78
| 0.594629
|
d6bee16ecd1bcb5d34642764642248ed9f004254
| 8,873
|
py
|
Python
|
setup.py
|
sgframework/sgpy
|
7f8cb8e1535cfe01a166b4c3a456ad68b66106d7
|
[
"MIT"
] | 3
|
2019-05-23T16:43:31.000Z
|
2019-05-23T16:48:39.000Z
|
setup.py
|
sgframework/sgpy
|
7f8cb8e1535cfe01a166b4c3a456ad68b66106d7
|
[
"MIT"
] | 3
|
2020-03-24T17:05:30.000Z
|
2021-02-02T21:59:46.000Z
|
setup.py
|
sgframework/sgpy
|
7f8cb8e1535cfe01a166b4c3a456ad68b66106d7
|
[
"MIT"
] | 1
|
2019-05-23T16:43:33.000Z
|
2019-05-23T16:43:33.000Z
|
"""A setuptools based setup module.
See:
https://packaging.python.org/guides/distributing-packages-using-setuptools/
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
from os import path
# io.open is needed for projects that support Python 2.7
# It ensures open() defaults to text mode with universal newlines,
# and accepts an argument to specify the text encoding
# Python 3 only projects can skip this import
from io import open
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
# Arguments marked as "Required" below must be included for upload to PyPI.
# Fields marked as "Optional" may be commented out.
setup(
# This is the name of your project. The first time you publish this
# package, this name will be registered for you. It will determine how
# users can install this project, e.g.:
#
# $ pip install sampleproject
#
# And where it will live on PyPI: https://pypi.org/project/sampleproject/
#
# There are some restrictions on what makes a valid project name
# specification here:
# https://packaging.python.org/specifications/core-metadata/#name
name='sgpy', # Required
# Versions should comply with PEP 440:
# https://www.python.org/dev/peps/pep-0440/
#
# For a discussion on single-sourcing the version across setup.py and the
# project code, see
# https://packaging.python.org/en/latest/single_source_version.html
version='1.1.1', # Required
# This is a one-line description or tagline of what your project does. This
# corresponds to the "Summary" metadata field:
# https://packaging.python.org/specifications/core-metadata/#summary
description='Python project extending main framework functionalities', # Optional
# This is an optional longer description of your project that represents
# the body of text which users will see when they visit PyPI.
#
# Often, this is the same as your README, so you can just read it in from
# that file directly (as we have already done above)
#
# This field corresponds to the "Description" metadata field:
# https://packaging.python.org/specifications/core-metadata/#description-optional
long_description=long_description, # Optional
# Denotes that our long_description is in Markdown; valid values are
# text/plain, text/x-rst, and text/markdown
#
# Optional if long_description is written in reStructuredText (rst) but
# required for plain-text or Markdown; if unspecified, "applications should
# attempt to render [the long_description] as text/x-rst; charset=UTF-8 and
# fall back to text/plain if it is not valid rst" (see link below)
#
# This field corresponds to the "Description-Content-Type" metadata field:
# https://packaging.python.org/specifications/core-metadata/#description-content-type-optional
long_description_content_type='text/markdown', # Optional (see note above)
# This should be a valid link to your project's main homepage.
#
# This field corresponds to the "Home-Page" metadata field:
# https://packaging.python.org/specifications/core-metadata/#home-page-optional
url='https://github.com/sgframework/sgpy', # Optional
# This should be your name or the name of the organization which owns the
# project.
author='sgpy', # Optional
# This should be a valid email address corresponding to the author listed
# above.
author_email='ads@ipool.remotewebaccess.com', # Optional
# Classifiers help users find your project by categorizing it.
#
# For a list of valid classifiers, see https://pypi.org/classifiers/
classifiers=[ # Optional
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
# 'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
# 'Intended Audience :: Developers',
# 'Topic :: Software Development :: Build Tools',
# Pick your license as you wish
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
# These classifiers are *not* checked by 'pip install'. See instead
# 'python_requires' below.
# 'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
# This field adds keywords for your project which will appear on the
# project page. What does your project relate to?
#
# Note that this is a string of words separated by whitespace, not a list.
keywords='SGPython development environment', # Optional
# You can just specify package directories manually here if your project is
# simple. Or you can use find_packages().
#
# Alternatively, if you just want to distribute a single Python file, use
# the `py_modules` argument instead as follows, which will expect a file
# called `my_module.py` to exist:
#
# py_modules=["my_module"],
#
packages=find_packages('sgapi', exclude=['contrib', 'tests']), # Required
# Specify which Python versions you support. In contrast to the
# 'Programming Language' classifiers above, 'pip install' will check this
# and refuse to install the project if the version does not match. If you
# do not support Python 2, you can simplify this to '>=3.5' or similar, see
# https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4',
# This field lists other packages that your project depends on to run.
# Any package you put here will be installed by pip when your project is
# installed, so they must be valid existing projects.
#
# For an analysis of "install_requires" vs pip's requirements files see:
# https://packaging.python.org/en/latest/requirements.html
install_requires=['peppercorn'], # Optional
# List additional groups of dependencies here (e.g. development
# dependencies). Users will be able to install these using the "extras"
# syntax, for example:
#
# $ pip install sgpy[dev]
#
# Similar to `install_requires` above, these must be valid existing
# projects.
extras_require={ # Optional
'dev': ['check-manifest'],
'test': ['coverage'],
},
# If there are data files included in your packages that need to be
# installed, specify them here.
#
# If using Python 2.6 or earlier, then these have to be included in
# MANIFEST.in as well.
package_data={ # Optional
'sgapi': ['data/*.dat', 'favicon.ico'],
},
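# Once installed, files declared in 'package_data' are expected to be readable
# through the standard library rather than via hard-coded install paths.
# A minimal sketch (the exact .dat file name is taken from 'data_files' below
# and is only an assumption about what ships with the package):
#
#   import pkgutil
#   raw = pkgutil.get_data('sgapi', 'data/flash.dat')  # returns bytes or None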
package_dir={'sgapi': 'src/sgapi'},
# Although 'package_data' is the preferred approach, in some cases you may
# need to place data files outside of your packages. See:
# http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files
#
# In this case, the listed data file will be installed into '<sys.prefix>/data'
data_files=[('data', ['src/sgapi/data/flash.dat'])], # Optional
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# `pip` to create the appropriate form of executable for the target
# platform.
#
# For example, the following provides a command called `sgpy` which
# executes the function `main` from the `sgapi` package when invoked:
entry_points={ # Optional
'console_scripts': [
'sgpy=sgapi:main',
],
},
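# After installation (e.g. `pip install .`), the entry point above is expected
# to generate a platform-appropriate `sgpy` executable on PATH that simply
# calls `sgapi.main()`:
#
#   $ sgpy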
# List additional URLs that are relevant to your project as a dict.
#
# This field corresponds to the "Project-URL" metadata fields:
# https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use
#
# Examples listed include a pattern for specifying where the package tracks
# issues, where the source is hosted, where to say thanks to the package
# maintainers, and where to support the project financially. The key is
# what's used to render the link text on PyPI.
project_urls={ # Optional
'Bug Reports': 'https://github.com/sgframework/sgpy/issues',
'Funding': 'https://donate.pypi.org',
'Say Thanks!': 'http://saythanks.io/to/sgframework',
},
)
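# A minimal sketch of exercising this setup script locally, assuming pip and
# setuptools are available (the commands are illustrative, not part of the
# project itself):
#
#   $ pip install .               # regular install as configured above
#   $ pip install -e .[dev,test]  # editable install with the declared extras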
| 43.495098
| 99
| 0.673278
|
752bf0adcb19ea0d69832210d2096d6fe68c3cf6
| 427
|
py
|
Python
|
buglab/utils/fileopen.py
|
microsoft/neurips21-self-supervised-bug-detection-and-repair
|
4e51184a63aecd19174ee40fc6433260ab73d56e
|
[
"MIT"
] | 47
|
2021-10-19T16:15:41.000Z
|
2022-03-21T11:51:43.000Z
|
buglab/utils/fileopen.py
|
microsoft/neurips21-self-supervised-bug-detection-and-repair
|
4e51184a63aecd19174ee40fc6433260ab73d56e
|
[
"MIT"
] | 2
|
2022-01-10T09:41:44.000Z
|
2022-03-09T12:54:55.000Z
|
buglab/utils/fileopen.py
|
microsoft/neurips21-self-supervised-bug-detection-and-repair
|
4e51184a63aecd19174ee40fc6433260ab73d56e
|
[
"MIT"
] | 11
|
2021-11-30T13:25:03.000Z
|
2022-03-16T11:38:08.000Z
|
from os import PathLike

from chardet import UniversalDetector


def detect_encoding_and_open(filepath: PathLike):
    """Detect the text encoding of ``filepath`` with chardet, then reopen it in text mode."""
    detector = UniversalDetector()
    with open(filepath, "rb") as rawdata:
        detector.reset()
        # Feed raw lines until the detector is confident enough to stop early.
        for line in rawdata.readlines():
            detector.feed(line)
            if detector.done:
                break
        detector.close()
    # Reopen the file in text mode using the detected encoding.
    return open(filepath, encoding=detector.result["encoding"])
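# Usage sketch for the helper above (the path is hypothetical; the returned
# handle is a regular text-mode file object using the detected encoding):
#
#   with detect_encoding_and_open("some_source_file.py") as fh:
#       print(fh.read()[:200])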
| 26.6875
| 63
| 0.662763
|
a703911a9ca2ffc22b14b29be8d6c88b22c45346
| 195
|
py
|
Python
|
sample_quepy_project/sample_quepy_project/dsl.py
|
akanimax/NLP2SQL
|
3a136f75621cc94076b981a50f18d5a459185271
|
[
"MIT"
] | 11
|
2017-09-07T15:35:26.000Z
|
2021-08-16T03:49:00.000Z
|
sample_quepy_project/sample_quepy_project/dsl.py
|
akanimax/NLP2SQL
|
3a136f75621cc94076b981a50f18d5a459185271
|
[
"MIT"
] | 1
|
2017-12-19T09:39:16.000Z
|
2018-07-18T07:03:06.000Z
|
sample_quepy_project/sample_quepy_project/dsl.py
|
akanimax/NLP2SQL
|
3a136f75621cc94076b981a50f18d5a459185271
|
[
"MIT"
] | 5
|
2017-10-23T05:48:16.000Z
|
2020-06-05T10:41:00.000Z
|
# coding: utf-8
"""
Domain specific language for sample_quepy_project quepy.
"""
from quepy.dsl import FixedRelation
class IsDefinedIn(FixedRelation):
    relation = " = "
    reverse = True
| 15
| 56
| 0.712821
|
431bfd9340ec9d7634e65f48977705de336b53cc
| 132,621
|
py
|
Python
|
dm_protobuf/dm_pb2.py
|
DNLINYJ/Biilibili_All_Danmu
|
049697d73a9de43cee6fa47d3d8364d0e0e62642
|
[
"Apache-2.0"
] | 13
|
2021-08-21T03:49:56.000Z
|
2022-03-21T07:07:40.000Z
|
dm_protobuf/dm_pb2.py
|
DNLINYJ/Biilibili_All_Danmu
|
049697d73a9de43cee6fa47d3d8364d0e0e62642
|
[
"Apache-2.0"
] | 1
|
2021-11-13T11:02:00.000Z
|
2021-11-13T11:02:00.000Z
|
dm_protobuf/dm_pb2.py
|
DNLINYJ/Biilibili_All_Danmu
|
049697d73a9de43cee6fa47d3d8364d0e0e62642
|
[
"Apache-2.0"
] | 2
|
2021-08-21T03:49:57.000Z
|
2022-02-23T02:48:15.000Z
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dm.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='dm.proto',
package='bilibili.community.service.dm.v1',
syntax='proto3',
serialized_options=None,
serialized_pb=_b('\n\x08\x64m.proto\x12 bilibili.community.service.dm.v1\"L\n\x0b\x44mSegSDKReq\x12\x0b\n\x03pid\x18\x01 \x01(\x03\x12\x0b\n\x03oid\x18\x02 \x01(\x03\x12\x0c\n\x04type\x18\x03 \x01(\x05\x12\x15\n\rsegment_index\x18\x04 \x01(\x03\"]\n\rDmSegSDKReply\x12\x0e\n\x06\x63losed\x18\x01 \x01(\x08\x12<\n\x05\x65lems\x18\x02 \x03(\x0b\x32-.bilibili.community.service.dm.v1.DanmakuElem\"L\n\x0b\x44mSegOttReq\x12\x0b\n\x03pid\x18\x01 \x01(\x03\x12\x0b\n\x03oid\x18\x02 \x01(\x03\x12\x0c\n\x04type\x18\x03 \x01(\x05\x12\x15\n\rsegment_index\x18\x04 \x01(\x03\"]\n\rDmSegOttReply\x12\x0e\n\x06\x63losed\x18\x01 \x01(\x08\x12<\n\x05\x65lems\x18\x02 \x03(\x0b\x32-.bilibili.community.service.dm.v1.DanmakuElem\"g\n\x0e\x44mSegMobileReq\x12\x0b\n\x03pid\x18\x01 \x01(\x03\x12\x0b\n\x03oid\x18\x02 \x01(\x03\x12\x0c\n\x04type\x18\x03 \x01(\x05\x12\x15\n\rsegment_index\x18\x04 \x01(\x03\x12\x16\n\x0eteenagers_mode\x18\x05 \x01(\x05\"\xa1\x01\n\x10\x44mSegMobileReply\x12<\n\x05\x65lems\x18\x01 \x03(\x0b\x32-.bilibili.community.service.dm.v1.DanmakuElem\x12\r\n\x05state\x18\x02 \x01(\x05\x12@\n\x07\x61i_flag\x18\x03 \x01(\x0b\x32/.bilibili.community.service.dm.v1.DanmakuAIFlag\"X\n\tDmViewReq\x12\x0b\n\x03pid\x18\x01 \x01(\x03\x12\x0b\n\x03oid\x18\x02 \x01(\x03\x12\x0c\n\x04type\x18\x03 \x01(\x05\x12\r\n\x05spmid\x18\x04 \x01(\t\x12\x14\n\x0cis_hard_boot\x18\x05 \x01(\x05\"\xf0\x03\n\x0b\x44mViewReply\x12\x0e\n\x06\x63losed\x18\x01 \x01(\x08\x12\x39\n\x04mask\x18\x02 \x01(\x0b\x32+.bilibili.community.service.dm.v1.VideoMask\x12\x41\n\x08subtitle\x18\x03 \x01(\x0b\x32/.bilibili.community.service.dm.v1.VideoSubtitle\x12\x13\n\x0bspecial_dms\x18\x04 \x03(\t\x12\x44\n\x07\x61i_flag\x18\x05 \x01(\x0b\x32\x33.bilibili.community.service.dm.v1.DanmakuFlagConfig\x12N\n\rplayer_config\x18\x06 \x01(\x0b\x32\x37.bilibili.community.service.dm.v1.DanmuPlayerViewConfig\x12\x16\n\x0esend_box_style\x18\x07 \x01(\x05\x12\r\n\x05\x61llow\x18\x08 \x01(\x08\x12\x11\n\tcheck_box\x18\t \x01(\t\x12\x1a\n\x12\x63heck_box_show_msg\x18\n \x01(\t\x12\x18\n\x10text_placeholder\x18\x0b \x01(\t\x12\x19\n\x11input_placeholder\x18\x0c \x01(\t\x12\x1d\n\x15report_filter_content\x18\r \x03(\t\"\xa8\x03\n\x0e\x44mWebViewReply\x12\r\n\x05state\x18\x01 \x01(\x05\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x11\n\ttext_side\x18\x03 \x01(\t\x12=\n\x06\x64m_sge\x18\x04 \x01(\x0b\x32-.bilibili.community.service.dm.v1.DmSegConfig\x12\x41\n\x04\x66lag\x18\x05 \x01(\x0b\x32\x33.bilibili.community.service.dm.v1.DanmakuFlagConfig\x12\x13\n\x0bspecial_dms\x18\x06 \x03(\t\x12\x11\n\tcheck_box\x18\x07 \x01(\x08\x12\r\n\x05\x63ount\x18\x08 \x01(\x03\x12?\n\ncommandDms\x18\t \x03(\x0b\x32+.bilibili.community.service.dm.v1.CommandDm\x12M\n\rplayer_config\x18\n \x01(\x0b\x32\x36.bilibili.community.service.dm.v1.DanmuWebPlayerConfig\x12\x1d\n\x15report_filter_content\x18\x0b \x03(\t\"\xa1\x01\n\tCommandDm\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0b\n\x03oid\x18\x02 \x01(\x03\x12\x0b\n\x03mid\x18\x03 \x01(\t\x12\x0f\n\x07\x63ommand\x18\x04 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x05 \x01(\t\x12\x10\n\x08progress\x18\x06 \x01(\x05\x12\r\n\x05\x63time\x18\x07 \x01(\t\x12\r\n\x05mtime\x18\x08 \x01(\t\x12\r\n\x05\x65xtra\x18\t \x01(\t\x12\r\n\x05idStr\x18\n \x01(\t\"/\n\x0b\x44mSegConfig\x12\x11\n\tpage_size\x18\x01 \x01(\x03\x12\r\n\x05total\x18\x02 \x01(\x03\"S\n\tVideoMask\x12\x0b\n\x03\x63id\x18\x01 \x01(\x03\x12\x0c\n\x04plat\x18\x02 \x01(\x05\x12\x0b\n\x03\x66ps\x18\x03 \x01(\x05\x12\x0c\n\x04time\x18\x04 \x01(\x03\x12\x10\n\x08mask_url\x18\x05 
\x01(\t\"o\n\rVideoSubtitle\x12\x0b\n\x03lan\x18\x01 \x01(\t\x12\x0e\n\x06lanDoc\x18\x02 \x01(\t\x12\x41\n\tsubtitles\x18\x03 \x03(\x0b\x32..bilibili.community.service.dm.v1.SubtitleItem\"\x8f\x03\n\x14\x44\x61nmuWebPlayerConfig\x12\x11\n\tdm_switch\x18\x01 \x01(\x08\x12\x11\n\tai_switch\x18\x02 \x01(\x08\x12\x10\n\x08\x61i_level\x18\x03 \x01(\x05\x12\x10\n\x08\x62locktop\x18\x04 \x01(\x08\x12\x13\n\x0b\x62lockscroll\x18\x05 \x01(\x08\x12\x13\n\x0b\x62lockbottom\x18\x06 \x01(\x08\x12\x12\n\nblockcolor\x18\x07 \x01(\x08\x12\x14\n\x0c\x62lockspecial\x18\x08 \x01(\x08\x12\x14\n\x0cpreventshade\x18\t \x01(\x08\x12\r\n\x05\x64mask\x18\n \x01(\x08\x12\x0f\n\x07opacity\x18\x0b \x01(\x02\x12\x0e\n\x06\x64marea\x18\x0c \x01(\x05\x12\x11\n\tspeedplus\x18\r \x01(\x02\x12\x10\n\x08\x66ontsize\x18\x0e \x01(\x02\x12\x12\n\nscreensync\x18\x0f \x01(\x08\x12\x11\n\tspeedsync\x18\x10 \x01(\x08\x12\x12\n\nfontfamily\x18\x11 \x01(\t\x12\x0c\n\x04\x62old\x18\x12 \x01(\x08\x12\x12\n\nfontborder\x18\x13 \x01(\x05\x12\x11\n\tdraw_type\x18\x14 \x01(\t\"\x9a\x01\n\x0cSubtitleItem\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0e\n\x06id_str\x18\x02 \x01(\t\x12\x0b\n\x03lan\x18\x03 \x01(\t\x12\x0f\n\x07lan_doc\x18\x04 \x01(\t\x12\x14\n\x0csubtitle_url\x18\x05 \x01(\t\x12:\n\x06\x61uthor\x18\x06 \x01(\x0b\x32*.bilibili.community.service.dm.v1.UserInfo\"\\\n\x08UserInfo\x12\x0b\n\x03mid\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0b\n\x03sex\x18\x03 \x01(\t\x12\x0c\n\x04\x66\x61\x63\x65\x18\x04 \x01(\t\x12\x0c\n\x04sign\x18\x05 \x01(\t\x12\x0c\n\x04rank\x18\x06 \x01(\x05\"\xd6\x01\n\x0b\x44\x61nmakuElem\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x10\n\x08progress\x18\x02 \x01(\x05\x12\x0c\n\x04mode\x18\x03 \x01(\x05\x12\x10\n\x08\x66ontsize\x18\x04 \x01(\x05\x12\r\n\x05\x63olor\x18\x05 \x01(\r\x12\x0f\n\x07midHash\x18\x06 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x07 \x01(\t\x12\r\n\x05\x63time\x18\x08 \x01(\x03\x12\x0e\n\x06weight\x18\t \x01(\x05\x12\x0e\n\x06\x61\x63tion\x18\n \x01(\t\x12\x0c\n\x04pool\x18\x0b \x01(\x05\x12\r\n\x05idStr\x18\x0c \x01(\t\x12\x0c\n\x04\x61ttr\x18\r \x01(\x05\"\xa0\x0b\n\x11\x44mPlayerConfigReq\x12\n\n\x02ts\x18\x01 \x01(\x03\x12\x45\n\x06switch\x18\x02 \x01(\x0b\x32\x35.bilibili.community.service.dm.v1.PlayerDanmakuSwitch\x12N\n\x0bswitch_save\x18\x03 \x01(\x0b\x32\x39.bilibili.community.service.dm.v1.PlayerDanmakuSwitchSave\x12[\n\x12use_default_config\x18\x04 \x01(\x0b\x32?.bilibili.community.service.dm.v1.PlayerDanmakuUseDefaultConfig\x12\x61\n\x15\x61i_recommended_switch\x18\x05 \x01(\x0b\x32\x42.bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedSwitch\x12_\n\x14\x61i_recommended_level\x18\x06 \x01(\x0b\x32\x41.bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedLevel\x12I\n\x08\x62locktop\x18\x07 \x01(\x0b\x32\x37.bilibili.community.service.dm.v1.PlayerDanmakuBlocktop\x12O\n\x0b\x62lockscroll\x18\x08 \x01(\x0b\x32:.bilibili.community.service.dm.v1.PlayerDanmakuBlockscroll\x12O\n\x0b\x62lockbottom\x18\t \x01(\x0b\x32:.bilibili.community.service.dm.v1.PlayerDanmakuBlockbottom\x12S\n\rblockcolorful\x18\n \x01(\x0b\x32<.bilibili.community.service.dm.v1.PlayerDanmakuBlockcolorful\x12O\n\x0b\x62lockrepeat\x18\x0b \x01(\x0b\x32:.bilibili.community.service.dm.v1.PlayerDanmakuBlockrepeat\x12Q\n\x0c\x62lockspecial\x18\x0c \x01(\x0b\x32;.bilibili.community.service.dm.v1.PlayerDanmakuBlockspecial\x12G\n\x07opacity\x18\r \x01(\x0b\x32\x36.bilibili.community.service.dm.v1.PlayerDanmakuOpacity\x12S\n\rscalingfactor\x18\x0e 
\x01(\x0b\x32<.bilibili.community.service.dm.v1.PlayerDanmakuScalingfactor\x12\x45\n\x06\x64omain\x18\x0f \x01(\x0b\x32\x35.bilibili.community.service.dm.v1.PlayerDanmakuDomain\x12\x43\n\x05speed\x18\x10 \x01(\x0b\x32\x34.bilibili.community.service.dm.v1.PlayerDanmakuSpeed\x12W\n\x0f\x65nableblocklist\x18\x11 \x01(\x0b\x32>.bilibili.community.service.dm.v1.PlayerDanmakuEnableblocklist\x12^\n\x19inlinePlayerDanmakuSwitch\x18\x12 \x01(\x0b\x32;.bilibili.community.service.dm.v1.InlinePlayerDanmakuSwitch\")\n\x08Response\x12\x0c\n\x04\x63ode\x18\x01 \x01(\x05\x12\x0f\n\x07message\x18\x02 \x01(\t\")\n\x0b\x44\x61nmakuFlag\x12\x0c\n\x04\x64mid\x18\x01 \x01(\x03\x12\x0c\n\x04\x66lag\x18\x02 \x01(\r\"K\n\x11\x44\x61nmakuFlagConfig\x12\x10\n\x08rec_flag\x18\x01 \x01(\x05\x12\x10\n\x08rec_text\x18\x02 \x01(\t\x12\x12\n\nrec_switch\x18\x03 \x01(\x05\"P\n\rDanmakuAIFlag\x12?\n\x08\x64m_flags\x18\x01 \x03(\x0b\x32-.bilibili.community.service.dm.v1.DanmakuFlag\"\xb1\x02\n\x15\x44\x61nmuPlayerViewConfig\x12\x61\n\x1d\x64\x61nmuku_default_player_config\x18\x01 \x01(\x0b\x32:.bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig\x12R\n\x15\x64\x61nmuku_player_config\x18\x02 \x01(\x0b\x32\x33.bilibili.community.service.dm.v1.DanmuPlayerConfig\x12\x61\n\x1d\x64\x61nmuku_player_dynamic_config\x18\x03 \x03(\x0b\x32:.bilibili.community.service.dm.v1.DanmuPlayerDynamicConfig\"\xa1\x04\n\x18\x44\x61nmuDefaultPlayerConfig\x12)\n!player_danmaku_use_default_config\x18\x01 \x01(\x08\x12,\n$player_danmaku_ai_recommended_switch\x18\x04 \x01(\x08\x12+\n#player_danmaku_ai_recommended_level\x18\x05 \x01(\x05\x12\x1f\n\x17player_danmaku_blocktop\x18\x06 \x01(\x08\x12\"\n\x1aplayer_danmaku_blockscroll\x18\x07 \x01(\x08\x12\"\n\x1aplayer_danmaku_blockbottom\x18\x08 \x01(\x08\x12$\n\x1cplayer_danmaku_blockcolorful\x18\t \x01(\x08\x12\"\n\x1aplayer_danmaku_blockrepeat\x18\n \x01(\x08\x12#\n\x1bplayer_danmaku_blockspecial\x18\x0b \x01(\x08\x12\x1e\n\x16player_danmaku_opacity\x18\x0c \x01(\x02\x12$\n\x1cplayer_danmaku_scalingfactor\x18\r \x01(\x02\x12\x1d\n\x15player_danmaku_domain\x18\x0e \x01(\x02\x12\x1c\n\x14player_danmaku_speed\x18\x0f \x01(\x05\x12$\n\x1cinline_player_danmaku_switch\x18\x10 \x01(\x08\"\xab\x05\n\x11\x44\x61nmuPlayerConfig\x12\x1d\n\x15player_danmaku_switch\x18\x01 \x01(\x08\x12\"\n\x1aplayer_danmaku_switch_save\x18\x02 \x01(\x08\x12)\n!player_danmaku_use_default_config\x18\x03 \x01(\x08\x12,\n$player_danmaku_ai_recommended_switch\x18\x04 \x01(\x08\x12+\n#player_danmaku_ai_recommended_level\x18\x05 \x01(\x05\x12\x1f\n\x17player_danmaku_blocktop\x18\x06 \x01(\x08\x12\"\n\x1aplayer_danmaku_blockscroll\x18\x07 \x01(\x08\x12\"\n\x1aplayer_danmaku_blockbottom\x18\x08 \x01(\x08\x12$\n\x1cplayer_danmaku_blockcolorful\x18\t \x01(\x08\x12\"\n\x1aplayer_danmaku_blockrepeat\x18\n \x01(\x08\x12#\n\x1bplayer_danmaku_blockspecial\x18\x0b \x01(\x08\x12\x1e\n\x16player_danmaku_opacity\x18\x0c \x01(\x02\x12$\n\x1cplayer_danmaku_scalingfactor\x18\r \x01(\x02\x12\x1d\n\x15player_danmaku_domain\x18\x0e \x01(\x02\x12\x1c\n\x14player_danmaku_speed\x18\x0f \x01(\x05\x12&\n\x1eplayer_danmaku_enableblocklist\x18\x10 \x01(\x08\x12$\n\x1cinline_player_danmaku_switch\x18\x11 \x01(\x08\x12$\n\x1cinline_player_danmaku_config\x18\x12 \x01(\x05\"K\n\x18\x44\x61nmuPlayerDynamicConfig\x12\x10\n\x08progress\x18\x01 \x01(\x05\x12\x1d\n\x15player_danmaku_domain\x18\x02 \x01(\x02\"7\n\x13PlayerDanmakuSwitch\x12\r\n\x05value\x18\x01 \x01(\x08\x12\x11\n\tcanIgnore\x18\x02 \x01(\x08\"(\n\x17PlayerDanmakuSwitchSave\x12\r\n\x05value\x18\x01 
\x01(\x08\".\n\x1dPlayerDanmakuUseDefaultConfig\x12\r\n\x05value\x18\x01 \x01(\x08\"1\n PlayerDanmakuAiRecommendedSwitch\x12\r\n\x05value\x18\x01 \x01(\x08\"0\n\x1fPlayerDanmakuAiRecommendedLevel\x12\r\n\x05value\x18\x01 \x01(\x08\"&\n\x15PlayerDanmakuBlocktop\x12\r\n\x05value\x18\x01 \x01(\x08\")\n\x18PlayerDanmakuBlockscroll\x12\r\n\x05value\x18\x01 \x01(\x08\")\n\x18PlayerDanmakuBlockbottom\x12\r\n\x05value\x18\x01 \x01(\x08\"+\n\x1aPlayerDanmakuBlockcolorful\x12\r\n\x05value\x18\x01 \x01(\x08\")\n\x18PlayerDanmakuBlockrepeat\x12\r\n\x05value\x18\x01 \x01(\x08\"*\n\x19PlayerDanmakuBlockspecial\x12\r\n\x05value\x18\x01 \x01(\x08\"%\n\x14PlayerDanmakuOpacity\x12\r\n\x05value\x18\x01 \x01(\x02\"+\n\x1aPlayerDanmakuScalingfactor\x12\r\n\x05value\x18\x01 \x01(\x02\"$\n\x13PlayerDanmakuDomain\x12\r\n\x05value\x18\x01 \x01(\x02\"#\n\x12PlayerDanmakuSpeed\x12\r\n\x05value\x18\x01 \x01(\x05\"-\n\x1cPlayerDanmakuEnableblocklist\x12\r\n\x05value\x18\x01 \x01(\x08\"*\n\x19InlinePlayerDanmakuSwitch\x12\r\n\x05value\x18\x01 \x01(\x08*L\n\tDMAttrBit\x12\x14\n\x10\x44MAttrBitProtect\x10\x00\x12\x15\n\x11\x44MAttrBitFromLive\x10\x01\x12\x12\n\x0e\x44MAttrHighLike\x10\x02\x32\xaa\x04\n\x02\x44M\x12s\n\x0b\x44mSegMobile\x12\x30.bilibili.community.service.dm.v1.DmSegMobileReq\x1a\x32.bilibili.community.service.dm.v1.DmSegMobileReply\x12\x64\n\x06\x44mView\x12+.bilibili.community.service.dm.v1.DmViewReq\x1a-.bilibili.community.service.dm.v1.DmViewReply\x12q\n\x0e\x44mPlayerConfig\x12\x33.bilibili.community.service.dm.v1.DmPlayerConfigReq\x1a*.bilibili.community.service.dm.v1.Response\x12j\n\x08\x44mSegOtt\x12-.bilibili.community.service.dm.v1.DmSegOttReq\x1a/.bilibili.community.service.dm.v1.DmSegOttReply\x12j\n\x08\x44mSegSDK\x12-.bilibili.community.service.dm.v1.DmSegSDKReq\x1a/.bilibili.community.service.dm.v1.DmSegSDKReplyb\x06proto3')
)
_DMATTRBIT = _descriptor.EnumDescriptor(
name='DMAttrBit',
full_name='bilibili.community.service.dm.v1.DMAttrBit',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='DMAttrBitProtect', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DMAttrBitFromLive', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DMAttrHighLike', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=7021,
serialized_end=7097,
)
_sym_db.RegisterEnumDescriptor(_DMATTRBIT)
DMAttrBit = enum_type_wrapper.EnumTypeWrapper(_DMATTRBIT)
DMAttrBitProtect = 0
DMAttrBitFromLive = 1
DMAttrHighLike = 2
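# Minimal usage sketch, assuming the DmSegMobileReply class that this module
# builds from the descriptors below and a raw protobuf payload fetched from
# the danmaku API:
#
#   reply = DmSegMobileReply()
#   reply.ParseFromString(raw_segment_bytes)
#   for elem in reply.elems:          # repeated DanmakuElem
#       print(elem.progress, elem.content)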
_DMSEGSDKREQ = _descriptor.Descriptor(
name='DmSegSDKReq',
full_name='bilibili.community.service.dm.v1.DmSegSDKReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pid', full_name='bilibili.community.service.dm.v1.DmSegSDKReq.pid', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oid', full_name='bilibili.community.service.dm.v1.DmSegSDKReq.oid', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.community.service.dm.v1.DmSegSDKReq.type', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='segment_index', full_name='bilibili.community.service.dm.v1.DmSegSDKReq.segment_index', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=46,
serialized_end=122,
)
_DMSEGSDKREPLY = _descriptor.Descriptor(
name='DmSegSDKReply',
full_name='bilibili.community.service.dm.v1.DmSegSDKReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='closed', full_name='bilibili.community.service.dm.v1.DmSegSDKReply.closed', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='elems', full_name='bilibili.community.service.dm.v1.DmSegSDKReply.elems', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=124,
serialized_end=217,
)
_DMSEGOTTREQ = _descriptor.Descriptor(
name='DmSegOttReq',
full_name='bilibili.community.service.dm.v1.DmSegOttReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pid', full_name='bilibili.community.service.dm.v1.DmSegOttReq.pid', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oid', full_name='bilibili.community.service.dm.v1.DmSegOttReq.oid', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.community.service.dm.v1.DmSegOttReq.type', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='segment_index', full_name='bilibili.community.service.dm.v1.DmSegOttReq.segment_index', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=219,
serialized_end=295,
)
_DMSEGOTTREPLY = _descriptor.Descriptor(
name='DmSegOttReply',
full_name='bilibili.community.service.dm.v1.DmSegOttReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='closed', full_name='bilibili.community.service.dm.v1.DmSegOttReply.closed', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='elems', full_name='bilibili.community.service.dm.v1.DmSegOttReply.elems', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=297,
serialized_end=390,
)
_DMSEGMOBILEREQ = _descriptor.Descriptor(
name='DmSegMobileReq',
full_name='bilibili.community.service.dm.v1.DmSegMobileReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pid', full_name='bilibili.community.service.dm.v1.DmSegMobileReq.pid', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oid', full_name='bilibili.community.service.dm.v1.DmSegMobileReq.oid', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.community.service.dm.v1.DmSegMobileReq.type', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='segment_index', full_name='bilibili.community.service.dm.v1.DmSegMobileReq.segment_index', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='teenagers_mode', full_name='bilibili.community.service.dm.v1.DmSegMobileReq.teenagers_mode', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=392,
serialized_end=495,
)
_DMSEGMOBILEREPLY = _descriptor.Descriptor(
name='DmSegMobileReply',
full_name='bilibili.community.service.dm.v1.DmSegMobileReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='elems', full_name='bilibili.community.service.dm.v1.DmSegMobileReply.elems', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='state', full_name='bilibili.community.service.dm.v1.DmSegMobileReply.state', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ai_flag', full_name='bilibili.community.service.dm.v1.DmSegMobileReply.ai_flag', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=498,
serialized_end=659,
)
_DMVIEWREQ = _descriptor.Descriptor(
name='DmViewReq',
full_name='bilibili.community.service.dm.v1.DmViewReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='pid', full_name='bilibili.community.service.dm.v1.DmViewReq.pid', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oid', full_name='bilibili.community.service.dm.v1.DmViewReq.oid', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='type', full_name='bilibili.community.service.dm.v1.DmViewReq.type', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='spmid', full_name='bilibili.community.service.dm.v1.DmViewReq.spmid', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='is_hard_boot', full_name='bilibili.community.service.dm.v1.DmViewReq.is_hard_boot', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=661,
serialized_end=749,
)
_DMVIEWREPLY = _descriptor.Descriptor(
name='DmViewReply',
full_name='bilibili.community.service.dm.v1.DmViewReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='closed', full_name='bilibili.community.service.dm.v1.DmViewReply.closed', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mask', full_name='bilibili.community.service.dm.v1.DmViewReply.mask', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subtitle', full_name='bilibili.community.service.dm.v1.DmViewReply.subtitle', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='special_dms', full_name='bilibili.community.service.dm.v1.DmViewReply.special_dms', index=3,
number=4, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ai_flag', full_name='bilibili.community.service.dm.v1.DmViewReply.ai_flag', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_config', full_name='bilibili.community.service.dm.v1.DmViewReply.player_config', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='send_box_style', full_name='bilibili.community.service.dm.v1.DmViewReply.send_box_style', index=6,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='allow', full_name='bilibili.community.service.dm.v1.DmViewReply.allow', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='check_box', full_name='bilibili.community.service.dm.v1.DmViewReply.check_box', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='check_box_show_msg', full_name='bilibili.community.service.dm.v1.DmViewReply.check_box_show_msg', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='text_placeholder', full_name='bilibili.community.service.dm.v1.DmViewReply.text_placeholder', index=10,
number=11, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='input_placeholder', full_name='bilibili.community.service.dm.v1.DmViewReply.input_placeholder', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='report_filter_content', full_name='bilibili.community.service.dm.v1.DmViewReply.report_filter_content', index=12,
number=13, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=752,
serialized_end=1248,
)
_DMWEBVIEWREPLY = _descriptor.Descriptor(
name='DmWebViewReply',
full_name='bilibili.community.service.dm.v1.DmWebViewReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='state', full_name='bilibili.community.service.dm.v1.DmWebViewReply.state', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='text', full_name='bilibili.community.service.dm.v1.DmWebViewReply.text', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='text_side', full_name='bilibili.community.service.dm.v1.DmWebViewReply.text_side', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dm_sge', full_name='bilibili.community.service.dm.v1.DmWebViewReply.dm_sge', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flag', full_name='bilibili.community.service.dm.v1.DmWebViewReply.flag', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='special_dms', full_name='bilibili.community.service.dm.v1.DmWebViewReply.special_dms', index=5,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='check_box', full_name='bilibili.community.service.dm.v1.DmWebViewReply.check_box', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='count', full_name='bilibili.community.service.dm.v1.DmWebViewReply.count', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='commandDms', full_name='bilibili.community.service.dm.v1.DmWebViewReply.commandDms', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_config', full_name='bilibili.community.service.dm.v1.DmWebViewReply.player_config', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='report_filter_content', full_name='bilibili.community.service.dm.v1.DmWebViewReply.report_filter_content', index=10,
number=11, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1251,
serialized_end=1675,
)
_COMMANDDM = _descriptor.Descriptor(
name='CommandDm',
full_name='bilibili.community.service.dm.v1.CommandDm',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='bilibili.community.service.dm.v1.CommandDm.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='oid', full_name='bilibili.community.service.dm.v1.CommandDm.oid', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mid', full_name='bilibili.community.service.dm.v1.CommandDm.mid', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='command', full_name='bilibili.community.service.dm.v1.CommandDm.command', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='content', full_name='bilibili.community.service.dm.v1.CommandDm.content', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='progress', full_name='bilibili.community.service.dm.v1.CommandDm.progress', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ctime', full_name='bilibili.community.service.dm.v1.CommandDm.ctime', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mtime', full_name='bilibili.community.service.dm.v1.CommandDm.mtime', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='extra', full_name='bilibili.community.service.dm.v1.CommandDm.extra', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='idStr', full_name='bilibili.community.service.dm.v1.CommandDm.idStr', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1678,
serialized_end=1839,
)
_DMSEGCONFIG = _descriptor.Descriptor(
name='DmSegConfig',
full_name='bilibili.community.service.dm.v1.DmSegConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='page_size', full_name='bilibili.community.service.dm.v1.DmSegConfig.page_size', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='total', full_name='bilibili.community.service.dm.v1.DmSegConfig.total', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1841,
serialized_end=1888,
)
_VIDEOMASK = _descriptor.Descriptor(
name='VideoMask',
full_name='bilibili.community.service.dm.v1.VideoMask',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='cid', full_name='bilibili.community.service.dm.v1.VideoMask.cid', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='plat', full_name='bilibili.community.service.dm.v1.VideoMask.plat', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fps', full_name='bilibili.community.service.dm.v1.VideoMask.fps', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time', full_name='bilibili.community.service.dm.v1.VideoMask.time', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mask_url', full_name='bilibili.community.service.dm.v1.VideoMask.mask_url', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1890,
serialized_end=1973,
)
_VIDEOSUBTITLE = _descriptor.Descriptor(
name='VideoSubtitle',
full_name='bilibili.community.service.dm.v1.VideoSubtitle',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='lan', full_name='bilibili.community.service.dm.v1.VideoSubtitle.lan', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lanDoc', full_name='bilibili.community.service.dm.v1.VideoSubtitle.lanDoc', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subtitles', full_name='bilibili.community.service.dm.v1.VideoSubtitle.subtitles', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1975,
serialized_end=2086,
)
_DANMUWEBPLAYERCONFIG = _descriptor.Descriptor(
name='DanmuWebPlayerConfig',
full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dm_switch', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.dm_switch', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ai_switch', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.ai_switch', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ai_level', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.ai_level', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blocktop', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.blocktop', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockscroll', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.blockscroll', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockbottom', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.blockbottom', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockcolor', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.blockcolor', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockspecial', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.blockspecial', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='preventshade', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.preventshade', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dmask', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.dmask', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='opacity', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.opacity', index=10,
number=11, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dmarea', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.dmarea', index=11,
number=12, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='speedplus', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.speedplus', index=12,
number=13, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fontsize', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.fontsize', index=13,
number=14, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='screensync', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.screensync', index=14,
number=15, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='speedsync', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.speedsync', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fontfamily', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.fontfamily', index=16,
number=17, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='bold', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.bold', index=17,
number=18, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fontborder', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.fontborder', index=18,
number=19, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='draw_type', full_name='bilibili.community.service.dm.v1.DanmuWebPlayerConfig.draw_type', index=19,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2089,
serialized_end=2488,
)
_SUBTITLEITEM = _descriptor.Descriptor(
name='SubtitleItem',
full_name='bilibili.community.service.dm.v1.SubtitleItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='bilibili.community.service.dm.v1.SubtitleItem.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='id_str', full_name='bilibili.community.service.dm.v1.SubtitleItem.id_str', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lan', full_name='bilibili.community.service.dm.v1.SubtitleItem.lan', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='lan_doc', full_name='bilibili.community.service.dm.v1.SubtitleItem.lan_doc', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='subtitle_url', full_name='bilibili.community.service.dm.v1.SubtitleItem.subtitle_url', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='author', full_name='bilibili.community.service.dm.v1.SubtitleItem.author', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2491,
serialized_end=2645,
)
_USERINFO = _descriptor.Descriptor(
name='UserInfo',
full_name='bilibili.community.service.dm.v1.UserInfo',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='mid', full_name='bilibili.community.service.dm.v1.UserInfo.mid', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='name', full_name='bilibili.community.service.dm.v1.UserInfo.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sex', full_name='bilibili.community.service.dm.v1.UserInfo.sex', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='face', full_name='bilibili.community.service.dm.v1.UserInfo.face', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sign', full_name='bilibili.community.service.dm.v1.UserInfo.sign', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rank', full_name='bilibili.community.service.dm.v1.UserInfo.rank', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2647,
serialized_end=2739,
)
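# DanmakuElem is the per-danmaku (bullet comment) record: id, playback offset
# (`progress`), display `mode`, `fontsize`, `color`, sender hash (`midHash`),
# text `content`, creation time (`ctime`), plus weight/action/pool/idStr/attr
# metadata used for ranking and moderation.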
_DANMAKUELEM = _descriptor.Descriptor(
name='DanmakuElem',
full_name='bilibili.community.service.dm.v1.DanmakuElem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='bilibili.community.service.dm.v1.DanmakuElem.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='progress', full_name='bilibili.community.service.dm.v1.DanmakuElem.progress', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mode', full_name='bilibili.community.service.dm.v1.DanmakuElem.mode', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='fontsize', full_name='bilibili.community.service.dm.v1.DanmakuElem.fontsize', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='color', full_name='bilibili.community.service.dm.v1.DanmakuElem.color', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='midHash', full_name='bilibili.community.service.dm.v1.DanmakuElem.midHash', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='content', full_name='bilibili.community.service.dm.v1.DanmakuElem.content', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ctime', full_name='bilibili.community.service.dm.v1.DanmakuElem.ctime', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='weight', full_name='bilibili.community.service.dm.v1.DanmakuElem.weight', index=8,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='action', full_name='bilibili.community.service.dm.v1.DanmakuElem.action', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='pool', full_name='bilibili.community.service.dm.v1.DanmakuElem.pool', index=10,
number=11, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='idStr', full_name='bilibili.community.service.dm.v1.DanmakuElem.idStr', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='attr', full_name='bilibili.community.service.dm.v1.DanmakuElem.attr', index=12,
number=13, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2742,
serialized_end=2956,
)
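# DmPlayerConfigReq carries a player-side danmaku settings update. Each setting is
# wrapped in its own single-field message (PlayerDanmakuSwitch, PlayerDanmakuOpacity,
# ...) instead of a bare scalar, apparently so the server can distinguish "field not
# sent" from "field sent with its default value".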
_DMPLAYERCONFIGREQ = _descriptor.Descriptor(
name='DmPlayerConfigReq',
full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='ts', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.ts', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='switch', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.switch', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='switch_save', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.switch_save', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='use_default_config', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.use_default_config', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ai_recommended_switch', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.ai_recommended_switch', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ai_recommended_level', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.ai_recommended_level', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blocktop', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.blocktop', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockscroll', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.blockscroll', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockbottom', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.blockbottom', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockcolorful', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.blockcolorful', index=9,
number=10, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockrepeat', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.blockrepeat', index=10,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='blockspecial', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.blockspecial', index=11,
number=12, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='opacity', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.opacity', index=12,
number=13, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='scalingfactor', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.scalingfactor', index=13,
number=14, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='domain', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.domain', index=14,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='speed', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.speed', index=15,
number=16, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='enableblocklist', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.enableblocklist', index=16,
number=17, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inlinePlayerDanmakuSwitch', full_name='bilibili.community.service.dm.v1.DmPlayerConfigReq.inlinePlayerDanmakuSwitch', index=17,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2959,
serialized_end=4399,
)
_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='bilibili.community.service.dm.v1.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='bilibili.community.service.dm.v1.Response.code', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='bilibili.community.service.dm.v1.Response.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4401,
serialized_end=4442,
)
_DANMAKUFLAG = _descriptor.Descriptor(
name='DanmakuFlag',
full_name='bilibili.community.service.dm.v1.DanmakuFlag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dmid', full_name='bilibili.community.service.dm.v1.DanmakuFlag.dmid', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flag', full_name='bilibili.community.service.dm.v1.DanmakuFlag.flag', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4444,
serialized_end=4485,
)
_DANMAKUFLAGCONFIG = _descriptor.Descriptor(
name='DanmakuFlagConfig',
full_name='bilibili.community.service.dm.v1.DanmakuFlagConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='rec_flag', full_name='bilibili.community.service.dm.v1.DanmakuFlagConfig.rec_flag', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rec_text', full_name='bilibili.community.service.dm.v1.DanmakuFlagConfig.rec_text', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='rec_switch', full_name='bilibili.community.service.dm.v1.DanmakuFlagConfig.rec_switch', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4487,
serialized_end=4562,
)
_DANMAKUAIFLAG = _descriptor.Descriptor(
name='DanmakuAIFlag',
full_name='bilibili.community.service.dm.v1.DanmakuAIFlag',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='dm_flags', full_name='bilibili.community.service.dm.v1.DanmakuAIFlag.dm_flags', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4564,
serialized_end=4644,
)
_DANMUPLAYERVIEWCONFIG = _descriptor.Descriptor(
name='DanmuPlayerViewConfig',
full_name='bilibili.community.service.dm.v1.DanmuPlayerViewConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='danmuku_default_player_config', full_name='bilibili.community.service.dm.v1.DanmuPlayerViewConfig.danmuku_default_player_config', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='danmuku_player_config', full_name='bilibili.community.service.dm.v1.DanmuPlayerViewConfig.danmuku_player_config', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='danmuku_player_dynamic_config', full_name='bilibili.community.service.dm.v1.DanmuPlayerViewConfig.danmuku_player_dynamic_config', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4647,
serialized_end=4952,
)
_DANMUDEFAULTPLAYERCONFIG = _descriptor.Descriptor(
name='DanmuDefaultPlayerConfig',
full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='player_danmaku_use_default_config', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_use_default_config', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_ai_recommended_switch', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_ai_recommended_switch', index=1,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_ai_recommended_level', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_ai_recommended_level', index=2,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blocktop', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_blocktop', index=3,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockscroll', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_blockscroll', index=4,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockbottom', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_blockbottom', index=5,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockcolorful', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_blockcolorful', index=6,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockrepeat', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_blockrepeat', index=7,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockspecial', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_blockspecial', index=8,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_opacity', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_opacity', index=9,
number=12, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_scalingfactor', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_scalingfactor', index=10,
number=13, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_domain', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_domain', index=11,
number=14, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_speed', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.player_danmaku_speed', index=12,
number=15, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inline_player_danmaku_switch', full_name='bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig.inline_player_danmaku_switch', index=13,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4955,
serialized_end=5500,
)
_DANMUPLAYERCONFIG = _descriptor.Descriptor(
name='DanmuPlayerConfig',
full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='player_danmaku_switch', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_switch', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_switch_save', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_switch_save', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_use_default_config', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_use_default_config', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_ai_recommended_switch', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_ai_recommended_switch', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_ai_recommended_level', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_ai_recommended_level', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blocktop', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_blocktop', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockscroll', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_blockscroll', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockbottom', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_blockbottom', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockcolorful', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_blockcolorful', index=8,
number=9, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockrepeat', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_blockrepeat', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_blockspecial', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_blockspecial', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_opacity', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_opacity', index=11,
number=12, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_scalingfactor', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_scalingfactor', index=12,
number=13, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_domain', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_domain', index=13,
number=14, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_speed', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_speed', index=14,
number=15, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_enableblocklist', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.player_danmaku_enableblocklist', index=15,
number=16, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inline_player_danmaku_switch', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.inline_player_danmaku_switch', index=16,
number=17, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='inline_player_danmaku_config', full_name='bilibili.community.service.dm.v1.DanmuPlayerConfig.inline_player_danmaku_config', index=17,
number=18, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5503,
serialized_end=6186,
)
_DANMUPLAYERDYNAMICCONFIG = _descriptor.Descriptor(
name='DanmuPlayerDynamicConfig',
full_name='bilibili.community.service.dm.v1.DanmuPlayerDynamicConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='progress', full_name='bilibili.community.service.dm.v1.DanmuPlayerDynamicConfig.progress', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='player_danmaku_domain', full_name='bilibili.community.service.dm.v1.DanmuPlayerDynamicConfig.player_danmaku_domain', index=1,
number=2, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6188,
serialized_end=6263,
)
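# The descriptors below are the single-value wrapper messages referenced by
# DmPlayerConfigReq; each wraps one `value` field of the appropriate type
# (PlayerDanmakuSwitch additionally carries a `canIgnore` flag).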
_PLAYERDANMAKUSWITCH = _descriptor.Descriptor(
name='PlayerDanmakuSwitch',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuSwitch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuSwitch.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='canIgnore', full_name='bilibili.community.service.dm.v1.PlayerDanmakuSwitch.canIgnore', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6265,
serialized_end=6320,
)
_PLAYERDANMAKUSWITCHSAVE = _descriptor.Descriptor(
name='PlayerDanmakuSwitchSave',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuSwitchSave',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuSwitchSave.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6322,
serialized_end=6362,
)
_PLAYERDANMAKUUSEDEFAULTCONFIG = _descriptor.Descriptor(
name='PlayerDanmakuUseDefaultConfig',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuUseDefaultConfig',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuUseDefaultConfig.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6364,
serialized_end=6410,
)
_PLAYERDANMAKUAIRECOMMENDEDSWITCH = _descriptor.Descriptor(
name='PlayerDanmakuAiRecommendedSwitch',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedSwitch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedSwitch.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6412,
serialized_end=6461,
)
_PLAYERDANMAKUAIRECOMMENDEDLEVEL = _descriptor.Descriptor(
name='PlayerDanmakuAiRecommendedLevel',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedLevel',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedLevel.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6463,
serialized_end=6511,
)
_PLAYERDANMAKUBLOCKTOP = _descriptor.Descriptor(
name='PlayerDanmakuBlocktop',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlocktop',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlocktop.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6513,
serialized_end=6551,
)
_PLAYERDANMAKUBLOCKSCROLL = _descriptor.Descriptor(
name='PlayerDanmakuBlockscroll',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockscroll',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockscroll.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6553,
serialized_end=6594,
)
_PLAYERDANMAKUBLOCKBOTTOM = _descriptor.Descriptor(
name='PlayerDanmakuBlockbottom',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockbottom',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockbottom.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6596,
serialized_end=6637,
)
_PLAYERDANMAKUBLOCKCOLORFUL = _descriptor.Descriptor(
name='PlayerDanmakuBlockcolorful',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockcolorful',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockcolorful.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6639,
serialized_end=6682,
)
_PLAYERDANMAKUBLOCKREPEAT = _descriptor.Descriptor(
name='PlayerDanmakuBlockrepeat',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockrepeat',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockrepeat.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6684,
serialized_end=6725,
)
_PLAYERDANMAKUBLOCKSPECIAL = _descriptor.Descriptor(
name='PlayerDanmakuBlockspecial',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockspecial',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuBlockspecial.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6727,
serialized_end=6769,
)
_PLAYERDANMAKUOPACITY = _descriptor.Descriptor(
name='PlayerDanmakuOpacity',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuOpacity',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuOpacity.value', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6771,
serialized_end=6808,
)
_PLAYERDANMAKUSCALINGFACTOR = _descriptor.Descriptor(
name='PlayerDanmakuScalingfactor',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuScalingfactor',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuScalingfactor.value', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6810,
serialized_end=6853,
)
_PLAYERDANMAKUDOMAIN = _descriptor.Descriptor(
name='PlayerDanmakuDomain',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuDomain',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuDomain.value', index=0,
number=1, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6855,
serialized_end=6891,
)
_PLAYERDANMAKUSPEED = _descriptor.Descriptor(
name='PlayerDanmakuSpeed',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuSpeed',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuSpeed.value', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6893,
serialized_end=6928,
)
_PLAYERDANMAKUENABLEBLOCKLIST = _descriptor.Descriptor(
name='PlayerDanmakuEnableblocklist',
full_name='bilibili.community.service.dm.v1.PlayerDanmakuEnableblocklist',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.PlayerDanmakuEnableblocklist.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6930,
serialized_end=6975,
)
_INLINEPLAYERDANMAKUSWITCH = _descriptor.Descriptor(
name='InlinePlayerDanmakuSwitch',
full_name='bilibili.community.service.dm.v1.InlinePlayerDanmakuSwitch',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='bilibili.community.service.dm.v1.InlinePlayerDanmakuSwitch.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6977,
serialized_end=7019,
)
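# Resolve cross-references: every message-typed field declared above is linked to the
# descriptor of the message it embeds.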
_DMSEGSDKREPLY.fields_by_name['elems'].message_type = _DANMAKUELEM
_DMSEGOTTREPLY.fields_by_name['elems'].message_type = _DANMAKUELEM
_DMSEGMOBILEREPLY.fields_by_name['elems'].message_type = _DANMAKUELEM
_DMSEGMOBILEREPLY.fields_by_name['ai_flag'].message_type = _DANMAKUAIFLAG
_DMVIEWREPLY.fields_by_name['mask'].message_type = _VIDEOMASK
_DMVIEWREPLY.fields_by_name['subtitle'].message_type = _VIDEOSUBTITLE
_DMVIEWREPLY.fields_by_name['ai_flag'].message_type = _DANMAKUFLAGCONFIG
_DMVIEWREPLY.fields_by_name['player_config'].message_type = _DANMUPLAYERVIEWCONFIG
_DMWEBVIEWREPLY.fields_by_name['dm_sge'].message_type = _DMSEGCONFIG
_DMWEBVIEWREPLY.fields_by_name['flag'].message_type = _DANMAKUFLAGCONFIG
_DMWEBVIEWREPLY.fields_by_name['commandDms'].message_type = _COMMANDDM
_DMWEBVIEWREPLY.fields_by_name['player_config'].message_type = _DANMUWEBPLAYERCONFIG
_VIDEOSUBTITLE.fields_by_name['subtitles'].message_type = _SUBTITLEITEM
_SUBTITLEITEM.fields_by_name['author'].message_type = _USERINFO
_DMPLAYERCONFIGREQ.fields_by_name['switch'].message_type = _PLAYERDANMAKUSWITCH
_DMPLAYERCONFIGREQ.fields_by_name['switch_save'].message_type = _PLAYERDANMAKUSWITCHSAVE
_DMPLAYERCONFIGREQ.fields_by_name['use_default_config'].message_type = _PLAYERDANMAKUUSEDEFAULTCONFIG
_DMPLAYERCONFIGREQ.fields_by_name['ai_recommended_switch'].message_type = _PLAYERDANMAKUAIRECOMMENDEDSWITCH
_DMPLAYERCONFIGREQ.fields_by_name['ai_recommended_level'].message_type = _PLAYERDANMAKUAIRECOMMENDEDLEVEL
_DMPLAYERCONFIGREQ.fields_by_name['blocktop'].message_type = _PLAYERDANMAKUBLOCKTOP
_DMPLAYERCONFIGREQ.fields_by_name['blockscroll'].message_type = _PLAYERDANMAKUBLOCKSCROLL
_DMPLAYERCONFIGREQ.fields_by_name['blockbottom'].message_type = _PLAYERDANMAKUBLOCKBOTTOM
_DMPLAYERCONFIGREQ.fields_by_name['blockcolorful'].message_type = _PLAYERDANMAKUBLOCKCOLORFUL
_DMPLAYERCONFIGREQ.fields_by_name['blockrepeat'].message_type = _PLAYERDANMAKUBLOCKREPEAT
_DMPLAYERCONFIGREQ.fields_by_name['blockspecial'].message_type = _PLAYERDANMAKUBLOCKSPECIAL
_DMPLAYERCONFIGREQ.fields_by_name['opacity'].message_type = _PLAYERDANMAKUOPACITY
_DMPLAYERCONFIGREQ.fields_by_name['scalingfactor'].message_type = _PLAYERDANMAKUSCALINGFACTOR
_DMPLAYERCONFIGREQ.fields_by_name['domain'].message_type = _PLAYERDANMAKUDOMAIN
_DMPLAYERCONFIGREQ.fields_by_name['speed'].message_type = _PLAYERDANMAKUSPEED
_DMPLAYERCONFIGREQ.fields_by_name['enableblocklist'].message_type = _PLAYERDANMAKUENABLEBLOCKLIST
_DMPLAYERCONFIGREQ.fields_by_name['inlinePlayerDanmakuSwitch'].message_type = _INLINEPLAYERDANMAKUSWITCH
_DANMAKUAIFLAG.fields_by_name['dm_flags'].message_type = _DANMAKUFLAG
_DANMUPLAYERVIEWCONFIG.fields_by_name['danmuku_default_player_config'].message_type = _DANMUDEFAULTPLAYERCONFIG
_DANMUPLAYERVIEWCONFIG.fields_by_name['danmuku_player_config'].message_type = _DANMUPLAYERCONFIG
_DANMUPLAYERVIEWCONFIG.fields_by_name['danmuku_player_dynamic_config'].message_type = _DANMUPLAYERDYNAMICCONFIG
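# Register all message types (and the DMAttrBit enum) on the file descriptor, then
# register the file descriptor itself with the default symbol database.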
DESCRIPTOR.message_types_by_name['DmSegSDKReq'] = _DMSEGSDKREQ
DESCRIPTOR.message_types_by_name['DmSegSDKReply'] = _DMSEGSDKREPLY
DESCRIPTOR.message_types_by_name['DmSegOttReq'] = _DMSEGOTTREQ
DESCRIPTOR.message_types_by_name['DmSegOttReply'] = _DMSEGOTTREPLY
DESCRIPTOR.message_types_by_name['DmSegMobileReq'] = _DMSEGMOBILEREQ
DESCRIPTOR.message_types_by_name['DmSegMobileReply'] = _DMSEGMOBILEREPLY
DESCRIPTOR.message_types_by_name['DmViewReq'] = _DMVIEWREQ
DESCRIPTOR.message_types_by_name['DmViewReply'] = _DMVIEWREPLY
DESCRIPTOR.message_types_by_name['DmWebViewReply'] = _DMWEBVIEWREPLY
DESCRIPTOR.message_types_by_name['CommandDm'] = _COMMANDDM
DESCRIPTOR.message_types_by_name['DmSegConfig'] = _DMSEGCONFIG
DESCRIPTOR.message_types_by_name['VideoMask'] = _VIDEOMASK
DESCRIPTOR.message_types_by_name['VideoSubtitle'] = _VIDEOSUBTITLE
DESCRIPTOR.message_types_by_name['DanmuWebPlayerConfig'] = _DANMUWEBPLAYERCONFIG
DESCRIPTOR.message_types_by_name['SubtitleItem'] = _SUBTITLEITEM
DESCRIPTOR.message_types_by_name['UserInfo'] = _USERINFO
DESCRIPTOR.message_types_by_name['DanmakuElem'] = _DANMAKUELEM
DESCRIPTOR.message_types_by_name['DmPlayerConfigReq'] = _DMPLAYERCONFIGREQ
DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE
DESCRIPTOR.message_types_by_name['DanmakuFlag'] = _DANMAKUFLAG
DESCRIPTOR.message_types_by_name['DanmakuFlagConfig'] = _DANMAKUFLAGCONFIG
DESCRIPTOR.message_types_by_name['DanmakuAIFlag'] = _DANMAKUAIFLAG
DESCRIPTOR.message_types_by_name['DanmuPlayerViewConfig'] = _DANMUPLAYERVIEWCONFIG
DESCRIPTOR.message_types_by_name['DanmuDefaultPlayerConfig'] = _DANMUDEFAULTPLAYERCONFIG
DESCRIPTOR.message_types_by_name['DanmuPlayerConfig'] = _DANMUPLAYERCONFIG
DESCRIPTOR.message_types_by_name['DanmuPlayerDynamicConfig'] = _DANMUPLAYERDYNAMICCONFIG
DESCRIPTOR.message_types_by_name['PlayerDanmakuSwitch'] = _PLAYERDANMAKUSWITCH
DESCRIPTOR.message_types_by_name['PlayerDanmakuSwitchSave'] = _PLAYERDANMAKUSWITCHSAVE
DESCRIPTOR.message_types_by_name['PlayerDanmakuUseDefaultConfig'] = _PLAYERDANMAKUUSEDEFAULTCONFIG
DESCRIPTOR.message_types_by_name['PlayerDanmakuAiRecommendedSwitch'] = _PLAYERDANMAKUAIRECOMMENDEDSWITCH
DESCRIPTOR.message_types_by_name['PlayerDanmakuAiRecommendedLevel'] = _PLAYERDANMAKUAIRECOMMENDEDLEVEL
DESCRIPTOR.message_types_by_name['PlayerDanmakuBlocktop'] = _PLAYERDANMAKUBLOCKTOP
DESCRIPTOR.message_types_by_name['PlayerDanmakuBlockscroll'] = _PLAYERDANMAKUBLOCKSCROLL
DESCRIPTOR.message_types_by_name['PlayerDanmakuBlockbottom'] = _PLAYERDANMAKUBLOCKBOTTOM
DESCRIPTOR.message_types_by_name['PlayerDanmakuBlockcolorful'] = _PLAYERDANMAKUBLOCKCOLORFUL
DESCRIPTOR.message_types_by_name['PlayerDanmakuBlockrepeat'] = _PLAYERDANMAKUBLOCKREPEAT
DESCRIPTOR.message_types_by_name['PlayerDanmakuBlockspecial'] = _PLAYERDANMAKUBLOCKSPECIAL
DESCRIPTOR.message_types_by_name['PlayerDanmakuOpacity'] = _PLAYERDANMAKUOPACITY
DESCRIPTOR.message_types_by_name['PlayerDanmakuScalingfactor'] = _PLAYERDANMAKUSCALINGFACTOR
DESCRIPTOR.message_types_by_name['PlayerDanmakuDomain'] = _PLAYERDANMAKUDOMAIN
DESCRIPTOR.message_types_by_name['PlayerDanmakuSpeed'] = _PLAYERDANMAKUSPEED
DESCRIPTOR.message_types_by_name['PlayerDanmakuEnableblocklist'] = _PLAYERDANMAKUENABLEBLOCKLIST
DESCRIPTOR.message_types_by_name['InlinePlayerDanmakuSwitch'] = _INLINEPLAYERDANMAKUSWITCH
DESCRIPTOR.enum_types_by_name['DMAttrBit'] = _DMATTRBIT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
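# The blocks below are standard protoc reflection output: for each message type
# registered on DESCRIPTOR, _reflection.GeneratedProtocolMessageType builds a
# concrete Python class at import time, and _sym_db.RegisterMessage adds it to
# the default symbol database so it can be looked up by its full protobuf name.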
DmSegSDKReq = _reflection.GeneratedProtocolMessageType('DmSegSDKReq', (_message.Message,), {
'DESCRIPTOR' : _DMSEGSDKREQ,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmSegSDKReq)
})
_sym_db.RegisterMessage(DmSegSDKReq)
DmSegSDKReply = _reflection.GeneratedProtocolMessageType('DmSegSDKReply', (_message.Message,), {
'DESCRIPTOR' : _DMSEGSDKREPLY,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmSegSDKReply)
})
_sym_db.RegisterMessage(DmSegSDKReply)
DmSegOttReq = _reflection.GeneratedProtocolMessageType('DmSegOttReq', (_message.Message,), {
'DESCRIPTOR' : _DMSEGOTTREQ,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmSegOttReq)
})
_sym_db.RegisterMessage(DmSegOttReq)
DmSegOttReply = _reflection.GeneratedProtocolMessageType('DmSegOttReply', (_message.Message,), {
'DESCRIPTOR' : _DMSEGOTTREPLY,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmSegOttReply)
})
_sym_db.RegisterMessage(DmSegOttReply)
DmSegMobileReq = _reflection.GeneratedProtocolMessageType('DmSegMobileReq', (_message.Message,), {
'DESCRIPTOR' : _DMSEGMOBILEREQ,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmSegMobileReq)
})
_sym_db.RegisterMessage(DmSegMobileReq)
DmSegMobileReply = _reflection.GeneratedProtocolMessageType('DmSegMobileReply', (_message.Message,), {
'DESCRIPTOR' : _DMSEGMOBILEREPLY,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmSegMobileReply)
})
_sym_db.RegisterMessage(DmSegMobileReply)
DmViewReq = _reflection.GeneratedProtocolMessageType('DmViewReq', (_message.Message,), {
'DESCRIPTOR' : _DMVIEWREQ,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmViewReq)
})
_sym_db.RegisterMessage(DmViewReq)
DmViewReply = _reflection.GeneratedProtocolMessageType('DmViewReply', (_message.Message,), {
'DESCRIPTOR' : _DMVIEWREPLY,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmViewReply)
})
_sym_db.RegisterMessage(DmViewReply)
DmWebViewReply = _reflection.GeneratedProtocolMessageType('DmWebViewReply', (_message.Message,), {
'DESCRIPTOR' : _DMWEBVIEWREPLY,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmWebViewReply)
})
_sym_db.RegisterMessage(DmWebViewReply)
CommandDm = _reflection.GeneratedProtocolMessageType('CommandDm', (_message.Message,), {
'DESCRIPTOR' : _COMMANDDM,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.CommandDm)
})
_sym_db.RegisterMessage(CommandDm)
DmSegConfig = _reflection.GeneratedProtocolMessageType('DmSegConfig', (_message.Message,), {
'DESCRIPTOR' : _DMSEGCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmSegConfig)
})
_sym_db.RegisterMessage(DmSegConfig)
VideoMask = _reflection.GeneratedProtocolMessageType('VideoMask', (_message.Message,), {
'DESCRIPTOR' : _VIDEOMASK,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.VideoMask)
})
_sym_db.RegisterMessage(VideoMask)
VideoSubtitle = _reflection.GeneratedProtocolMessageType('VideoSubtitle', (_message.Message,), {
'DESCRIPTOR' : _VIDEOSUBTITLE,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.VideoSubtitle)
})
_sym_db.RegisterMessage(VideoSubtitle)
DanmuWebPlayerConfig = _reflection.GeneratedProtocolMessageType('DanmuWebPlayerConfig', (_message.Message,), {
'DESCRIPTOR' : _DANMUWEBPLAYERCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmuWebPlayerConfig)
})
_sym_db.RegisterMessage(DanmuWebPlayerConfig)
SubtitleItem = _reflection.GeneratedProtocolMessageType('SubtitleItem', (_message.Message,), {
'DESCRIPTOR' : _SUBTITLEITEM,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.SubtitleItem)
})
_sym_db.RegisterMessage(SubtitleItem)
UserInfo = _reflection.GeneratedProtocolMessageType('UserInfo', (_message.Message,), {
'DESCRIPTOR' : _USERINFO,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.UserInfo)
})
_sym_db.RegisterMessage(UserInfo)
DanmakuElem = _reflection.GeneratedProtocolMessageType('DanmakuElem', (_message.Message,), {
'DESCRIPTOR' : _DANMAKUELEM,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmakuElem)
})
_sym_db.RegisterMessage(DanmakuElem)
DmPlayerConfigReq = _reflection.GeneratedProtocolMessageType('DmPlayerConfigReq', (_message.Message,), {
'DESCRIPTOR' : _DMPLAYERCONFIGREQ,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DmPlayerConfigReq)
})
_sym_db.RegisterMessage(DmPlayerConfigReq)
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), {
'DESCRIPTOR' : _RESPONSE,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.Response)
})
_sym_db.RegisterMessage(Response)
DanmakuFlag = _reflection.GeneratedProtocolMessageType('DanmakuFlag', (_message.Message,), {
'DESCRIPTOR' : _DANMAKUFLAG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmakuFlag)
})
_sym_db.RegisterMessage(DanmakuFlag)
DanmakuFlagConfig = _reflection.GeneratedProtocolMessageType('DanmakuFlagConfig', (_message.Message,), {
'DESCRIPTOR' : _DANMAKUFLAGCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmakuFlagConfig)
})
_sym_db.RegisterMessage(DanmakuFlagConfig)
DanmakuAIFlag = _reflection.GeneratedProtocolMessageType('DanmakuAIFlag', (_message.Message,), {
'DESCRIPTOR' : _DANMAKUAIFLAG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmakuAIFlag)
})
_sym_db.RegisterMessage(DanmakuAIFlag)
DanmuPlayerViewConfig = _reflection.GeneratedProtocolMessageType('DanmuPlayerViewConfig', (_message.Message,), {
'DESCRIPTOR' : _DANMUPLAYERVIEWCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmuPlayerViewConfig)
})
_sym_db.RegisterMessage(DanmuPlayerViewConfig)
DanmuDefaultPlayerConfig = _reflection.GeneratedProtocolMessageType('DanmuDefaultPlayerConfig', (_message.Message,), {
'DESCRIPTOR' : _DANMUDEFAULTPLAYERCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmuDefaultPlayerConfig)
})
_sym_db.RegisterMessage(DanmuDefaultPlayerConfig)
DanmuPlayerConfig = _reflection.GeneratedProtocolMessageType('DanmuPlayerConfig', (_message.Message,), {
'DESCRIPTOR' : _DANMUPLAYERCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmuPlayerConfig)
})
_sym_db.RegisterMessage(DanmuPlayerConfig)
DanmuPlayerDynamicConfig = _reflection.GeneratedProtocolMessageType('DanmuPlayerDynamicConfig', (_message.Message,), {
'DESCRIPTOR' : _DANMUPLAYERDYNAMICCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.DanmuPlayerDynamicConfig)
})
_sym_db.RegisterMessage(DanmuPlayerDynamicConfig)
PlayerDanmakuSwitch = _reflection.GeneratedProtocolMessageType('PlayerDanmakuSwitch', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUSWITCH,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuSwitch)
})
_sym_db.RegisterMessage(PlayerDanmakuSwitch)
PlayerDanmakuSwitchSave = _reflection.GeneratedProtocolMessageType('PlayerDanmakuSwitchSave', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUSWITCHSAVE,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuSwitchSave)
})
_sym_db.RegisterMessage(PlayerDanmakuSwitchSave)
PlayerDanmakuUseDefaultConfig = _reflection.GeneratedProtocolMessageType('PlayerDanmakuUseDefaultConfig', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUUSEDEFAULTCONFIG,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuUseDefaultConfig)
})
_sym_db.RegisterMessage(PlayerDanmakuUseDefaultConfig)
PlayerDanmakuAiRecommendedSwitch = _reflection.GeneratedProtocolMessageType('PlayerDanmakuAiRecommendedSwitch', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUAIRECOMMENDEDSWITCH,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedSwitch)
})
_sym_db.RegisterMessage(PlayerDanmakuAiRecommendedSwitch)
PlayerDanmakuAiRecommendedLevel = _reflection.GeneratedProtocolMessageType('PlayerDanmakuAiRecommendedLevel', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUAIRECOMMENDEDLEVEL,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuAiRecommendedLevel)
})
_sym_db.RegisterMessage(PlayerDanmakuAiRecommendedLevel)
PlayerDanmakuBlocktop = _reflection.GeneratedProtocolMessageType('PlayerDanmakuBlocktop', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUBLOCKTOP,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuBlocktop)
})
_sym_db.RegisterMessage(PlayerDanmakuBlocktop)
PlayerDanmakuBlockscroll = _reflection.GeneratedProtocolMessageType('PlayerDanmakuBlockscroll', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUBLOCKSCROLL,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuBlockscroll)
})
_sym_db.RegisterMessage(PlayerDanmakuBlockscroll)
PlayerDanmakuBlockbottom = _reflection.GeneratedProtocolMessageType('PlayerDanmakuBlockbottom', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUBLOCKBOTTOM,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuBlockbottom)
})
_sym_db.RegisterMessage(PlayerDanmakuBlockbottom)
PlayerDanmakuBlockcolorful = _reflection.GeneratedProtocolMessageType('PlayerDanmakuBlockcolorful', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUBLOCKCOLORFUL,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuBlockcolorful)
})
_sym_db.RegisterMessage(PlayerDanmakuBlockcolorful)
PlayerDanmakuBlockrepeat = _reflection.GeneratedProtocolMessageType('PlayerDanmakuBlockrepeat', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUBLOCKREPEAT,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuBlockrepeat)
})
_sym_db.RegisterMessage(PlayerDanmakuBlockrepeat)
PlayerDanmakuBlockspecial = _reflection.GeneratedProtocolMessageType('PlayerDanmakuBlockspecial', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUBLOCKSPECIAL,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuBlockspecial)
})
_sym_db.RegisterMessage(PlayerDanmakuBlockspecial)
PlayerDanmakuOpacity = _reflection.GeneratedProtocolMessageType('PlayerDanmakuOpacity', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUOPACITY,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuOpacity)
})
_sym_db.RegisterMessage(PlayerDanmakuOpacity)
PlayerDanmakuScalingfactor = _reflection.GeneratedProtocolMessageType('PlayerDanmakuScalingfactor', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUSCALINGFACTOR,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuScalingfactor)
})
_sym_db.RegisterMessage(PlayerDanmakuScalingfactor)
PlayerDanmakuDomain = _reflection.GeneratedProtocolMessageType('PlayerDanmakuDomain', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUDOMAIN,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuDomain)
})
_sym_db.RegisterMessage(PlayerDanmakuDomain)
PlayerDanmakuSpeed = _reflection.GeneratedProtocolMessageType('PlayerDanmakuSpeed', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUSPEED,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuSpeed)
})
_sym_db.RegisterMessage(PlayerDanmakuSpeed)
PlayerDanmakuEnableblocklist = _reflection.GeneratedProtocolMessageType('PlayerDanmakuEnableblocklist', (_message.Message,), {
'DESCRIPTOR' : _PLAYERDANMAKUENABLEBLOCKLIST,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.PlayerDanmakuEnableblocklist)
})
_sym_db.RegisterMessage(PlayerDanmakuEnableblocklist)
InlinePlayerDanmakuSwitch = _reflection.GeneratedProtocolMessageType('InlinePlayerDanmakuSwitch', (_message.Message,), {
'DESCRIPTOR' : _INLINEPLAYERDANMAKUSWITCH,
'__module__' : 'dm_pb2'
# @@protoc_insertion_point(class_scope:bilibili.community.service.dm.v1.InlinePlayerDanmakuSwitch)
})
_sym_db.RegisterMessage(InlinePlayerDanmakuSwitch)
_DM = _descriptor.ServiceDescriptor(
name='DM',
full_name='bilibili.community.service.dm.v1.DM',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=7100,
serialized_end=7654,
methods=[
_descriptor.MethodDescriptor(
name='DmSegMobile',
full_name='bilibili.community.service.dm.v1.DM.DmSegMobile',
index=0,
containing_service=None,
input_type=_DMSEGMOBILEREQ,
output_type=_DMSEGMOBILEREPLY,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='DmView',
full_name='bilibili.community.service.dm.v1.DM.DmView',
index=1,
containing_service=None,
input_type=_DMVIEWREQ,
output_type=_DMVIEWREPLY,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='DmPlayerConfig',
full_name='bilibili.community.service.dm.v1.DM.DmPlayerConfig',
index=2,
containing_service=None,
input_type=_DMPLAYERCONFIGREQ,
output_type=_RESPONSE,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='DmSegOtt',
full_name='bilibili.community.service.dm.v1.DM.DmSegOtt',
index=3,
containing_service=None,
input_type=_DMSEGOTTREQ,
output_type=_DMSEGOTTREPLY,
serialized_options=None,
),
_descriptor.MethodDescriptor(
name='DmSegSDK',
full_name='bilibili.community.service.dm.v1.DM.DmSegSDK',
index=4,
containing_service=None,
input_type=_DMSEGSDKREQ,
output_type=_DMSEGSDKREPLY,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_DM)
DESCRIPTOR.services_by_name['DM'] = _DM
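# The DM service descriptor above only lists the RPC signatures (DmSegMobile,
# DmView, DmPlayerConfig, DmSegOtt, DmSegSDK); callable client stubs are
# normally emitted into a separate *_pb2_grpc module by the grpcio-tools
# plugin, if that plugin was run alongside this file.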
# @@protoc_insertion_point(module_scope)
| 45.731379
| 12420
| 0.755016
|
b878ccd6538ef10f1f89fa5681191305e44f2102
| 7614
|
py
|
Python
|
configuration/views.py
|
Not-Morgan/PGDBWebServer
|
9777773db763a13f168da633c69b9271f9da24b1
|
[
"MIT"
] | 1
|
2021-05-25T04:30:12.000Z
|
2021-05-25T04:30:12.000Z
|
configuration/views.py
|
Not-Morgan/PGDBWebServer
|
9777773db763a13f168da633c69b9271f9da24b1
|
[
"MIT"
] | 70
|
2020-02-20T23:43:52.000Z
|
2022-03-12T00:08:12.000Z
|
configuration/views.py
|
Not-Morgan/PGDBWebServer
|
9777773db763a13f168da633c69b9271f9da24b1
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponse, HttpResponseRedirect, Http404, JsonResponse
import os, pytz, datetime
from django.template.loader import render_to_string, get_template
from django.urls import reverse
import dateutil.parser
import httplib2, threading
from googleapiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials
from PGDBWebServer.settings import BUILD_NUMBER
from data.models import *
offline_status = False
integrity_data = []
def help(request):
template = get_template('configuration/help.html')
context = {
'build': BUILD_NUMBER,
}
if request.user.is_superuser:
return HttpResponse(template.render(context, request))
else:
return HttpResponseRedirect(reverse('entry:error'))
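# google_calendar() pulls the next ten upcoming events from the shared Google
# Calendar via a service-account credential. Events whose summary contains
# "MAINTENANCE:" populate the maintenance list (and flip offline_status while
# the current time falls inside the event window); all other events are
# returned as notices.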
def google_calendar():
maintenance = []
notice = []
now = datetime.datetime.utcnow().isoformat() + 'Z'
SCOPES = 'https://www.googleapis.com/auth/calendar'
global offline_status
offline_status = False
secret = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'../PGDBWebServer/ServerConfig/client_secret.json')
credentials = ServiceAccountCredentials.from_json_keyfile_name(filename=secret, scopes=SCOPES)
http = credentials.authorize(httplib2.Http())
service_now = [pytz.utc.localize(datetime.datetime(1970, 1, 1)).astimezone(pytz.timezone('America/Vancouver')),
pytz.utc.localize(datetime.datetime(1970, 1, 1)).astimezone(pytz.timezone('America/Vancouver'))]
try:
service = build('calendar', 'v3', http=http)
events = service.events().list(calendarId='pointgreydb@gmail.com', maxResults=10, timeMin=now, singleEvents=True,
orderBy='startTime').execute()
events = events.get('items', [])
now = pytz.utc.localize(datetime.datetime.utcnow()).astimezone(pytz.timezone('America/Vancouver'))
for event in events:
if "MAINTENANCE:" in event.get("summary"):
maintenance.append({
'action': event['summary'].replace("MAINTENANCE: ", "") if 'summary' in event else "",
'note': event['description'] if 'description' in event else "",
'start': dateutil.parser.parse(event["start"]["dateTime"]).strftime("%d %b, %Y %H:%M"),
'end': dateutil.parser.parse(event["end"]["dateTime"]).strftime("%d %b, %Y %H:%M"),
})
service_now[0] = dateutil.parser.parse(event["start"]["dateTime"])
service_now[1] = dateutil.parser.parse(event["end"]["dateTime"])
if service_now[0] < now < service_now[1]:
offline_status = True
else:
notice.append({
'title': event['summary'].replace("NOTICE: ", "") if 'summary' in event else "",
'note': event['description'] if 'description' in event else "",
'start': dateutil.parser.parse(event["start"]["dateTime"]).strftime("%d %b, %Y %H:%M"),
'end': dateutil.parser.parse(event["end"]["dateTime"]).strftime("%d %b, %Y %H:%M"),
})
    # except (httplib2.ServerNotFoundError, httplib2.HttpLib2Error):
except Exception as e:
print(e)
notice = [{'title': "ERR", 'note': "Please check your internet connection", 'start': "--:--", 'end': "-", }]
# Current date in UTC
#print(offline_status)
return maintenance, notice, offline_status
def offline(request):
maintenance, notice, status = google_calendar()
if status:
request.user.first_visit = False
request.user.save()
context = {
'maintenance': maintenance[0],
}
return HttpResponse(get_template('configuration/offline.html').render(context, request))
return HttpResponseRedirect(reverse('data:index'))
def intergrity_check(request):
check = threading.Thread(target=check_integrity, args=())
check.start()
return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/'))
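# check_integrity() runs in a background thread started by the view above: it
# recomputes each student's point totals, records any corrections it had to
# make, range-checks term averages and P-list cutoffs, and appends every
# finding to the module-level integrity_data list rendered by integrity_report.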
def check_integrity():
global integrity_data
integrity_data = []
integrity_data.append(f"Last ran: {datetime.datetime.now()}")
for student in Student.objects.all():
for grade in student.all_grades:
temp = grade.SE_total
grade.calc_points_total("SE")
            if temp != grade.SE_total: integrity_data.append(f"{student}: Fixed service points total for grade {grade.grade}.")
            temp = grade.AT_total
            grade.calc_points_total("AT")
            if temp != grade.AT_total: integrity_data.append(f"{student}: Fixed athletic points total for grade {grade.grade}.")
            temp = grade.FA_total
            grade.calc_points_total("FA")
            if temp != grade.FA_total: integrity_data.append(f"{student}: Fixed fine arts points total for grade {grade.grade}.")
            temp = grade.SC_total
            grade.calc_SC_total()
            if temp != round(grade.SC_total, 3): integrity_data.append(f"{student}: Fixed scholar points total for grade {grade.grade}.")
            if not 0 <= grade.term1_avg < 100: integrity_data.append(f"{student}: Grade {grade.grade} Term 1 average invalid")
            if not 0 <= grade.term2_avg < 100: integrity_data.append(f"{student}: Grade {grade.grade} Term 2 average invalid")
# integrity_data.append(f"{student}: Check OK")
for plist in PlistCutoff.objects.all():
        if not 0 <= plist.grade_8_T1 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 8 Term 1 average invalid")
        if not 0 <= plist.grade_9_T1 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 9 Term 1 average invalid")
        if not 0 <= plist.grade_10_T1 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 10 Term 1 average invalid")
        if not 0 <= plist.grade_11_T1 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 11 Term 1 average invalid")
        if not 0 <= plist.grade_12_T1 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 12 Term 1 average invalid")
        if not 0 <= plist.grade_8_T2 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 8 Term 2 average invalid")
        if not 0 <= plist.grade_9_T2 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 9 Term 2 average invalid")
        if not 0 <= plist.grade_10_T2 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 10 Term 2 average invalid")
        if not 0 <= plist.grade_11_T2 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 11 Term 2 average invalid")
        if not 0 <= plist.grade_12_T2 <= 100: integrity_data.append(f"Plist {plist.year}: Grade 12 Term 2 average invalid")
integrity_data.append(f"Check completed: {datetime.datetime.now()}")
def integrity_report(request):
global integrity_data
template = get_template('configuration/integrity.html')
context = {
'integrity_data': integrity_data,
}
if request.user.is_superuser:
return HttpResponse(template.render(context, request))
else:
return HttpResponseRedirect(reverse('entry:error'))
def handle_exception_40X(request, exception):
context = {
'exception': exception,
}
return HttpResponse(get_template('configuration/offline.html').render(context, request))
def handle_exception_50X(request):
context = {
}
return HttpResponse(get_template('configuration/offline.html').render(context, request))
def csrf_failure(request, reason=""):
return HttpResponseRedirect('/')
| 46.145455
| 135
| 0.652745
|
937657ec6d0e625429d11de03c578175d2dd5926
| 1347
|
py
|
Python
|
nicos_virt_mlz/treff/setups/analyzer.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 12
|
2019-11-06T15:40:36.000Z
|
2022-01-01T16:23:00.000Z
|
nicos_virt_mlz/treff/setups/analyzer.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 91
|
2020-08-18T09:20:26.000Z
|
2022-02-01T11:07:14.000Z
|
nicos_virt_mlz/treff/setups/analyzer.py
|
jkrueger1/nicos
|
5f4ce66c312dedd78995f9d91e8a6e3c891b262b
|
[
"CC-BY-3.0",
"Apache-2.0",
"CC-BY-4.0"
] | 6
|
2020-01-11T10:52:30.000Z
|
2022-02-25T12:35:23.000Z
|
description = 'Analyzer device'
group = 'optional'
devices = dict(
analyzer_tilt = device('nicos.devices.generic.Axis',
description = 'Analyzer tilt',
motor = device('nicos.devices.generic.VirtualMotor',
abslimits = (-0.81, 3.4),
unit = 'deg',
),
precision = 0.01,
fmtstr = '%.3f',
),
# aflipper = device('nicos_mlz.treff.devices.flipper.Flipper',
# description = 'Analyzer flip',
# flip = 'pow2flip',
# corr = 'pow2comp',
# currents = (1., 0.),
# ),
# pow2comp = device('nicos.devices.entangle.PowerSupply',
# description = 'Power supply 2 current control ch 1',
# tangodevice = tango_base + 'toellner/pow2comp',
# ),
# pow2flip = device('nicos.devices.entangle.PowerSupply',
# description = 'Power supply 2 current control ch 2',
# tangodevice = tango_base + 'toellner/pow2flip',
# ),
# pol_state = device("nicos.devices.generic.MultiSwitcher",
# description = "Guide field switcher",
# moveables = ["pflipper", "aflipper"],
# mapping = {
# "dd": ("down", "down"),
# "du": ("down", "up"),
# "ud": ("up", "down"),
# "uu": ("up", "up"),
# },
# precision = None,
# unit = ''
# ),
)
| 32.071429
| 66
| 0.522643
|
ccbc95d7e3298b5f91188ddc25b7cabd7669513a
| 6536
|
py
|
Python
|
kubernetes/client/models/v1beta1_ingress_list.py
|
Scalr/kubernetes-client-python
|
07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38
|
[
"Apache-2.0"
] | 3
|
2019-05-19T05:05:37.000Z
|
2020-03-20T04:56:20.000Z
|
kubernetes/client/models/v1beta1_ingress_list.py
|
Scalr/kubernetes-client-python
|
07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38
|
[
"Apache-2.0"
] | null | null | null |
kubernetes/client/models/v1beta1_ingress_list.py
|
Scalr/kubernetes-client-python
|
07442bdb76f0876ec96c0b0da6f9c4b06d7e5e38
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1IngressList(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'api_version': 'str',
'items': 'list[V1beta1Ingress]',
'kind': 'str',
'metadata': 'V1ListMeta'
}
attribute_map = {
'api_version': 'apiVersion',
'items': 'items',
'kind': 'kind',
'metadata': 'metadata'
}
def __init__(self, api_version=None, items=None, kind=None, metadata=None):
"""
V1beta1IngressList - a model defined in Swagger
"""
self._api_version = None
self._items = None
self._kind = None
self._metadata = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.items = items
if kind is not None:
self.kind = kind
if metadata is not None:
self.metadata = metadata
@property
def api_version(self):
"""
Gets the api_version of this V1beta1IngressList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:return: The api_version of this V1beta1IngressList.
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""
Sets the api_version of this V1beta1IngressList.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources
:param api_version: The api_version of this V1beta1IngressList.
:type: str
"""
self._api_version = api_version
@property
def items(self):
"""
Gets the items of this V1beta1IngressList.
Items is the list of Ingress.
:return: The items of this V1beta1IngressList.
:rtype: list[V1beta1Ingress]
"""
return self._items
@items.setter
def items(self, items):
"""
Sets the items of this V1beta1IngressList.
Items is the list of Ingress.
:param items: The items of this V1beta1IngressList.
:type: list[V1beta1Ingress]
"""
if items is None:
raise ValueError("Invalid value for `items`, must not be `None`")
self._items = items
@property
def kind(self):
"""
Gets the kind of this V1beta1IngressList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:return: The kind of this V1beta1IngressList.
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""
Sets the kind of this V1beta1IngressList.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
:param kind: The kind of this V1beta1IngressList.
:type: str
"""
self._kind = kind
@property
def metadata(self):
"""
Gets the metadata of this V1beta1IngressList.
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
:return: The metadata of this V1beta1IngressList.
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1beta1IngressList.
Standard object's metadata. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#metadata
:param metadata: The metadata of this V1beta1IngressList.
:type: V1ListMeta
"""
self._metadata = metadata
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1IngressList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
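# Illustrative usage (not part of the generated module): the model can be
# instantiated directly and serialized with the helpers defined above, e.g.
#
#     ingress_list = V1beta1IngressList(api_version='extensions/v1beta1',
#                                       kind='IngressList', items=[])
#     print(ingress_list.to_str())
#
# In practice instances are usually produced by the client's deserializer
# rather than constructed by hand.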
| 30.830189
| 281
| 0.606487
|
b10c8f099a3d13a2743552e63818acef09a9a4fe
| 4866
|
py
|
Python
|
src/old_code/PredGui.py
|
amirjankar/graph-analysis
|
f16f0ffa34e295870ccc1fac2adfcefe11783524
|
[
"MIT"
] | 1
|
2019-06-23T00:07:00.000Z
|
2019-06-23T00:07:00.000Z
|
src/old_code/PredGui.py
|
amirjankar/graph-analysis
|
f16f0ffa34e295870ccc1fac2adfcefe11783524
|
[
"MIT"
] | 1
|
2019-06-23T00:10:17.000Z
|
2019-06-24T22:44:13.000Z
|
src/old_code/PredGui.py
|
amirjankar/graph-analysis
|
f16f0ffa34e295870ccc1fac2adfcefe11783524
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Sun Dec 9 17:43:17 2018
@author: charl
"""
import tkinter as tk
from tkinter import END
import pickle
import pandas as pd
from RandomForestDriver import RandomForest
from BagOfWords import bagOfWords
# Load the pickled bag-of-words classifiers and the trained random forest,
# closing each file handle once it has been read.
with open('sentimentBag.pickledfile', 'rb') as f:
    sentBag = pickle.load(f)
with open('classificationBag.pickledfile', 'rb') as f:
    classBag = pickle.load(f)
with open('forest.cooltimes', 'rb') as f:
    forest = pickle.load(f)
def classifyText(text):
return classBag.classify_string(text)
def sentimentText(text):
return sentBag.classify_string(text)
def decodeForestVal(val):
if val == 1:
return('Lots of retweets!')
if val == .5:
return('Above average')
return('Average to low')
def classifySentiment(inpStr):
if inpStr == 'Negative':
return -1
if inpStr == 'Positive':
return 1
else:
return 0
def classifyType(inpStr):
if inpStr == 'Personal':
return .2
if inpStr == 'Promotional':
return .4
if inpStr == 'Clickbait':
return .6
if inpStr == 'News':
return .8
else:
return .99
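# forestClassify() packs the account statistics together with the numeric
# encodings of the tweet's sentiment and classification into a pandas Series
# (the feature layout the pickled RandomForest expects), then translates the
# forest's numeric prediction into a readable retweet estimate via
# decodeForestVal().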
def forestClassify(num_following, num_followers, num_tweets, text):
testDat= {}
testDat['following'] = num_following
testDat['followers'] = num_followers
testDat['tweets_count'] = num_tweets
testDat['sentiment'] = classifySentiment(sentimentText(text)[0])
testDat['classification'] = classifyType(classifyText(text)[0])
test = pd.Series(testDat)
return(decodeForestVal(forest.predict(test, True)))
class Window(tk.Frame):
def __init__(self, master=None):
tk.Frame.__init__(self, master)
self.master = master
self.init_window()
def init_window(self):
self.master.title('Tweet predictor')
def calcTot():
followingCount = int(followingEntry.get())
followedCount = int(followedEntry.get())
tweetsCount = int(numTweetsEntry.get())
tweet = str(tweetEntry.get())
ans = forestClassify(followingCount, followedCount, tweetsCount, tweet)
resultText.config(state=tk.NORMAL )
resultText.delete(1.0,tk.END)
resultText.insert(tk.INSERT,ans)
resultText.config(state=tk.DISABLED)
followingEntry.delete(0,tk.END)
followingEntry.insert(tk.INSERT,'# of people you follow')
followedEntry.delete(0,tk.END)
followedEntry.insert(tk.INSERT,'# of people following you')
numTweetsEntry.delete(0,tk.END)
numTweetsEntry.insert(tk.INSERT,'# of tweets you\'ve made')
tweetEntry.delete(0,tk.END)
tweetEntry.insert(tk.INSERT,'A tweet!')
instructText = tk.Text(root, width=55, height=5 )
instructText.insert(tk.INSERT, 'Instructions')
instructText.insert(tk.INSERT, '\nWelcome to my ugly gui I hope you love it')
instructText.insert(tk.INSERT, '\nType your tweet in the box, size limit not enforced')
instructText.insert(tk.INSERT, '\nFill in the other parameters')
        instructText.insert(tk.INSERT, '\nPush "Run" to calculate')
instructText.config(state=tk.DISABLED)
instructText.pack(pady=20)
followingEntry = tk.Entry(root)
followingEntry.pack(side='left', padx=10)
followingEntry.delete(0,tk.END)
followingEntry.insert(0, '# of people you follow')
followedEntry = tk.Entry(root)
followedEntry.pack(side='left', padx=10)
followedEntry.delete(0,tk.END)
followedEntry.insert(0, '# of people following you')
numTweetsEntry = tk.Entry(root)
numTweetsEntry.pack(side='left', padx=10)
numTweetsEntry.delete(0,tk.END)
numTweetsEntry.insert(0, '# of tweets you\'ve made')
tweetEntry = tk.Entry(root)
tweetEntry.pack(padx=15, pady=20)
tweetEntry.delete(0,tk.END)
tweetEntry.insert(0, 'A tweet!')
runButton = tk.Button(text='Run', width='80', height='10', command=calcTot)
runButton.pack(padx=15, pady=20)
#The text which shows the result
resultText = tk.Text(root, width=20, height=1)
resultText.insert(tk.INSERT, 'Result')
resultText.configure(state=tk.DISABLED)
resultText.pack(side='bottom', padx=15, pady=20)
def ask_quit():
root.destroy()
root = tk.Tk()
root.protocol('WM_DELETE_WINDOW', ask_quit)
frame= tk.Frame(root)
frame.pack(fill=tk.BOTH, expand=1)
#root.geometry('400x400')
app = Window(root)
root.mainloop()
| 32.225166
| 95
| 0.610769
|