Dataset schema (one row per column; "⌀" in the original dump marks nullable columns):

| column | type | observed range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64, nullable | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string, nullable | length 24 |
| max_stars_repo_stars_event_max_datetime | string, nullable | length 24 |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64, nullable | 1 to 116k |
| max_issues_repo_issues_event_min_datetime | string, nullable | length 24 |
| max_issues_repo_issues_event_max_datetime | string, nullable | length 24 |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64, nullable | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string, nullable | length 24 |
| max_forks_repo_forks_event_max_datetime | string, nullable | length 24 |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |

Each record below is rendered as a single header line, hexsha | size | ext | lang | path | repo | head_hexsha | licenses | stars | issues | forks (the path, repo, head and license fields, repeated identically across the stars/issues/forks variants in the raw dump, are shown once), followed by the file content and a per-file stats line (avg_line_length | max_line_length | alphanum_fraction).
7637b4dcb2b70fabf1d2ab9937260c32be85afd8 | 3,315 | py | Python | Wrappers/Python/cil/framework/BlockGeometry.py | paskino/CIL | 1803cbd445c408588fecbf705fb8b4df486029fc | ["Apache-2.0"] | stars: null | issues: null | forks: null
# -*- coding: utf-8 -*-
# CCP in Tomographic Imaging (CCPi) Core Imaging Library (CIL).
# Copyright 2017 UKRI-STFC
# Copyright 2017 University of Manchester
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy
from numbers import Number
import functools
from cil.framework import BlockDataContainer
class BlockGeometry(object):
    '''Class to hold Geometry objects as a column vector'''
    RANDOM = 'random'
    RANDOM_INT = 'random_int'
    #__array_priority__ = 1

    def __init__(self, *args, **kwargs):
        self.geometries = args
        self.index = 0
        shape = (len(args), 1)
        self.shape = shape
        n_elements = functools.reduce(lambda x, y: x * y, shape, 1)
        if len(args) != n_elements:
            raise ValueError(
                'Dimension and size do not match: expected {} got {}'
                .format(n_elements, len(args)))
    def get_item(self, index):
        '''returns the Geometry in the BlockGeometry located at position index'''
        return self.geometries[index]

    def allocate(self, value=0, dimension_labels=None, **kwargs):
        max_value = kwargs.get('max_value', 100)
        symmetry = kwargs.get('symmetry', False)
        containers = [geom.allocate(value, max_value=max_value) for geom in self.geometries]

        if symmetry:
            # for 2x2
            # [ ig11, ig12,
            #   ig21, ig22 ]
            # row-wise order
            if len(containers) == 4:
                containers[1] = containers[2]
            # for 3x3
            # [ ig11, ig12, ig13,
            #   ig21, ig22, ig23,
            #   ig31, ig32, ig33 ]
            elif len(containers) == 9:
                containers[1] = containers[3]
                containers[2] = containers[6]
                containers[5] = containers[7]
            # for 4x4
            # [ ig11, ig12, ig13, ig14,
            #   ig21, ig22, ig23, ig24,
            #   ig31, ig32, ig33, ig34,
            #   ig41, ig42, ig43, ig44 ]
            elif len(containers) == 16:
                containers[1] = containers[4]
                containers[2] = containers[8]
                containers[3] = containers[12]
                containers[6] = containers[9]
                containers[7] = containers[10]
                containers[11] = containers[15]

        return BlockDataContainer(*containers)
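A minimal usage sketch (not part of this file), assuming CIL's ImageGeometry with its usual keyword arguments and its 'random' fill mode; the symmetry flag only makes sense when the number of geometries forms a perfect square laid out row-wise:

# sketch: build a 2x1 block and a symmetric 2x2 block (hypothetical sizes)
from cil.framework import ImageGeometry, BlockGeometry

ig = ImageGeometry(voxel_num_x=4, voxel_num_y=4)
bg = BlockGeometry(ig, ig)               # shape (2, 1): a column of geometries
x = bg.allocate(0)                       # BlockDataContainer, one array per geometry

bg4 = BlockGeometry(ig, ig, ig, ig)      # four entries, treated as a 2x2 grid by allocate
y = bg4.allocate('random', symmetry=True)  # off-diagonal blocks share the same data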
avg_line_length: 33.826531 | max_line_length: 95 | alphanum_fraction: 0.539367
64f9c53d82a7c418d6952c79afd3930445d54f13 | 168 | py | Python | orochi/__init__.py | garanews/orochi | ec037ad5836855cd1a734fa1bf40ecc5647cb824 | ["MIT"] | stars: 121 (2020-09-25T16:14:44.000Z to 2022-03-09T21:21:36.000Z) | issues: 382 (2020-09-28T06:46:21.000Z to 2022-03-31T11:02:19.000Z) | forks: 10 (2020-09-29T16:36:30.000Z to 2022-01-18T14:02:09.000Z)
__version__ = "1.2.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
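A quick sketch (not part of orochi) of what the comprehension above produces; the single "-" replacement lets a pre-release tag become its own tuple component:

# sketch: the same parsing as a standalone function
def version_info(version):
    return tuple(
        int(num) if num.isdigit() else num
        for num in version.replace("-", ".", 1).split(".")
    )

assert version_info("1.2.0") == (1, 2, 0)
assert version_info("1.3.0-rc1") == (1, 3, 0, "rc1")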
avg_line_length: 21 | max_line_length: 62 | alphanum_fraction: 0.547619
a58dba5f71bbfa5fc8b7763e0437c45886c3552d | 3,408 | py | Python | utils/Halftone/halftone.py | Nikronic/U-net | 3c560ac66e0d1844e7a29326920930d7bea69a63 | ["MIT"] | stars: 29 (2018-11-17T23:00:05.000Z to 2022-03-30T11:56:40.000Z) | issues: 13 (2018-11-22T12:45:35.000Z to 2019-09-13T13:26:36.000Z) | forks: 3 (2019-04-29T10:45:47.000Z to 2020-12-08T02:56:20.000Z)
# %% libraries
import PIL.Image as Image
import numpy.matlib
import numpy as np
import random
import math
# %% functions
dithMat = [
    # 8x8 spiral
[[62, 58, 45, 41, 37, 49, 53, 61],
[54, 34, 25, 21, 17, 29, 33, 57],
[ 50, 30, 13, 9, 5, 12, 24, 44],
[ 38, 18, 6, 1, 0, 8, 20, 40],
[42, 22, 10, 2, 3, 4, 16, 36],
[46, 26, 14, 7, 11, 15, 28, 48],
[59, 35, 31, 19, 23, 27, 32, 52],
[ 63, 55, 51, 39, 43, 47, 56, 60]],
    # 8x8 dispersed
[[ 1, 30, 8, 28, 2, 29, 7, 27],
[ 17, 9, 24, 16, 18, 10, 23, 15],
[ 5, 25, 3, 32, 6, 26, 4, 31],
[ 21, 13, 19, 11, 22, 14, 20, 12],
[ 2, 29, 7, 27, 1, 30, 8, 28],
[ 18, 10, 23, 15, 17, 9, 24, 16],
[ 6, 26, 4, 31, 5, 25, 3, 32],
[ 22, 14, 20, 12, 21, 13, 19, 11]],
# 8X8 octa_dot
[[ 45, 17, 25, 37, 47, 19, 27, 39],
[ 49, 1, 9, 57, 51, 3, 11, 59],
[ 29, 33, 41, 21, 31, 35, 43, 23],
[ 13, 61, 53, 5, 15, 63, 55, 7],
[ 48, 20, 28, 40, 46, 18, 26, 38],
[ 52, 4, 12, 60, 50, 2, 10, 58],
[ 32, 36, 44, 24, 30, 34, 42, 22],
[ 16, 64, 56, 8, 14, 62, 54, 6]],
# # 5x5 diamond
[[ 5, 118, 160, 58, 17],
[ 48, 201, 232, 170, 99],
[ 129, 211, 252, 242, 150],
[ 89, 191, 221, 181, 68],
[ 38, 78, 140, 108, 27]],
    # 5x5 clockwise spiral
[[3, 10, 16, 11, 4],
[ 9, 20, 21, 17, 12],
[ 15, 24, 25, 22, 13],
[ 8, 19, 23, 18, 5],
[ 2, 7, 14, 6, 1]],
# 4x4 ordered
[[ 5, 9, 6, 10],
[ 13, 1, 14, 2],
[ 7 ,11, 4, 8],
[ 15, 3, 12, 0]],
]
def get_res_dmat(channel_size, dith_mat):
    # channel_size follows PIL's (width, height) convention
    new_sz_y, new_sz_x = channel_size[1], channel_size[0]
    # use the global extrema of the matrix; min(min(...))/max(max(...)) only
    # inspect the lexicographically extreme row and can return the wrong value
    # (e.g. for the 4x4 ordered matrix above, min(min(...)) gives 5, not 0)
    min_dmat = np.min(dith_mat)
    max_dmat = np.max(dith_mat)
    nb_of_intervals = max_dmat - min_dmat + 2
    single_interval = 255 / nb_of_intervals
    scaled_dith_mat = np.multiply(np.subtract(dith_mat, min_dmat + 1), single_interval)
    scaled_dith_mat = scaled_dith_mat.astype(int)
    dmat_sz_y, dmat_sz_x = len(scaled_dith_mat), len(scaled_dith_mat[0])
    n_x = math.ceil(new_sz_x / dmat_sz_x)
    n_y = math.ceil(new_sz_y / dmat_sz_y)
    res_dmat = np.matlib.repmat(scaled_dith_mat.astype(int), n_y, n_x)[:new_sz_y, :new_sz_x]
    return res_dmat
def generate_halftone(im):
    cmyk_im = im.convert('CMYK')
    dith_mat_sample = dithMat[random.randint(0, len(dithMat) - 1)]
    cmyk = cmyk_im.split()
    angles = [[15, 45, 0, 75],
              [45, 15, 0, 75],
              ]
    angles = angles[random.randint(0, len(angles) - 1)]
    if cmyk[0] == cmyk[1] == cmyk[2]:
        angles = angles[:1] * 4
    dots = []
    for x, i in enumerate(cmyk):
        channel_rotation = i.rotate(angles[x], expand=1)
        channel = np.asarray(channel_rotation) > get_res_dmat(channel_rotation.size, dith_mat_sample)
        channel = Image.fromarray((channel * 255).astype('uint8')).convert('L').rotate(-angles[x], expand=1)
        # https://stackoverflow.com/questions/27622834/write-numpy-ndarray-to-image
        # reason of casting to 'uint8'
        w, h = channel.size
        im_x, im_y = i.size
        x1 = (w - im_x) / 2
        y1 = (h - im_y) / 2
        channel = channel.crop((x1, y1, x1 + im_x, y1 + im_y))
        dots.append(channel)
    halftoned_im = Image.merge('CMYK', dots)
    return halftoned_im.convert('RGB')
# %% test
# im = Image.open('data/Places365_val_00000001.jpg')
# imh = generate_halftone(im)
# imh.show()
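A small sketch (not in the original) of how get_res_dmat tiles a dither matrix over a channel; the size argument follows PIL's (width, height) convention:

# sketch: tile the 4x4 ordered matrix over a 10x6 channel
thresholds = get_res_dmat((10, 6), dithMat[-1])
print(thresholds.shape)   # (6, 10): repmat-tiled, then cropped to the channel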
avg_line_length: 31.850467 | max_line_length: 108 | alphanum_fraction: 0.539613
42bb181df153a2b949b08ead45a58aeaa89bcaca | 35 | py | Python | pycalc/constants.py | TimothyDJones/pyqt5-calc | 61813f76d0235301eb21b0b53a7a6910a2b78f1f | ["MIT"] | stars: 2 (2021-04-25T13:32:58.000Z to 2021-12-22T17:25:11.000Z) | issues: null | forks: null
# constants.py
ERROR_MSG = "ERROR"
avg_line_length: 11.666667 | max_line_length: 19 | alphanum_fraction: 0.714286
e49ff91b87a3b6a172073b4d2868b8907d2b62bc | 684 | py | Python | test_package/conanfile.py | ralfschulze/conan-liblxi | 8fcb2f686ae850c047571952dcc9f1214c1afd69 | ["MIT"] | stars: null | issues: null | forks: null
import os
from conans import ConanFile, CMake, tools


class LibLxiTestConan(ConanFile):
    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"

    def build(self):
        cmake = CMake(self)
        # Current dir is "test_package/build/<build_id>" and CMakeLists.txt is
        # in "test_package"
        cmake.configure()
        cmake.build()

    def imports(self):
        self.copy("*.dll", dst="bin", src="bin")
        self.copy("*.dylib*", dst="bin", src="lib")
        self.copy("*.so*", dst="bin", src="lib")

    def test(self):
        if not tools.cross_building(self):
            os.chdir("bin")
            self.run(".%sexample" % os.sep)
avg_line_length: 26.307692 | max_line_length: 78 | alphanum_fraction: 0.571637
41e3dede130b535cc9fdecb20039a4e2832ab6a3 | 1,634 | py | Python | test/store/mem_server_store_test.py | bowlofstew/bii-server | 9abc558b1ba9722088e22a4937a224cfcc3c5f22 | ["MIT"] | stars: 27 (2015-04-15T09:40:26.000Z to 2020-04-29T06:24:06.000Z) | issues: 2 (2015-06-03T20:19:51.000Z to 2015-06-30T20:40:14.000Z) | forks: 10 (2015-08-06T08:17:28.000Z to 2020-09-29T17:02:57.000Z)
from biicode.server.model.block import Block
from biicode.common.model.id import ID
from biicode.common.publish.publish_request import PublishRequest
from biicode.common.model.cells import SimpleCell
from biicode.common.model.content import Content
from biicode.common.model.blob import Blob
import datetime
from biicode.common.model.brl.brl_block import BRLBlock
from biicode.test.testing_mem_server_store import TestingMemServerStore
from biicode.server.test.store.mongo_test import TestWithMongo
from nose_parameterized.parameterized import parameterized
from biicode.server.store.mongo_server_store import MongoServerStore


class MemServerStoreTest(TestWithMongo):

    @parameterized.expand([(MongoServerStore, ), (TestingMemServerStore, )])
    def test_read_published_blocks_info(self, store_cls):
        """Insert a block and read all published blocks info (brl, lastpubdate)"""
        if store_cls == MongoServerStore:
            store = MongoServerStore(self.conn, self.__class__.__name__)
        else:
            store = TestingMemServerStore()
        block = Block(ID((23, 23)), BRLBlock("bonjovi/bonjovi/itsmylife/master"))
        ppack = PublishRequest(block.last_version())
        r1 = SimpleCell('user/block/r1.h')
        ppack.cells.append(r1)
        ppack.contents['r1.h'] = Content(id_=None, load=Blob('hola'))
        block.add_publication(ppack)
        store.create_block(block, False)
        ret = store.read_published_blocks_info()
        first = ret.next()
        self.assertEquals(first[0], "bonjovi/bonjovi/itsmylife/master")
        self.assertEquals(first[1].__class__, datetime.datetime)
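For reference (not in the test itself): parameterized.expand generates one test method per argument tuple, so the single body above runs twice, once against MongoServerStore and once against TestingMemServerStore; ret.next() and the assertEquals alias mark this as Python 2 era code.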
avg_line_length: 44.162162 | max_line_length: 82 | alphanum_fraction: 0.746022
86ea28d319e18609cecbfdf60e793696d008bfb4 | 8,925 | py | Python | contrib/linearize/linearize-data.py | pmo24/flash | 1c8acedc0ac3ce28321e98a6a54eb3b15d32052e | ["MIT"] | stars: null | issues: null | forks: null
#!/usr/bin/python
#
# linearize-data.py: Construct a linear, no-fork version of the chain.
#
# Copyright (c) 2013-2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from __future__ import print_function, division
import json
import struct
import re
import os
import os.path
import base64
import httplib
import sys
import hashlib
import flash_hash
import datetime
import time
from collections import namedtuple
settings = {}
def uint32(x):
    return x & 0xffffffffL

def bytereverse(x):
    return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
                    (((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))

def bufreverse(in_buf):
    out_words = []
    for i in range(0, len(in_buf), 4):
        word = struct.unpack('@I', in_buf[i:i+4])[0]
        out_words.append(struct.pack('@I', bytereverse(word)))
    return ''.join(out_words)

def wordreverse(in_buf):
    out_words = []
    for i in range(0, len(in_buf), 4):
        out_words.append(in_buf[i:i+4])
    out_words.reverse()
    return ''.join(out_words)
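# A sketch (not in the original script) of what the two helpers above achieve
# together: reversing bytes within each 32-bit word (bufreverse) and then
# reversing the word order (wordreverse) is the same as reversing the whole
# buffer, i.e. converting the little-endian digest into the big-endian hex
# form used by RPC and block explorers:
#
#   wordreverse(bufreverse(s)) == s[::-1]    # for any s with len(s) % 4 == 0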
def calc_hdr_hash(blk_hdr):
    #hash1 = hashlib.sha256()
    #hash1.update(blk_hdr)
    #hash1_o = hash1.digest()
    #hash2 = hashlib.sha256()
    #hash2.update(hash1_o)
    #hash2_o = hash2.digest()
    #return hash2_o
    pow_hash = flash_hash.getPoWHash(blk_hdr)
    return pow_hash

def calc_hash_str(blk_hdr):
    hash = calc_hdr_hash(blk_hdr)
    hash = bufreverse(hash)
    hash = wordreverse(hash)
    hash_str = hash.encode('hex')
    return hash_str

def get_blk_dt(blk_hdr):
    members = struct.unpack("<I", blk_hdr[68:68+4])
    nTime = members[0]
    dt = datetime.datetime.fromtimestamp(nTime)
    dt_ym = datetime.datetime(dt.year, dt.month, 1)
    return (dt_ym, nTime)

def get_block_hashes(settings):
    blkindex = []
    f = open(settings['hashlist'], "r")
    for line in f:
        line = line.rstrip()
        blkindex.append(line)
    print("Read " + str(len(blkindex)) + " hashes")
    return blkindex

def mkblockmap(blkindex):
    blkmap = {}
    for height, hash in enumerate(blkindex):
        blkmap[hash] = height
    return blkmap
# Block header and extent on disk
BlockExtent = namedtuple('BlockExtent', ['fn', 'offset', 'inhdr', 'blkhdr', 'size'])

class BlockDataCopier:
    def __init__(self, settings, blkindex, blkmap):
        self.settings = settings
        self.blkindex = blkindex
        self.blkmap = blkmap
        self.inFn = 0
        self.inF = None
        self.outFn = 0
        self.outsz = 0
        self.outF = None
        self.outFname = None
        self.blkCountIn = 0
        self.blkCountOut = 0
        self.lastDate = datetime.datetime(2000, 1, 1)
        self.highTS = 1408893517 - 315360000
        self.timestampSplit = False
        self.fileOutput = True
        self.setFileTime = False
        self.maxOutSz = settings['max_out_sz']
        if 'output' in settings:
            self.fileOutput = False
        if settings['file_timestamp'] != 0:
            self.setFileTime = True
        if settings['split_timestamp'] != 0:
            self.timestampSplit = True
        # Extents and cache for out-of-order blocks
        self.blockExtents = {}
        self.outOfOrderData = {}
        self.outOfOrderSize = 0  # running total size for items in outOfOrderData
    def writeBlock(self, inhdr, blk_hdr, rawblock):
        blockSizeOnDisk = len(inhdr) + len(blk_hdr) + len(rawblock)
        if not self.fileOutput and ((self.outsz + blockSizeOnDisk) > self.maxOutSz):
            self.outF.close()
            if self.setFileTime:
                os.utime(self.outFname, (int(time.time()), self.highTS))
            self.outF = None
            self.outFname = None
            self.outFn = self.outFn + 1
            self.outsz = 0

        (blkDate, blkTS) = get_blk_dt(blk_hdr)
        if self.timestampSplit and (blkDate > self.lastDate):
            print("New month " + blkDate.strftime("%Y-%m"))
            self.lastDate = blkDate
            if self.outF:
                self.outF.close()
                if self.setFileTime:
                    os.utime(self.outFname, (int(time.time()), self.highTS))
                self.outF = None
                self.outFname = None
                self.outFn = self.outFn + 1
                self.outsz = 0

        if not self.outF:
            if self.fileOutput:
                self.outFname = self.settings['output_file']
            else:
                self.outFname = os.path.join(self.settings['output'], "blk%05d.dat" % self.outFn)
            print("Output file " + self.outFname)
            self.outF = open(self.outFname, "wb")

        self.outF.write(inhdr)
        self.outF.write(blk_hdr)
        self.outF.write(rawblock)
        self.outsz = self.outsz + len(inhdr) + len(blk_hdr) + len(rawblock)

        self.blkCountOut = self.blkCountOut + 1
        if blkTS > self.highTS:
            self.highTS = blkTS

        if (self.blkCountOut % 1000) == 0:
            print('%i blocks scanned, %i blocks written (of %i, %.1f%% complete)' %
                  (self.blkCountIn, self.blkCountOut, len(self.blkindex), 100.0 * self.blkCountOut / len(self.blkindex)))
    def inFileName(self, fn):
        return os.path.join(self.settings['input'], "blk%05d.dat" % fn)

    def fetchBlock(self, extent):
        '''Fetch block contents from disk given extents'''
        with open(self.inFileName(extent.fn), "rb") as f:
            f.seek(extent.offset)
            return f.read(extent.size)

    def copyOneBlock(self):
        '''Find the next block to be written in the input, and copy it to the output.'''
        extent = self.blockExtents.pop(self.blkCountOut)
        if self.blkCountOut in self.outOfOrderData:
            # If the data is cached, use it from memory and remove from the cache
            rawblock = self.outOfOrderData.pop(self.blkCountOut)
            self.outOfOrderSize -= len(rawblock)
        else:  # Otherwise look up data on disk
            rawblock = self.fetchBlock(extent)
        self.writeBlock(extent.inhdr, extent.blkhdr, rawblock)
    def run(self):
        while self.blkCountOut < len(self.blkindex):
            if not self.inF:
                fname = self.inFileName(self.inFn)
                print("Input file " + fname)
                try:
                    self.inF = open(fname, "rb")
                except IOError:
                    print("Premature end of block data")
                    return

            inhdr = self.inF.read(8)
            if (not inhdr or (inhdr[0] == "\0")):
                self.inF.close()
                self.inF = None
                self.inFn = self.inFn + 1
                continue

            inMagic = inhdr[:4]
            if (inMagic != self.settings['netmagic']):
                print("Invalid magic: " + inMagic.encode('hex'))
                return
            inLenLE = inhdr[4:]
            su = struct.unpack("<I", inLenLE)
            inLen = su[0] - 80  # length without header
            blk_hdr = self.inF.read(80)
            inExtent = BlockExtent(self.inFn, self.inF.tell(), inhdr, blk_hdr, inLen)

            hash_str = calc_hash_str(blk_hdr)
            if hash_str not in self.blkmap:
                print("Skipping unknown block " + hash_str)
                self.inF.seek(inLen, os.SEEK_CUR)
                continue
            blkHeight = self.blkmap[hash_str]
            self.blkCountIn += 1

            if self.blkCountOut == blkHeight:
                # If in-order block, just copy
                rawblock = self.inF.read(inLen)
                self.writeBlock(inhdr, blk_hdr, rawblock)
                # See if we can catch up to prior out-of-order blocks
                while self.blkCountOut in self.blockExtents:
                    self.copyOneBlock()
            else:  # If out-of-order, skip over block data for now
                self.blockExtents[blkHeight] = inExtent
                if self.outOfOrderSize < self.settings['out_of_order_cache_sz']:
                    # If there is space in the cache, read the data
                    # Reading the data in file sequence instead of seeking and fetching it later is preferred,
                    # but we don't want to fill up memory
                    self.outOfOrderData[blkHeight] = self.inF.read(inLen)
                    self.outOfOrderSize += inLen
                else:  # If no space in cache, seek forward
                    self.inF.seek(inLen, os.SEEK_CUR)

        print("Done (%i blocks written)" % (self.blkCountOut))
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Usage: linearize-data.py CONFIG-FILE")
        sys.exit(1)

    f = open(sys.argv[1])
    for line in f:
        # skip comment lines
        m = re.search('^\s*#', line)
        if m:
            continue
        # parse key=value lines
        m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
        if m is None:
            continue
        settings[m.group(1)] = m.group(2)
    f.close()

    if 'netmagic' not in settings:
        settings['netmagic'] = 'cee2caff'
    if 'genesis' not in settings:
        settings['genesis'] = '00000bafbc94add76cb75e2ec92894837288a481e5c005f6563d91623bf8bc2c'
    if 'input' not in settings:
        settings['input'] = 'input'
    if 'hashlist' not in settings:
        settings['hashlist'] = 'hashlist.txt'
    if 'file_timestamp' not in settings:
        settings['file_timestamp'] = 0
    if 'split_timestamp' not in settings:
        settings['split_timestamp'] = 0
    if 'max_out_sz' not in settings:
        settings['max_out_sz'] = 1000L * 1000 * 1000
    if 'out_of_order_cache_sz' not in settings:
        settings['out_of_order_cache_sz'] = 100 * 1000 * 1000

    settings['max_out_sz'] = long(settings['max_out_sz'])
    settings['split_timestamp'] = int(settings['split_timestamp'])
    settings['file_timestamp'] = int(settings['file_timestamp'])
    settings['netmagic'] = settings['netmagic'].decode('hex')
    settings['out_of_order_cache_sz'] = int(settings['out_of_order_cache_sz'])

    if 'output_file' not in settings and 'output' not in settings:
        print("Missing output file / directory")
        sys.exit(1)

    blkindex = get_block_hashes(settings)
    blkmap = mkblockmap(blkindex)

    if not settings['genesis'] in blkmap:
        print("Genesis block not found in hashlist")
    else:
        BlockDataCopier(settings, blkindex, blkmap).run()
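A sketch of a CONFIG-FILE this parser accepts (the keys and most values are the script's own defaults; output_file is a hypothetical path):

netmagic=cee2caff
genesis=00000bafbc94add76cb75e2ec92894837288a481e5c005f6563d91623bf8bc2c
input=input
hashlist=hashlist.txt
output_file=bootstrap.dat
max_out_sz=1000000000
out_of_order_cache_sz=100000000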
avg_line_length: 29.166667 | max_line_length: 108 | alphanum_fraction: 0.6893
0bd6d3a4747d8826e9f4b66c8e7abe05a1830a60 | 155,009 | py | Python | erpUI.py | abhiraj-ranjan/Clouster | d5a2b9a1184c5ff86ce8a34c9b334597f3ee2ebc | ["CC-BY-4.0"] | stars: null | issues: null | forks: null
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'D:\all\python projects\python os\ui\Simple_PySide_Base-org\e\erpUI.ui'
#
# Created by: PyQt5 UI code generator 5.15.0
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
import os
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(913, 547)
Form.setStyleSheet("")
self.verticalLayout_9 = QtWidgets.QVBoxLayout(Form)
self.verticalLayout_9.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_9.setSpacing(0)
self.verticalLayout_9.setObjectName("verticalLayout_9")
self.frame = QtWidgets.QFrame(Form)
self.frame.setStyleSheet("QTextEdit {\n"
" border-radius: 5px;\n"
" border: 2px solid rgb(27, 29, 35);\n"
" padding-left: 10px;\n"
"}\n"
"QTextEdit:hover {\n"
" border: 2px solid rgb(64, 71, 88);\n"
"}\n"
"QTextEdit:focus {\n"
" border: 2px solid rgb(91, 101, 124);\n"
"}\n"
"QFrame{\n"
" background-color: transparent;\n"
"}\n"
"/* LINE EDIT */\n"
"QLineEdit {\n"
" background-color: rgb(27, 29, 35);\n"
" border-radius: 5px;\n"
" border: 2px solid rgb(27, 29, 35);\n"
" padding-left: 10px;\n"
"}\n"
"QLineEdit:hover {\n"
" border: 2px solid rgb(64, 71, 88);\n"
"}\n"
"QLineEdit:focus {\n"
" border: 2px solid rgb(91, 101, 124);\n"
"}\n"
"\n"
"/* SCROLL BARS */\n"
"QScrollBar:horizontal {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" height: 14px;\n"
" margin: 0px 21px 0 21px;\n"
" border-radius: 0px;\n"
"}\n"
"QScrollBar::handle:horizontal {\n"
" background: rgb(85, 170, 255);\n"
" min-width: 25px;\n"
" border-radius: 7px\n"
"}\n"
"QScrollBar::add-line:horizontal {\n"
" border: none;\n"
" background: rgb(55, 63, 77);\n"
" width: 20px;\n"
" border-top-right-radius: 7px;\n"
" border-bottom-right-radius: 7px;\n"
" subcontrol-position: right;\n"
" subcontrol-origin: margin;\n"
"}\n"
"QScrollBar::sub-line:horizontal {\n"
" border: none;\n"
" background: rgb(55, 63, 77);\n"
" width: 20px;\n"
" border-top-left-radius: 7px;\n"
" border-bottom-left-radius: 7px;\n"
" subcontrol-position: left;\n"
" subcontrol-origin: margin;\n"
"}\n"
"QScrollBar::up-arrow:horizontal, QScrollBar::down-arrow:horizontal\n"
"{\n"
" background: none;\n"
"}\n"
"QScrollBar::add-page:horizontal, QScrollBar::sub-page:horizontal\n"
"{\n"
" background: none;\n"
"}\n"
" QScrollBar:vertical {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" width: 14px;\n"
" margin: 21px 0 21px 0;\n"
" border-radius: 0px;\n"
" }\n"
" QScrollBar::handle:vertical { \n"
" background: rgb(85, 170, 255);\n"
" min-height: 25px;\n"
" border-radius: 7px\n"
" }\n"
" QScrollBar::add-line:vertical {\n"
" border: none;\n"
" background: rgb(55, 63, 77);\n"
" height: 20px;\n"
" border-bottom-left-radius: 7px;\n"
" border-bottom-right-radius: 7px;\n"
" subcontrol-position: bottom;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::sub-line:vertical {\n"
" border: none;\n"
" background: rgb(55, 63, 77);\n"
" height: 20px;\n"
" border-top-left-radius: 7px;\n"
" border-top-right-radius: 7px;\n"
" subcontrol-position: top;\n"
" subcontrol-origin: margin;\n"
" }\n"
" QScrollBar::up-arrow:vertical, QScrollBar::down-arrow:vertical {\n"
" background: none;\n"
" }\n"
"\n"
" QScrollBar::add-page:vertical, QScrollBar::sub-page:vertical {\n"
" background: none;\n"
" }\n"
"\n"
"/* CHECKBOX */\n"
"QCheckBox::indicator {\n"
" border: 3px solid rgb(52, 59, 72);\n"
" width: 15px;\n"
" height: 15px;\n"
" border-radius: 10px;\n"
" background: rgb(44, 49, 60);\n"
"}\n"
"QCheckBox::indicator:hover {\n"
" border: 3px solid rgb(58, 66, 81);\n"
"}\n"
"QCheckBox::indicator:checked {\n"
" background: 3px solid rgb(52, 59, 72);\n"
" border: 3px solid rgb(52, 59, 72); \n"
"}\n"
"\n"
"/* RADIO BUTTON */\n"
"QRadioButton::indicator {\n"
" border: 3px solid rgb(52, 59, 72);\n"
" width: 15px;\n"
" height: 15px;\n"
" border-radius: 10px;\n"
" background: rgb(44, 49, 60);\n"
"}\n"
"QRadioButton::indicator:hover {\n"
" border: 3px solid rgb(58, 66, 81);\n"
"}\n"
"QRadioButton::indicator:checked {\n"
" background: 3px solid rgb(94, 106, 130);\n"
" border: 3px solid rgb(52, 59, 72); \n"
"}\n"
"\n"
"/* COMBOBOX */\n"
"QComboBox{\n"
" background-color: rgb(27, 29, 35);\n"
" border-radius: 5px;\n"
" border: 2px solid rgb(27, 29, 35);\n"
" padding: 5px;\n"
" padding-left: 10px;\n"
"}\n"
"QComboBox:hover{\n"
" border: 2px solid rgb(64, 71, 88);\n"
"}\n"
"QComboBox::drop-down {\n"
" subcontrol-origin: padding;\n"
" subcontrol-position: top right;\n"
" width: 25px; \n"
" border-left-width: 3px;\n"
" border-left-color: rgba(39, 44, 54, 150);\n"
" border-left-style: solid;\n"
" border-top-right-radius: 3px;\n"
" border-bottom-right-radius: 3px; \n"
" background-position: center;\n"
" background-repeat: no-reperat;\n"
" }\n"
"QComboBox QAbstractItemView {\n"
" color: rgb(85, 170, 255); \n"
" background-color: rgb(27, 29, 35);\n"
" padding: 10px;\n"
" selection-background-color: rgb(39, 44, 54);\n"
"}\n"
"\n"
"/* SLIDERS */\n"
"QSlider::groove:horizontal {\n"
" border-radius: 9px;\n"
" height: 18px;\n"
" margin: 0px;\n"
" background-color: rgb(52, 59, 72);\n"
"}\n"
"QSlider::groove:horizontal:hover {\n"
" background-color: rgb(55, 62, 76);\n"
"}\n"
"QSlider::handle:horizontal {\n"
" background-color: rgb(85, 170, 255);\n"
" border: none;\n"
" height: 18px;\n"
" width: 18px;\n"
" margin: 0px;\n"
" border-radius: 9px;\n"
"}\n"
"QSlider::handle:horizontal:hover {\n"
" background-color: rgb(105, 180, 255);\n"
"}\n"
"QSlider::handle:horizontal:pressed {\n"
" background-color: rgb(65, 130, 195);\n"
"}\n"
"\n"
"QSlider::groove:vertical {\n"
" border-radius: 9px;\n"
" width: 18px;\n"
" margin: 0px;\n"
" background-color: rgb(52, 59, 72);\n"
"}\n"
"QSlider::groove:vertical:hover {\n"
" background-color: rgb(55, 62, 76);\n"
"}\n"
"QSlider::handle:vertical {\n"
" background-color: rgb(85, 170, 255);\n"
" border: none;\n"
" height: 18px;\n"
" width: 18px;\n"
" margin: 0px;\n"
" border-radius: 9px;\n"
"}\n"
"QSlider::handle:vertical:hover {\n"
" background-color: rgb(105, 180, 255);\n"
"}\n"
"QSlider::handle:vertical:pressed {\n"
" background-color: rgb(65, 130, 195);\n"
"}\n"
"\n"
"\n"
"QPushButton {\n"
" background-position: center;\n"
" background-repeat: no-repeat;\n"
" border: none;\n"
" background-color: rgb(27, 29, 35);\n"
"}\n"
"\n"
"QPushButton:hover {\n"
" background-color: rgb(33, 37, 43);\n"
"}\n"
"QPushButton:pressed {\n"
" background-color: rgb(85, 170, 255);\n"
"}")
self.frame.setFrameShape(QtWidgets.QFrame.NoFrame)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setLineWidth(0)
self.frame.setObjectName("frame")
self.verticalLayout = QtWidgets.QVBoxLayout(self.frame)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName("verticalLayout")
self.frame_2 = QtWidgets.QFrame(self.frame)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_2.sizePolicy().hasHeightForWidth())
self.frame_2.setSizePolicy(sizePolicy)
self.frame_2.setMinimumSize(QtCore.QSize(0, 42))
self.frame_2.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.frame_2.setStyleSheet("background-color:transparent;\n"
"border:0px;")
self.frame_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_2.setObjectName("frame_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.frame_2)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.frame_4 = QtWidgets.QFrame(self.frame_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_4.sizePolicy().hasHeightForWidth())
self.frame_4.setSizePolicy(sizePolicy)
self.frame_4.setStyleSheet("background-color:#121212;\n"
"border:0px;")
self.frame_4.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_4.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_4.setObjectName("frame_4")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.frame_4)
self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_3.setSpacing(0)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.title_bar = QtWidgets.QFrame(self.frame_4)
self.title_bar.setStyleSheet("background: transparent;\n"
"")
self.title_bar.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.title_bar.setFrameShadow(QtWidgets.QFrame.Raised)
self.title_bar.setObjectName("title_bar")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout(self.title_bar)
self.horizontalLayout_4.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_4.setSpacing(0)
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.about = QtWidgets.QPushButton(self.title_bar)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.about.sizePolicy().hasHeightForWidth())
self.about.setSizePolicy(sizePolicy)
self.about.setText("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(os.path.abspath("./icons/window/cil-code.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.about.setIcon(icon)
self.about.setIconSize(QtCore.QSize(42, 42))
self.about.setObjectName("about")
self.horizontalLayout_4.addWidget(self.about)
self.label_2 = QtWidgets.QLabel(self.title_bar)
font = QtGui.QFont()
font.setFamily("Segoe UI")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setStyleSheet("background: transparent;\n"
"color:white;\n"
"")
self.label_2.setIndent(0)
self.label_2.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_2.setObjectName("label_2")
self.horizontalLayout_4.addWidget(self.label_2)
self.horizontalLayout_3.addWidget(self.title_bar)
self.miniBtn = QtWidgets.QPushButton(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.miniBtn.sizePolicy().hasHeightForWidth())
self.miniBtn.setSizePolicy(sizePolicy)
self.miniBtn.setMaximumSize(QtCore.QSize(40, 42))
self.miniBtn.setToolTip("")
self.miniBtn.setToolTipDuration(2)
self.miniBtn.setStyleSheet("QPushButton { \n"
" border: none;\n"
" background-color: transparent;\n"
"}\n"
"QPushButton:hover {\n"
" background-color: rgb(52, 59, 72);\n"
"}\n"
"QPushButton:pressed { \n"
" background-color: rgb(85, 170, 255);\n"
"}")
self.miniBtn.setText("")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(os.path.abspath("./icons/window/cil-window-minimize.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.miniBtn.setIcon(icon1)
self.miniBtn.setAutoDefault(True)
self.miniBtn.setDefault(True)
self.miniBtn.setFlat(False)
self.miniBtn.setObjectName("miniBtn")
self.horizontalLayout_3.addWidget(self.miniBtn)
self.maxBtn = QtWidgets.QPushButton(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.maxBtn.sizePolicy().hasHeightForWidth())
self.maxBtn.setSizePolicy(sizePolicy)
self.maxBtn.setMaximumSize(QtCore.QSize(40, 42))
self.maxBtn.setStyleSheet("QPushButton { \n"
" border: none;\n"
" background-color: transparent;\n"
"}\n"
"QPushButton:hover {\n"
" background-color: rgb(52, 59, 72);\n"
"}\n"
"QPushButton:pressed { \n"
" background-color: rgb(85, 170, 255);\n"
"}")
self.maxBtn.setText("")
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(os.path.abspath("./icons/window/cil-window-maximize.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.maxBtn.setIcon(icon2)
self.maxBtn.setAutoDefault(True)
self.maxBtn.setDefault(True)
self.maxBtn.setFlat(False)
self.maxBtn.setObjectName("maxBtn")
self.horizontalLayout_3.addWidget(self.maxBtn)
self.closeBtn = QtWidgets.QPushButton(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.closeBtn.sizePolicy().hasHeightForWidth())
self.closeBtn.setSizePolicy(sizePolicy)
self.closeBtn.setMaximumSize(QtCore.QSize(40, 42))
self.closeBtn.setStyleSheet("QPushButton { \n"
" border: none;\n"
" background-color: transparent;\n"
"}\n"
"QPushButton:hover {\n"
" background-color: rgb(52, 59, 72);\n"
"}\n"
"QPushButton:pressed { \n"
" background-color: rgb(85, 170, 255);\n"
"}")
self.closeBtn.setText("")
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(os.path.abspath("./icons/window/cil-x.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.closeBtn.setIcon(icon3)
self.closeBtn.setAutoDefault(True)
self.closeBtn.setDefault(True)
self.closeBtn.setFlat(False)
self.closeBtn.setObjectName("closeBtn")
self.horizontalLayout_3.addWidget(self.closeBtn)
self.horizontalLayout_2.addWidget(self.frame_4)
self.verticalLayout.addWidget(self.frame_2)
self.frame_7 = QtWidgets.QFrame(self.frame)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_7.sizePolicy().hasHeightForWidth())
self.frame_7.setSizePolicy(sizePolicy)
self.frame_7.setMinimumSize(QtCore.QSize(0, 0))
self.frame_7.setStyleSheet("border: 0px;\n"
"background-color:rgb(36, 36, 36);\n"
"color: rgb(217, 217, 217);")
self.frame_7.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_7.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_7.setObjectName("frame_7")
self.horizontalLayout_6 = QtWidgets.QHBoxLayout(self.frame_7)
self.horizontalLayout_6.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_6.setSpacing(0)
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.verticalLayout.addWidget(self.frame_7)
self.frame_9 = QtWidgets.QFrame(self.frame)
self.frame_9.setStyleSheet("border:0px;\n"
"background-color: rgb(40, 40, 40);")
self.frame_9.setFrameShape(QtWidgets.QFrame.NoFrame)
self.frame_9.setFrameShadow(QtWidgets.QFrame.Plain)
self.frame_9.setLineWidth(0)
self.frame_9.setObjectName("frame_9")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.frame_9)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.widget_3 = QtWidgets.QWidget(self.frame_9)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.widget_3.sizePolicy().hasHeightForWidth())
self.widget_3.setSizePolicy(sizePolicy)
self.widget_3.setMinimumSize(QtCore.QSize(6, 0))
self.widget_3.setMaximumSize(QtCore.QSize(6, 16777215))
self.widget_3.setStyleSheet("background-color:rgb(255, 82, 85);\n"
"border:1px solid rgb(255, 82, 85);")
self.widget_3.setObjectName("widget_3")
self.horizontalLayout.addWidget(self.widget_3)
self.frame_5 = QtWidgets.QFrame(self.frame_9)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_5.sizePolicy().hasHeightForWidth())
self.frame_5.setSizePolicy(sizePolicy)
self.frame_5.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.frame_5.setStyleSheet("background-color: rgb(56, 56, 56);")
self.frame_5.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_5.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_5.setObjectName("frame_5")
self.verticalLayout_22 = QtWidgets.QVBoxLayout(self.frame_5)
self.verticalLayout_22.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_22.setSpacing(0)
self.verticalLayout_22.setObjectName("verticalLayout_22")
self.horizontalLayout.addWidget(self.frame_5)
self.stackedWidget = QtWidgets.QStackedWidget(self.frame_9)
self.stackedWidget.setStyleSheet("")
self.stackedWidget.setObjectName("stackedWidget")
self.settingsTab = QtWidgets.QWidget()
self.settingsTab.setObjectName("settingsTab")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.settingsTab)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.frame_14 = QtWidgets.QFrame(self.settingsTab)
self.frame_14.setStyleSheet("background-color: rgb(40, 40, 40)")
self.frame_14.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_14.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_14.setObjectName("frame_14")
self.horizontalLayout_9 = QtWidgets.QHBoxLayout(self.frame_14)
self.horizontalLayout_9.setSpacing(20)
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.pushButton_2 = QtWidgets.QPushButton(self.frame_14)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_2.sizePolicy().hasHeightForWidth())
self.pushButton_2.setSizePolicy(sizePolicy)
self.pushButton_2.setMinimumSize(QtCore.QSize(9, 0))
self.pushButton_2.setText("")
self.pushButton_2.setIcon(icon)
self.pushButton_2.setIconSize(QtCore.QSize(30, 50))
self.pushButton_2.setDefault(False)
self.pushButton_2.setFlat(True)
self.pushButton_2.setObjectName("pushButton_2")
self.horizontalLayout_9.addWidget(self.pushButton_2)
self.label = QtWidgets.QLabel(self.frame_14)
self.label.setStyleSheet("font: 18pt \"Anurati\";\n"
"color:rgb(197, 197, 197)")
self.label.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label.setObjectName("label")
self.horizontalLayout_9.addWidget(self.label, 0, QtCore.Qt.AlignRight)
self.verticalLayout_5.addWidget(self.frame_14, 0, QtCore.Qt.AlignHCenter)
self.label_credits_4 = QtWidgets.QLabel(self.settingsTab)
font = QtGui.QFont()
font.setFamily("Segoe UI")
self.label_credits_4.setFont(font)
self.label_credits_4.setStyleSheet("color: rgb(98, 103, 111);")
self.label_credits_4.setFrameShadow(QtWidgets.QFrame.Raised)
self.label_credits_4.setIndent(5)
self.label_credits_4.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_credits_4.setObjectName("label_credits_4")
self.verticalLayout_5.addWidget(self.label_credits_4)
self.label_credits_3 = QtWidgets.QLabel(self.settingsTab)
font = QtGui.QFont()
font.setFamily("Segoe UI")
self.label_credits_3.setFont(font)
self.label_credits_3.setStyleSheet("color: rgb(98, 103, 111);")
self.label_credits_3.setFrameShadow(QtWidgets.QFrame.Raised)
self.label_credits_3.setIndent(5)
self.label_credits_3.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_credits_3.setObjectName("label_credits_3")
self.verticalLayout_5.addWidget(self.label_credits_3)
self.frame_10 = QtWidgets.QFrame(self.settingsTab)
self.frame_10.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_10.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_10.setObjectName("frame_10")
self.horizontalLayout_8 = QtWidgets.QHBoxLayout(self.frame_10)
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.textEdit = QtWidgets.QTextEdit(self.frame_10)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.textEdit.sizePolicy().hasHeightForWidth())
self.textEdit.setSizePolicy(sizePolicy)
self.textEdit.setMinimumSize(QtCore.QSize(425, 0))
self.textEdit.setStyleSheet("QTextEdit {\n"
" border-radius: 5px;\n"
" color : rgb(213, 213, 213);\n"
" border: 2px solid rgb(64, 71, 88);\n"
" padding-left: 10px;\n"
"}\n"
"QTextEdit:hover {\n"
" border: 2px solid rgb(64, 71, 88);\n"
"}\n"
"QTextEdit:focus {\n"
" border: 2px solid rgb(91, 101, 124);\n"
"}\n"
"")
self.textEdit.setDocumentTitle("")
self.textEdit.setReadOnly(True)
self.textEdit.setObjectName("textEdit")
self.horizontalLayout_8.addWidget(self.textEdit)
self.verticalLayout_5.addWidget(self.frame_10)
self.stackedWidget.addWidget(self.settingsTab)
self.userTab = QtWidgets.QWidget()
self.userTab.setObjectName("userTab")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.userTab)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.frame_59 = QtWidgets.QFrame(self.userTab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_59.sizePolicy().hasHeightForWidth())
self.frame_59.setSizePolicy(sizePolicy)
self.frame_59.setStyleSheet("QPushButton { \n"
" border: none;\n"
" background-color: transparent;\n"
" font: 15pt \"Raavi\";\n"
" color: rgb(85, 170, 127);\n"
" padding:5px;\n"
" border-radius:10px;\n"
"\n"
"}\n"
"QPushButton:hover {\n"
" background-color: rgb(52, 59, 72);\n"
"}\n"
"QPushButton:pressed { \n"
" background-color: rgb(85, 170, 255);\n"
"}")
self.frame_59.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_59.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_59.setObjectName("frame_59")
self.horizontalLayout_25 = QtWidgets.QHBoxLayout(self.frame_59)
self.horizontalLayout_25.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_25.setSpacing(30)
self.horizontalLayout_25.setObjectName("horizontalLayout_25")
self.pushButton_10 = QtWidgets.QPushButton(self.frame_59)
self.pushButton_10.setObjectName("pushButton_10")
self.horizontalLayout_25.addWidget(self.pushButton_10)
self.pushButton_6 = QtWidgets.QPushButton(self.frame_59)
self.pushButton_6.setObjectName("pushButton_6")
self.horizontalLayout_25.addWidget(self.pushButton_6)
self.pushButton_8 = QtWidgets.QPushButton(self.frame_59)
self.pushButton_8.setObjectName("pushButton_8")
self.horizontalLayout_25.addWidget(self.pushButton_8)
self.verticalLayout_2.addWidget(self.frame_59, 0, QtCore.Qt.AlignHCenter)
self.frame_25 = QtWidgets.QFrame(self.userTab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_25.sizePolicy().hasHeightForWidth())
self.frame_25.setSizePolicy(sizePolicy)
self.frame_25.setMinimumSize(QtCore.QSize(100, 194))
self.frame_25.setStyleSheet("")
self.frame_25.setObjectName("frame_25")
self.horizontalLayout_17 = QtWidgets.QHBoxLayout(self.frame_25)
self.horizontalLayout_17.setContentsMargins(0, 0, -1, 0)
self.horizontalLayout_17.setSpacing(0)
self.horizontalLayout_17.setObjectName("horizontalLayout_17")
self.frame_3 = QtWidgets.QFrame(self.frame_25)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_3.sizePolicy().hasHeightForWidth())
self.frame_3.setSizePolicy(sizePolicy)
self.frame_3.setMinimumSize(QtCore.QSize(0, 0))
self.frame_3.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_3.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_3.setObjectName("frame_3")
self.horizontalLayout_10 = QtWidgets.QHBoxLayout(self.frame_3)
self.horizontalLayout_10.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_10.setSpacing(0)
self.horizontalLayout_10.setObjectName("horizontalLayout_10")
self.label_11 = QtWidgets.QLabel(self.frame_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_11.sizePolicy().hasHeightForWidth())
self.label_11.setSizePolicy(sizePolicy)
self.label_11.setMinimumSize(QtCore.QSize(114, 113))
self.label_11.setMaximumSize(QtCore.QSize(79, 90))
self.label_11.setText("")
#self.label_11.setPixmap(QtGui.QPixmap(":/icon/roshita.jpeg"))
#self.label_11.setScaledContents(True)
self.label_11.setObjectName("label_11")
self.horizontalLayout_10.addWidget(self.label_11, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.frame_11 = QtWidgets.QFrame(self.frame_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_11.sizePolicy().hasHeightForWidth())
self.frame_11.setSizePolicy(sizePolicy)
self.frame_11.setMinimumSize(QtCore.QSize(263, 126))
self.frame_11.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_11.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_11.setObjectName("frame_11")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.frame_11)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self._studentname = QtWidgets.QLabel(self.frame_11)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self._studentname.sizePolicy().hasHeightForWidth())
self._studentname.setSizePolicy(sizePolicy)
self._studentname.setStyleSheet("font: 35pt \"Segoe UI light\";\n"
"color: rgb(255, 71, 120);")
self._studentname.setObjectName("_studentname")
self.verticalLayout_3.addWidget(self._studentname, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.label_40 = QtWidgets.QLabel(self.frame_11)
self.label_40.setText("")
self.label_40.setObjectName("label_40")
self.verticalLayout_3.addWidget(self.label_40)
self.frame_6 = QtWidgets.QFrame(self.frame_11)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_6.sizePolicy().hasHeightForWidth())
self.frame_6.setSizePolicy(sizePolicy)
self.frame_6.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.frame_6.setStyleSheet("QLabel{\n"
" \n"
" color: rgb(85, 170, 127);\n"
" \n"
" font: 25 10pt \"Segoe UI Light\";\n"
"}")
self.frame_6.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_6.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_6.setObjectName("frame_6")
self.gridLayout_2 = QtWidgets.QGridLayout(self.frame_6)
self.gridLayout_2.setContentsMargins(20, 3, 0, 0)
self.gridLayout_2.setHorizontalSpacing(40)
self.gridLayout_2.setVerticalSpacing(0)
self.gridLayout_2.setObjectName("gridLayout_2")
self._dob = QtWidgets.QLabel(self.frame_6)
self._dob.setObjectName("_dob")
self.gridLayout_2.addWidget(self._dob, 1, 3, 1, 1)
self.label_16 = QtWidgets.QLabel(self.frame_6)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_16.sizePolicy().hasHeightForWidth())
self.label_16.setSizePolicy(sizePolicy)
self.label_16.setMinimumSize(QtCore.QSize(82, 17))
self.label_16.setObjectName("label_16")
self.gridLayout_2.addWidget(self.label_16, 1, 0, 1, 1)
self.label_13 = QtWidgets.QLabel(self.frame_6)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_13.sizePolicy().hasHeightForWidth())
self.label_13.setSizePolicy(sizePolicy)
self.label_13.setMinimumSize(QtCore.QSize(82, 17))
self.label_13.setObjectName("label_13")
self.gridLayout_2.addWidget(self.label_13, 2, 0, 1, 1)
self._cat = QtWidgets.QLabel(self.frame_6)
self._cat.setObjectName("_cat")
self.gridLayout_2.addWidget(self._cat, 5, 5, 1, 1)
self.label_52 = QtWidgets.QLabel(self.frame_6)
self.label_52.setObjectName("label_52")
self.gridLayout_2.addWidget(self.label_52, 5, 2, 1, 1)
self.label_57 = QtWidgets.QLabel(self.frame_6)
self.label_57.setObjectName("label_57")
self.gridLayout_2.addWidget(self.label_57, 3, 2, 1, 1)
self._cast = QtWidgets.QLabel(self.frame_6)
self._cast.setMinimumSize(QtCore.QSize(70, 17))
self._cast.setObjectName("_cast")
self.gridLayout_2.addWidget(self._cast, 5, 1, 1, 1)
self._fathername = QtWidgets.QLabel(self.frame_6)
self._fathername.setMinimumSize(QtCore.QSize(70, 17))
self._fathername.setObjectName("_fathername")
self.gridLayout_2.addWidget(self._fathername, 2, 1, 1, 1)
self.label_54 = QtWidgets.QLabel(self.frame_6)
self.label_54.setObjectName("label_54")
self.gridLayout_2.addWidget(self.label_54, 1, 2, 1, 1)
self.label_56 = QtWidgets.QLabel(self.frame_6)
self.label_56.setObjectName("label_56")
self.gridLayout_2.addWidget(self.label_56, 3, 0, 1, 1)
self._mothername = QtWidgets.QLabel(self.frame_6)
self._mothername.setObjectName("_mothername")
self.gridLayout_2.addWidget(self._mothername, 2, 3, 1, 1)
self.label_42 = QtWidgets.QLabel(self.frame_6)
self.label_42.setObjectName("label_42")
self.gridLayout_2.addWidget(self.label_42, 5, 4, 1, 1)
self.label_38 = QtWidgets.QLabel(self.frame_6)
self.label_38.setObjectName("label_38")
self.gridLayout_2.addWidget(self.label_38, 2, 4, 1, 1)
self._doa = QtWidgets.QLabel(self.frame_6)
self._doa.setObjectName("_doa")
self.gridLayout_2.addWidget(self._doa, 1, 5, 1, 1)
self._permaadd = QtWidgets.QLabel(self.frame_6)
self._permaadd.setObjectName("_permaadd")
self.gridLayout_2.addWidget(self._permaadd, 2, 5, 1, 1)
self.label_50 = QtWidgets.QLabel(self.frame_6)
self.label_50.setObjectName("label_50")
self.gridLayout_2.addWidget(self.label_50, 2, 2, 1, 1)
self._religion = QtWidgets.QLabel(self.frame_6)
self._religion.setObjectName("_religion")
self.gridLayout_2.addWidget(self._religion, 5, 3, 1, 1)
self._class = QtWidgets.QLabel(self.frame_6)
self._class.setMinimumSize(QtCore.QSize(70, 17))
self._class.setObjectName("_class")
self.gridLayout_2.addWidget(self._class, 1, 1, 1, 1)
self.label_22 = QtWidgets.QLabel(self.frame_6)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_22.sizePolicy().hasHeightForWidth())
self.label_22.setSizePolicy(sizePolicy)
self.label_22.setMinimumSize(QtCore.QSize(82, 17))
self.label_22.setObjectName("label_22")
self.gridLayout_2.addWidget(self.label_22, 5, 0, 1, 1)
self.label_58 = QtWidgets.QLabel(self.frame_6)
self.label_58.setObjectName("label_58")
self.gridLayout_2.addWidget(self.label_58, 1, 4, 1, 1)
self.label_41 = QtWidgets.QLabel(self.frame_6)
self.label_41.setObjectName("label_41")
self.gridLayout_2.addWidget(self.label_41, 3, 4, 1, 1)
self._phoneno = QtWidgets.QLabel(self.frame_6)
self._phoneno.setObjectName("_phoneno")
self.gridLayout_2.addWidget(self._phoneno, 3, 1, 1, 1)
self._nationality = QtWidgets.QLabel(self.frame_6)
self._nationality.setObjectName("_nationality")
self.gridLayout_2.addWidget(self._nationality, 3, 3, 1, 1)
self._corradd = QtWidgets.QLabel(self.frame_6)
self._corradd.setObjectName("_corradd")
self.gridLayout_2.addWidget(self._corradd, 3, 5, 1, 1)
self.verticalLayout_3.addWidget(self.frame_6)
self.horizontalLayout_10.addWidget(self.frame_11)
self.horizontalLayout_17.addWidget(self.frame_3, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.verticalLayout_2.addWidget(self.frame_25)
self.label_49 = QtWidgets.QLabel(self.userTab)
self.label_49.setText("")
self.label_49.setAlignment(QtCore.Qt.AlignCenter)
self.label_49.setObjectName("label_49")
self.verticalLayout_2.addWidget(self.label_49)
self.frame_12 = QtWidgets.QFrame(self.userTab)
self.frame_12.setStyleSheet("background:transparent;\n"
"")
self.frame_12.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_12.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_12.setObjectName("frame_12")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.frame_12)
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_4.setSpacing(0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.textEdit_2 = QtWidgets.QTextEdit(self.frame_12)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.textEdit_2.sizePolicy().hasHeightForWidth())
self.textEdit_2.setSizePolicy(sizePolicy)
self.textEdit_2.setMinimumSize(QtCore.QSize(0, 100))
self.textEdit_2.setMaximumSize(QtCore.QSize(16777215, 130))
font = QtGui.QFont()
font.setFamily("Segoe UI Light")
font.setPointSize(10)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.textEdit_2.setFont(font)
self.textEdit_2.setStyleSheet("QTextEdit {\n"
" background-color: rgb(45, 45, 45);\n"
" border-radius: 5px;\n"
" color:#fff;\n"
" font-family:\'Segoe UI Light\';\n"
" font-size:10pt;\n"
" font-weight:400; \n"
" font-style:normal;\n"
" padding-left: 10px;\n"
"}\n"
"QTextEdit:hover {\n"
" background-color: rgb(43, 43, 43);\n"
" border: 1px solid rgb(64, 71, 88);\n"
"}\n"
"QTextEdit:focus {\n"
" background-color: rgb(40, 40, 40);\n"
" border: 1px solid rgb(91, 101, 124);\n"
"}\n"
"\n"
"Qmenu{\n"
" \n"
" background-color: rgb(149, 149, 149);\n"
"}")
self.textEdit_2.setFrameShape(QtWidgets.QFrame.Box)
self.textEdit_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.textEdit_2.setTabChangesFocus(True)
self.textEdit_2.setDocumentTitle("")
self.textEdit_2.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByKeyboard|QtCore.Qt.LinksAccessibleByMouse|QtCore.Qt.TextBrowserInteraction|QtCore.Qt.TextEditable|QtCore.Qt.TextEditorInteraction|QtCore.Qt.TextSelectableByKeyboard|QtCore.Qt.TextSelectableByMouse)
self.textEdit_2.setObjectName("textEdit_2")
self.verticalLayout_4.addWidget(self.textEdit_2)
self.verticalLayout_2.addWidget(self.frame_12)
self.stackedWidget.addWidget(self.userTab)
self.loginTab = QtWidgets.QWidget()
self.loginTab.setObjectName("loginTab")
self.horizontalLayout_22 = QtWidgets.QHBoxLayout(self.loginTab)
self.horizontalLayout_22.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_22.setSpacing(0)
self.horizontalLayout_22.setObjectName("horizontalLayout_22")
self.frame_26 = QtWidgets.QFrame(self.loginTab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_26.sizePolicy().hasHeightForWidth())
self.frame_26.setSizePolicy(sizePolicy)
self.frame_26.setMinimumSize(QtCore.QSize(460, 0))
self.frame_26.setSizeIncrement(QtCore.QSize(400, 0))
self.frame_26.setStyleSheet("background-color: rgb(255, 82, 85);")
self.frame_26.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_26.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_26.setObjectName("frame_26")
self.verticalLayout_20 = QtWidgets.QVBoxLayout(self.frame_26)
self.verticalLayout_20.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_20.setSpacing(0)
self.verticalLayout_20.setObjectName("verticalLayout_20")
self.frame_39 = QtWidgets.QFrame(self.frame_26)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_39.sizePolicy().hasHeightForWidth())
self.frame_39.setSizePolicy(sizePolicy)
self.frame_39.setMinimumSize(QtCore.QSize(2, 2))
self.frame_39.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_39.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_39.setObjectName("frame_39")
self.verticalLayout_8 = QtWidgets.QVBoxLayout(self.frame_39)
self.verticalLayout_8.setObjectName("verticalLayout_8")
self.frame_43 = QtWidgets.QFrame(self.frame_39)
self.frame_43.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_43.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_43.setObjectName("frame_43")
self.verticalLayout_21 = QtWidgets.QVBoxLayout(self.frame_43)
self.verticalLayout_21.setObjectName("verticalLayout_21")
self.label_24 = QtWidgets.QLabel(self.frame_43)
self.label_24.setMaximumSize(QtCore.QSize(100, 120))
self.label_24.setText("")
#self.label_24.setPixmap(QtGui.QPixmap("D:\\all\\python projects\\python os\\ui\\Simple_PySide_Base-org\\e\\../../../../../../Program Files (x86)/Flux OS/Icons/User/Profession/doctor-1.png"))
self.label_24.setScaledContents(True)
self.label_24.setObjectName("label_24")
self.verticalLayout_21.addWidget(self.label_24)
self.verticalLayout_8.addWidget(self.frame_43, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.label_29 = QtWidgets.QLabel(self.frame_39)
font = QtGui.QFont()
font.setFamily("MoolBoran")
font.setPointSize(100)
self.label_29.setFont(font)
self.label_29.setStyleSheet("color: rgb(232, 206, 77);")
self.label_29.setObjectName("label_29")
self.verticalLayout_8.addWidget(self.label_29)
self.verticalLayout_20.addWidget(self.frame_39, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.horizontalLayout_22.addWidget(self.frame_26)
self.frame_32 = QtWidgets.QFrame(self.loginTab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_32.sizePolicy().hasHeightForWidth())
self.frame_32.setSizePolicy(sizePolicy)
self.frame_32.setMinimumSize(QtCore.QSize(350, 365))
self.frame_32.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_32.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_32.setObjectName("frame_32")
self.verticalLayout_44 = QtWidgets.QVBoxLayout(self.frame_32)
self.verticalLayout_44.setObjectName("verticalLayout_44")
self.frame_54 = QtWidgets.QFrame(self.frame_32)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_54.sizePolicy().hasHeightForWidth())
self.frame_54.setSizePolicy(sizePolicy)
self.frame_54.setStyleSheet("font: 25 11pt \"Segoe UI Light\";")
self.frame_54.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_54.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_54.setObjectName("frame_54")
self.verticalLayout_43 = QtWidgets.QVBoxLayout(self.frame_54)
self.verticalLayout_43.setSpacing(15)
self.verticalLayout_43.setObjectName("verticalLayout_43")
self.frame_56 = QtWidgets.QFrame(self.frame_54)
self.frame_56.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_56.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_56.setObjectName("frame_56")
self.verticalLayout_46 = QtWidgets.QVBoxLayout(self.frame_56)
self.verticalLayout_46.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_46.setSpacing(0)
self.verticalLayout_46.setObjectName("verticalLayout_46")
self.label_36 = QtWidgets.QLabel(self.frame_56)
self.label_36.setStyleSheet("font: 25 30pt \"Segoe UI Light\";")
self.label_36.setObjectName("label_36")
self.verticalLayout_46.addWidget(self.label_36)
self.verticalLayout_43.addWidget(self.frame_56, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignTop)
self.frame_45 = QtWidgets.QFrame(self.frame_54)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_45.sizePolicy().hasHeightForWidth())
self.frame_45.setSizePolicy(sizePolicy)
self.frame_45.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_45.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_45.setObjectName("frame_45")
self.verticalLayout_34 = QtWidgets.QVBoxLayout(self.frame_45)
self.verticalLayout_34.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_34.setSpacing(6)
self.verticalLayout_34.setObjectName("verticalLayout_34")
self.label_31 = QtWidgets.QLabel(self.frame_45)
self.label_31.setStyleSheet("font: 25 13pt \"Segoe UI Light\";")
self.label_31.setObjectName("label_31")
self.verticalLayout_34.addWidget(self.label_31, 0, QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.frame_55 = QtWidgets.QFrame(self.frame_45)
self.frame_55.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_55.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_55.setObjectName("frame_55")
self.verticalLayout_45 = QtWidgets.QVBoxLayout(self.frame_55)
self.verticalLayout_45.setContentsMargins(40, 0, 0, 0)
self.verticalLayout_45.setSpacing(0)
self.verticalLayout_45.setObjectName("verticalLayout_45")
self.lineEdit = QtWidgets.QLineEdit(self.frame_55)
self.lineEdit.setStyleSheet("QLineEdit{\n"
" background:transparent;\n"
" color:rgb(213, 213, 213);\n"
" border-bottom:1px solid rgb(0, 170, 255);\n"
" border-radius: 0px;\n"
"}\n"
"QLineEdit:focus{\n"
" background:transparent;\n"
" color:rgb(213, 213, 213);\n"
" border-bottom:1px solid rgb(55, 98, 255);\n"
" border-radius: 0px;\n"
"}")
self.lineEdit.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit.setObjectName("lineEdit")
self.verticalLayout_45.addWidget(self.lineEdit)
self.verticalLayout_34.addWidget(self.frame_55)
self.verticalLayout_43.addWidget(self.frame_45, 0, QtCore.Qt.AlignVCenter)
self.frame_47 = QtWidgets.QFrame(self.frame_54)
self.frame_47.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_47.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_47.setObjectName("frame_47")
self.verticalLayout_36 = QtWidgets.QVBoxLayout(self.frame_47)
self.verticalLayout_36.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_36.setSpacing(1)
self.verticalLayout_36.setObjectName("verticalLayout_36")
self.label_34 = QtWidgets.QLabel(self.frame_47)
self.label_34.setStyleSheet("font: 25 13pt \"Segoe UI Light\";")
self.label_34.setObjectName("label_34")
self.verticalLayout_36.addWidget(self.label_34, 0, QtCore.Qt.AlignLeft|QtCore.Qt.AlignBottom)
self.frame_57 = QtWidgets.QFrame(self.frame_47)
self.frame_57.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_57.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_57.setObjectName("frame_57")
self.verticalLayout_47 = QtWidgets.QVBoxLayout(self.frame_57)
self.verticalLayout_47.setContentsMargins(40, 0, 0, 0)
self.verticalLayout_47.setSpacing(0)
self.verticalLayout_47.setObjectName("verticalLayout_47")
self.lineEdit_4 = QtWidgets.QLineEdit(self.frame_57)
self.lineEdit_4.setStyleSheet("QLineEdit{\n"
" background:transparent;\n"
" color:rgb(213, 213, 213);\n"
" border-bottom:1px solid rgb(0, 170, 255);\n"
" border-radius: 0px;\n"
"}\n"
"QLineEdit:focus{\n"
" background:transparent;\n"
" color:rgb(213, 213, 213);\n"
" border-bottom:1px solid rgb(55, 98, 255);\n"
" border-radius: 0px;\n"
"}")
self.lineEdit_4.setEchoMode(QtWidgets.QLineEdit.PasswordEchoOnEdit)
self.lineEdit_4.setAlignment(QtCore.Qt.AlignCenter)
self.lineEdit_4.setObjectName("lineEdit_4")
self.verticalLayout_47.addWidget(self.lineEdit_4)
self.verticalLayout_36.addWidget(self.frame_57)
self.verticalLayout_43.addWidget(self.frame_47, 0, QtCore.Qt.AlignTop)
self.frame_58 = QtWidgets.QFrame(self.frame_54)
self.frame_58.setStyleSheet("color: rgb(85, 255, 127);")
self.frame_58.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_58.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_58.setObjectName("frame_58")
self.horizontalLayout_24 = QtWidgets.QHBoxLayout(self.frame_58)
self.horizontalLayout_24.setContentsMargins(0, 0, 0, -1)
self.horizontalLayout_24.setObjectName("horizontalLayout_24")
self.radioButton_4 = QtWidgets.QRadioButton(self.frame_58)
self.radioButton_4.setStyleSheet("QRadioButton::indicator {\n"
" border: 3px solid rgb(52, 59, 72);\n"
" width: 15px;\n"
" height: 15px;\n"
" border-radius: 10px;\n"
" background: rgb(44, 49, 60);\n"
"}\n"
"QRadioButton::indicator:hover {\n"
" border: 3px solid rgb(58, 66, 81);\n"
"}\n"
"QRadioButton::indicator:checked {\n"
" background: 3px solid rgb(0, 170, 255);\n"
" border: 3px solid rgb(52, 59, 72); \n"
"}")
self.radioButton_4.setObjectName("radioButton_4")
self.horizontalLayout_24.addWidget(self.radioButton_4, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.radioButton_5 = QtWidgets.QRadioButton(self.frame_58)
self.radioButton_5.setStyleSheet("QRadioButton::indicator {\n"
" border: 3px solid rgb(52, 59, 72);\n"
" width: 15px;\n"
" height: 15px;\n"
" border-radius: 10px;\n"
" background: rgb(44, 49, 60);\n"
"}\n"
"QRadioButton::indicator:hover {\n"
" border: 3px solid rgb(58, 66, 81);\n"
"}\n"
"QRadioButton::indicator:checked {\n"
" background: 3px solid rgb(0, 170, 255);\n"
" border: 3px solid rgb(52, 59, 72); \n"
"}")
self.radioButton_5.setObjectName("radioButton_5")
self.horizontalLayout_24.addWidget(self.radioButton_5, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignVCenter)
self.verticalLayout_43.addWidget(self.frame_58, 0, QtCore.Qt.AlignVCenter)
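        # Large action button (likely the login/submit control) centered at the bottom of the form.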
self.pushButton_4 = QtWidgets.QPushButton(self.frame_54)
self.pushButton_4.setMinimumSize(QtCore.QSize(100, 33))
self.pushButton_4.setStyleSheet("QPushButton{\n"
" color: rgb(102, 255, 204);\n"
" font: 25 16pt \"Segoe UI Light\";\n"
"}\n"
"\n"
"QPushButton:hover {\n"
" background-color: rgb(52, 59, 72);\n"
" color: rgb(225, 255, 225);\n"
" border:None;\n"
" font: 25 16pt \"Segoe UI Light\";\n"
"}\n"
"\n"
"QPushButton:pressed{\n"
" background-color: rgb(85, 170, 255);\n"
" color: rgb(225, 255, 225);\n"
" font: 25 16pt \"Segoe UI Light\";\n"
"}\n"
"")
self.pushButton_4.setObjectName("pushButton_4")
self.verticalLayout_43.addWidget(self.pushButton_4, 0, QtCore.Qt.AlignHCenter|QtCore.Qt.AlignBottom)
self.verticalLayout_44.addWidget(self.frame_54)
self.horizontalLayout_22.addWidget(self.frame_32)
self.stackedWidget.addWidget(self.loginTab)
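        # Link tab: a centered heading, a scrollable table ("View") for what appears
        # to be class data, and a notes text box pinned to the bottom.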
self.linkTab = QtWidgets.QWidget()
self.linkTab.setObjectName("linkTab")
self.verticalLayout_14 = QtWidgets.QVBoxLayout(self.linkTab)
self.verticalLayout_14.setObjectName("verticalLayout_14")
self.frame_13 = QtWidgets.QFrame(self.linkTab)
self.frame_13.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_13.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_13.setObjectName("frame_13")
self.verticalLayout_6 = QtWidgets.QVBoxLayout(self.frame_13)
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.frame_22 = QtWidgets.QFrame(self.frame_13)
self.frame_22.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_22.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_22.setObjectName("frame_22")
self.verticalLayout_12 = QtWidgets.QVBoxLayout(self.frame_22)
self.verticalLayout_12.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_12.setSpacing(10)
self.verticalLayout_12.setObjectName("verticalLayout_12")
self.my_classes = QtWidgets.QFrame(self.frame_22)
self.my_classes.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.my_classes.setFrameShadow(QtWidgets.QFrame.Raised)
self.my_classes.setObjectName("my_classes")
self.verticalLayout_17 = QtWidgets.QVBoxLayout(self.my_classes)
self.verticalLayout_17.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_17.setSpacing(30)
self.verticalLayout_17.setObjectName("verticalLayout_17")
self.label_7 = QtWidgets.QLabel(self.my_classes)
font = QtGui.QFont()
font.setFamily("Segoe UI light")
font.setPointSize(35)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.label_7.setFont(font)
self.label_7.setStyleSheet("font: 35pt \"Segoe UI light\";\n"
"color: rgb(221, 51, 74);")
self.label_7.setObjectName("label_7")
self.verticalLayout_17.addWidget(self.label_7, 0, QtCore.Qt.AlignHCenter)
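        # Scroll area wrapping the table view; dark custom scrollbar styling, minimum size 400x266.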
self.scrollArea_5 = QtWidgets.QScrollArea(self.my_classes)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.scrollArea_5.sizePolicy().hasHeightForWidth())
self.scrollArea_5.setSizePolicy(sizePolicy)
self.scrollArea_5.setMinimumSize(QtCore.QSize(400, 266))
self.scrollArea_5.setStyleSheet("QScrollBar:horizontal {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" height: 14px;\n"
" margin: 0px 21px 0 21px;\n"
" border-radius: 0px;\n"
"}\n"
" QScrollBar:vertical {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" width: 14px;\n"
" margin: 21px 0 21px 0;\n"
" border-radius: 0px;\n"
" }")
self.scrollArea_5.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
self.scrollArea_5.setWidgetResizable(True)
self.scrollArea_5.setAlignment(QtCore.Qt.AlignCenter)
self.scrollArea_5.setObjectName("scrollArea_5")
self.scrollAreaWidgetContents_5 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_5.setGeometry(QtCore.QRect(0, 0, 867, 266))
self.scrollAreaWidgetContents_5.setObjectName("scrollAreaWidgetContents_5")
self.verticalLayout_15 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents_5)
self.verticalLayout_15.setObjectName("verticalLayout_15")
self.View = QtWidgets.QTableView(self.scrollAreaWidgetContents_5)
self.View.setStyleSheet("QTableView {\n"
" color: rgb(222, 222, 222); \n"
" background-color: rgb(40, 40, 40);\n"
" padding: 10px;\n"
" border-radius: 5px;\n"
" gridline-color: rgb(44, 49, 60);\n"
" border-bottom: 1px solid rgb(44, 49, 60);\n"
"}\n"
"QTableView::item{\n"
" border-color: rgb(44, 49, 60);\n"
" padding-left: 5px;\n"
" padding-right: 5px;\n"
" gridline-color: rgb(44, 49, 60);\n"
"}\n"
"QTableView::item:selected{\n"
" background-color: rgb(85, 170, 255);\n"
"}\n"
"QScrollBar:horizontal {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" height: 14px;\n"
" margin: 0px 21px 0 21px;\n"
" border-radius: 0px;\n"
"}\n"
" QScrollBar:vertical {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" width: 14px;\n"
" margin: 21px 0 21px 0;\n"
" border-radius: 0px;\n"
" }\n"
"QHeaderView::section{\n"
" color: rgb(222, 222, 222);\n"
" Background-color: rgb(39, 44, 54);\n"
" max-width: 30px;\n"
" border: 1px solid rgb(44, 49, 60);\n"
" border-style: none;\n"
" border-bottom: 1px solid rgb(44, 49, 60);\n"
" border-right: 1px solid rgb(44, 49, 60);\n"
"}\n"
"QTableView::horizontalHeader {\n"
" color: rgb(222, 222, 222); \n"
" background-color: rgb(81, 255, 0);\n"
"}\n"
"QHeaderView::section:horizontal\n"
"{\n"
" color: rgb(222, 222, 222);\n"
" border: 1px solid rgb(32, 34, 42);\n"
" background-color: rgb(27, 29, 35);\n"
" padding: 3px;\n"
" border-top-left-radius: 7px;\n"
" border-top-right-radius: 7px;\n"
"}\n"
"QHeaderView::section:vertical\n"
"{\n"
" color: rgb(222, 222, 222);\n"
" border: 1px solid rgb(44, 49, 60);\n"
"}\n"
"")
self.View.setObjectName("View")
self.verticalLayout_15.addWidget(self.View)
self.scrollArea_5.setWidget(self.scrollAreaWidgetContents_5)
self.verticalLayout_17.addWidget(self.scrollArea_5)
self.verticalLayout_12.addWidget(self.my_classes, 0, QtCore.Qt.AlignVCenter)
self.verticalLayout_6.addWidget(self.frame_22)
self.textEdit_3 = QtWidgets.QTextEdit(self.frame_13)
self.textEdit_3.setStyleSheet("QTextEdit {\n"
" background-color: rgb(45, 45, 45);\n"
" border-radius: 5px;\n"
" color:#fff;\n"
" font-family:\'Segoe UI Light\';\n"
" font-size:10pt;\n"
" font-weight:400; \n"
" font-style:normal;\n"
" padding-left: 10px;\n"
"}\n"
"QTextEdit:hover {\n"
" background-color: rgb(43, 43, 43);\n"
" border: 1px solid rgb(64, 71, 88);\n"
"}\n"
"QTextEdit:focus {\n"
" background-color: rgb(40, 40, 40);\n"
" border: 1px solid rgb(91, 101, 124);\n"
"}\n"
"\n"
"Qmenu{\n"
" \n"
" background-color: rgb(149, 149, 149);\n"
"}")
self.textEdit_3.setObjectName("textEdit_3")
self.verticalLayout_6.addWidget(self.textEdit_3, 0, QtCore.Qt.AlignBottom)
self.verticalLayout_14.addWidget(self.frame_13)
self.stackedWidget.addWidget(self.linkTab)
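        # Assignments tab: mirrors the link tab layout with a second heading,
        # an editable table view, and another notes text box.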
self.assigTab = QtWidgets.QWidget()
self.assigTab.setObjectName("assigTab")
self.verticalLayout_10 = QtWidgets.QVBoxLayout(self.assigTab)
self.verticalLayout_10.setObjectName("verticalLayout_10")
self.frame_15 = QtWidgets.QFrame(self.assigTab)
self.frame_15.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_15.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_15.setObjectName("frame_15")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.frame_15)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.frame_23 = QtWidgets.QFrame(self.frame_15)
self.frame_23.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_23.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_23.setObjectName("frame_23")
self.verticalLayout_13 = QtWidgets.QVBoxLayout(self.frame_23)
self.verticalLayout_13.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_13.setSpacing(10)
self.verticalLayout_13.setObjectName("verticalLayout_13")
self.my_classes_2 = QtWidgets.QFrame(self.frame_23)
self.my_classes_2.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.my_classes_2.setFrameShadow(QtWidgets.QFrame.Raised)
self.my_classes_2.setObjectName("my_classes_2")
self.verticalLayout_18 = QtWidgets.QVBoxLayout(self.my_classes_2)
self.verticalLayout_18.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_18.setSpacing(30)
self.verticalLayout_18.setObjectName("verticalLayout_18")
self.label_8 = QtWidgets.QLabel(self.my_classes_2)
font = QtGui.QFont()
font.setFamily("Segoe UI light")
font.setPointSize(35)
font.setBold(False)
font.setItalic(False)
font.setWeight(50)
self.label_8.setFont(font)
self.label_8.setStyleSheet("font: 35pt \"Segoe UI light\";\n"
"color: rgb(221, 51, 74);")
self.label_8.setObjectName("label_8")
self.verticalLayout_18.addWidget(self.label_8, 0, QtCore.Qt.AlignHCenter)
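        # Scroll area for the assignments table; same dark scrollbar styling as the link tab.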
self.scrollArea_6 = QtWidgets.QScrollArea(self.my_classes_2)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.scrollArea_6.sizePolicy().hasHeightForWidth())
self.scrollArea_6.setSizePolicy(sizePolicy)
self.scrollArea_6.setMinimumSize(QtCore.QSize(400, 266))
self.scrollArea_6.setStyleSheet("QScrollBar:horizontal {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" height: 14px;\n"
" margin: 0px 21px 0 21px;\n"
" border-radius: 0px;\n"
"}\n"
" QScrollBar:vertical {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" width: 14px;\n"
" margin: 21px 0 21px 0;\n"
" border-radius: 0px;\n"
" }")
self.scrollArea_6.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAsNeeded)
self.scrollArea_6.setWidgetResizable(True)
self.scrollArea_6.setAlignment(QtCore.Qt.AlignCenter)
self.scrollArea_6.setObjectName("scrollArea_6")
self.scrollAreaWidgetContents_6 = QtWidgets.QWidget()
self.scrollAreaWidgetContents_6.setGeometry(QtCore.QRect(0, 0, 867, 266))
self.scrollAreaWidgetContents_6.setObjectName("scrollAreaWidgetContents_6")
self.verticalLayout_16 = QtWidgets.QVBoxLayout(self.scrollAreaWidgetContents_6)
self.verticalLayout_16.setObjectName("verticalLayout_16")
self.tableView = QtWidgets.QTableView(self.scrollAreaWidgetContents_6)
self.tableView.setStyleSheet("QTableView {\n"
" color: rgb(222, 222, 222); \n"
" background-color: rgb(40, 40, 40);\n"
" padding: 10px;\n"
" border-radius: 5px;\n"
" gridline-color: rgb(44, 49, 60);\n"
" border-bottom: 1px solid rgb(44, 49, 60);\n"
"}\n"
"QTableView::item{\n"
" border-color: rgb(44, 49, 60);\n"
" padding-left: 5px;\n"
" padding-right: 5px;\n"
" gridline-color: rgb(44, 49, 60);\n"
"}\n"
"QTableView::item:selected{\n"
" background-color: rgb(85, 170, 255);\n"
"}\n"
"QScrollBar:horizontal {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" height: 14px;\n"
" margin: 0px 21px 0 21px;\n"
" border-radius: 0px;\n"
"}\n"
" QScrollBar:vertical {\n"
" border: none;\n"
" background: rgb(52, 59, 72);\n"
" width: 14px;\n"
" margin: 21px 0 21px 0;\n"
" border-radius: 0px;\n"
" }\n"
"QHeaderView::section{\n"
" color: rgb(222, 222, 222);\n"
" Background-color: rgb(39, 44, 54);\n"
" max-width: 30px;\n"
" border: 1px solid rgb(44, 49, 60);\n"
" border-style: none;\n"
" border-bottom: 1px solid rgb(44, 49, 60);\n"
" border-right: 1px solid rgb(44, 49, 60);\n"
"}\n"
"QTableView::horizontalHeader {\n"
" color: rgb(222, 222, 222); \n"
" background-color: rgb(81, 255, 0);\n"
"}\n"
"QHeaderView::section:horizontal\n"
"{\n"
" color: rgb(222, 222, 222);\n"
" border: 1px solid rgb(32, 34, 42);\n"
" background-color: rgb(27, 29, 35);\n"
" padding: 3px;\n"
" border-top-left-radius: 7px;\n"
" border-top-right-radius: 7px;\n"
"}\n"
"QHeaderView::section:vertical\n"
"{\n"
" color: rgb(222, 222, 222);\n"
" border: 1px solid rgb(44, 49, 60);\n"
"}\n"
"")
self.tableView.setEditTriggers(QtWidgets.QAbstractItemView.AnyKeyPressed|QtWidgets.QAbstractItemView.DoubleClicked|QtWidgets.QAbstractItemView.EditKeyPressed|QtWidgets.QAbstractItemView.SelectedClicked)
self.tableView.setObjectName("tableView")
self.tableView.verticalHeader().setVisible(False)
self.verticalLayout_16.addWidget(self.tableView)
self.scrollArea_6.setWidget(self.scrollAreaWidgetContents_6)
self.verticalLayout_18.addWidget(self.scrollArea_6)
self.verticalLayout_13.addWidget(self.my_classes_2, 0, QtCore.Qt.AlignVCenter)
self.verticalLayout_7.addWidget(self.frame_23)
self.textEdit_4 = QtWidgets.QTextEdit(self.frame_15)
self.textEdit_4.setStyleSheet("QTextEdit {\n"
" background-color: rgb(45, 45, 45);\n"
" border-radius: 5px;\n"
" color:#fff;\n"
" font-family:\'Segoe UI Light\';\n"
" font-size:10pt;\n"
" font-weight:400; \n"
" font-style:normal;\n"
" padding-left: 10px;\n"
"}\n"
"QTextEdit:hover {\n"
" background-color: rgb(43, 43, 43);\n"
" border: 1px solid rgb(64, 71, 88);\n"
"}\n"
"QTextEdit:focus {\n"
" background-color: rgb(40, 40, 40);\n"
" border: 1px solid rgb(91, 101, 124);\n"
"}\n"
"\n"
"Qmenu{\n"
" \n"
" background-color: rgb(149, 149, 149);\n"
"}")
self.textEdit_4.setObjectName("textEdit_4")
self.verticalLayout_7.addWidget(self.textEdit_4, 0, QtCore.Qt.AlignBottom)
self.verticalLayout_10.addWidget(self.frame_15)
self.stackedWidget.addWidget(self.assigTab)
self.horizontalLayout.addWidget(self.stackedWidget)
self.verticalLayout.addWidget(self.frame_9)
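        # Bottom status bar: credits label on the left, an exit button (cil-exit-to-app
        # icon), a right-aligned label, and a size-grip style button on the far right.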
self.frame_8 = QtWidgets.QFrame(self.frame)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_8.sizePolicy().hasHeightForWidth())
self.frame_8.setSizePolicy(sizePolicy)
self.frame_8.setMinimumSize(QtCore.QSize(0, 18))
font = QtGui.QFont()
font.setFamily("Segoe UI Semilight")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.frame_8.setFont(font)
self.frame_8.setStyleSheet("color: rgb(98, 103, 111);\n"
"background-color: rgb(25, 25, 25);")
self.frame_8.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_8.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_8.setObjectName("frame_8")
self.horizontalLayout_7 = QtWidgets.QHBoxLayout(self.frame_8)
self.horizontalLayout_7.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_7.setSpacing(0)
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.label_credits = QtWidgets.QLabel(self.frame_8)
self.label_credits.setMaximumSize(QtCore.QSize(160, 16777215))
font = QtGui.QFont()
font.setFamily("Segoe UI")
self.label_credits.setFont(font)
self.label_credits.setStyleSheet("color: rgb(98, 103, 111);\n"
"background-color: rgb(25, 25, 25);")
self.label_credits.setFrameShadow(QtWidgets.QFrame.Raised)
self.label_credits.setIndent(5)
self.label_credits.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.label_credits.setObjectName("label_credits")
self.horizontalLayout_7.addWidget(self.label_credits)
self.pushButton_7 = QtWidgets.QPushButton(self.frame_8)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.pushButton_7.sizePolicy().hasHeightForWidth())
self.pushButton_7.setSizePolicy(sizePolicy)
self.pushButton_7.setMaximumSize(QtCore.QSize(50, 16777215))
self.pushButton_7.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
self.pushButton_7.setText("")
icon4 = QtGui.QIcon()
icon4.addPixmap(QtGui.QPixmap(os.path.abspath("./icons/window/cil-exit-to-app.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_7.setIcon(icon4)
self.pushButton_7.setIconSize(QtCore.QSize(16, 16))
self.pushButton_7.setObjectName("pushButton_7")
self.horizontalLayout_7.addWidget(self.pushButton_7)
self.label_3 = QtWidgets.QLabel(self.frame_8)
self.label_3.setObjectName("label_3")
self.horizontalLayout_7.addWidget(self.label_3, 0, QtCore.Qt.AlignRight|QtCore.Qt.AlignVCenter)
self.pushButton_5 = QtWidgets.QPushButton(self.frame_8)
self.pushButton_5.setText("")
icon5 = QtGui.QIcon()
icon5.addPixmap(QtGui.QPixmap(os.path.abspath("./icons/window/cil-size-grip.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.pushButton_5.setIcon(icon5)
self.pushButton_5.setObjectName("pushButton_5")
self.horizontalLayout_7.addWidget(self.pushButton_5, 0, QtCore.Qt.AlignRight)
self.verticalLayout.addWidget(self.frame_8)
self.verticalLayout_9.addWidget(self.frame)
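        # Final wiring: populate translatable strings, pick the start page (index 1),
        # and let Enter advance focus from the first field to the password field,
        # then to the first radio button.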
self.retranslateUi(Form)
self.stackedWidget.setCurrentIndex(1)
self.lineEdit.returnPressed.connect(self.lineEdit_4.setFocus)
self.lineEdit_4.returnPressed.connect(self.radioButton_4.setFocus)
QtCore.QMetaObject.connectSlotsByName(Form)
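
    # All user-visible strings are set here so Qt's translation system can localize them.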
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Form"))
self.label_2.setText(_translate("Form", "<html><head/><body><p><span style=\" color:#eaeaea;\">Accevate Technologies</span></p></body></html>"))
self.label.setText(_translate("Form", "<html><head/><body><p> CODE IT</p></body></html>"))
        self.label_credits_4.setText(_translate("Form", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt;\">This product\'s content is licensed exclusively to &quot;Accevate Technologies&quot;</span></p></body></html>"))
        self.label_credits_3.setText(_translate("Form", "<html><head/><body><p align=\"center\"><span style=\" font-size:9pt;\">This product is under a CC BY licence to Abhiraj Ranjan and Roshita Kaushik</span></p></body></html>"))
self.textEdit.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Creative Commons Attribution 4.0 International Public License</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">By exercising the Licensed Rights (defined below), You accept and agree</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">to be bound by the terms and conditions of this Creative Commons</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Attribution 4.0 International Public License ("Public License"). To the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">extent this Public License may be interpreted as a contract, You are</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">granted the Licensed Rights in consideration of Your acceptance of</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">these terms and conditions, and the Licensor grants You such rights in</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">consideration of benefits the Licensor receives from making the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Licensed Material available under these terms and conditions.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 1 -- Definitions.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. Adapted Material means material subject to Copyright and Similar</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Rights that is derived from or based upon the Licensed Material</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> and in which the Licensed Material is translated, altered,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> arranged, transformed, or otherwise modified in a manner requiring</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> permission under the Copyright and Similar Rights held by the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensor. For purposes of this Public License, where the Licensed</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Material is a musical work, performance, or sound recording,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Adapted Material is always produced where the Licensed Material is</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> synched in timed relation with a moving image.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. Adapter\'s License means the license You apply to Your Copyright</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> and Similar Rights in Your contributions to Adapted Material in</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> accordance with the terms and conditions of this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> c. Copyright and Similar Rights means copyright and/or similar rights</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> closely related to copyright including, without limitation,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> performance, broadcast, sound recording, and Sui Generis Database</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Rights, without regard to how the rights are labeled or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> categorized. For purposes of this Public License, the rights</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> specified in Section 2(b)(1)-(2) are not Copyright and Similar</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Rights.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> d. Effective Technological Measures means those measures that, in the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> absence of proper authority, may not be circumvented under laws</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> fulfilling obligations under Article 11 of the WIPO Copyright</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Treaty adopted on December 20, 1996, and/or similar international</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> agreements.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> e. Exceptions and Limitations means fair use, fair dealing, and/or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> any other exception or limitation to Copyright and Similar Rights</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> that applies to Your use of the Licensed Material.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> f. Licensed Material means the artistic or literary work, database,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> or other material to which the Licensor applied this Public</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> g. Licensed Rights means the rights granted to You subject to the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> terms and conditions of this Public License, which are limited to</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> all Copyright and Similar Rights that apply to Your use of the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensed Material and that the Licensor has authority to license.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> h. Licensor means the individual(s) or entity(ies) granting rights</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> under this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> i. Share means to provide material to the public by any means or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> process that requires permission under the Licensed Rights, such</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> as reproduction, public display, public performance, distribution,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> dissemination, communication, or importation, and to make material</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> available to the public including in ways that members of the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> public may access the material from a place and at a time</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> individually chosen by them.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> j. Sui Generis Database Rights means rights other than copyright</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> resulting from Directive 96/9/EC of the European Parliament and of</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> the Council of 11 March 1996 on the legal protection of databases,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> as amended and/or succeeded, as well as other essentially</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> equivalent rights anywhere in the world.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> k. You means the individual or entity exercising the Licensed Rights</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> under this Public License. Your has a corresponding meaning.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 2 -- Scope.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. License grant.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 1. Subject to the terms and conditions of this Public License,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> the Licensor hereby grants You a worldwide, royalty-free,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> non-sublicensable, non-exclusive, irrevocable license to</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> exercise the Licensed Rights in the Licensed Material to:</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. reproduce and Share the Licensed Material, in whole or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> in part; and</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. produce, reproduce, and Share Adapted Material.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 2. Exceptions and Limitations. For the avoidance of doubt, where</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Exceptions and Limitations apply to Your use, this Public</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> License does not apply, and You do not need to comply with</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> its terms and conditions.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 3. Term. The term of this Public License is specified in Section</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 6(a).</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 4. Media and formats; technical modifications allowed. The</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensor authorizes You to exercise the Licensed Rights in</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> all media and formats whether now known or hereafter created,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> and to make technical modifications necessary to do so. The</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensor waives and/or agrees not to assert any right or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> authority to forbid You from making technical modifications</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> necessary to exercise the Licensed Rights, including</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> technical modifications necessary to circumvent Effective</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Technological Measures. For purposes of this Public License,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> simply making modifications authorized by this Section 2(a)</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> (4) never produces Adapted Material.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 5. Downstream recipients.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. Offer from the Licensor -- Licensed Material. Every</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> recipient of the Licensed Material automatically</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> receives an offer from the Licensor to exercise the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensed Rights under the terms and conditions of this</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. No downstream restrictions. You may not offer or impose</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> any additional or different terms or conditions on, or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> apply any Effective Technological Measures to, the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensed Material if doing so restricts exercise of the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensed Rights by any recipient of the Licensed</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Material.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 6. No endorsement. Nothing in this Public License constitutes or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> may be construed as permission to assert or imply that You</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> are, or that Your use of the Licensed Material is, connected</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> with, or sponsored, endorsed, or granted official status by,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> the Licensor or others designated to receive attribution as</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> provided in Section 3(a)(1)(A)(i).</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. Other rights.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 1. Moral rights, such as the right of integrity, are not</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> licensed under this Public License, nor are publicity,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> privacy, and/or other similar personality rights; however, to</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> the extent possible, the Licensor waives and/or agrees not to</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> assert any such rights held by the Licensor to the limited</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> extent necessary to allow You to exercise the Licensed</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Rights, but not otherwise.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 2. Patent and trademark rights are not licensed under this</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 3. To the extent possible, the Licensor waives any right to</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> collect royalties from You for the exercise of the Licensed</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Rights, whether directly or through a collecting society</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> under any voluntary or waivable statutory or compulsory</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> licensing scheme. In all other cases the Licensor expressly</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> reserves any right to collect such royalties.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 3 -- License Conditions.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Your exercise of the Licensed Rights is expressly made subject to the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">following conditions.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. Attribution.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 1. If You Share the Licensed Material (including in modified</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> form), You must:</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. retain the following if it is supplied by the Licensor</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> with the Licensed Material:</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> i. identification of the creator(s) of the Licensed</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Material and any others designated to receive</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> attribution, in any reasonable manner requested by</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> the Licensor (including by pseudonym if</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> designated);</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> ii. a copyright notice;</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> iii. a notice that refers to this Public License;</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> iv. a notice that refers to the disclaimer of</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> warranties;</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> v. a URI or hyperlink to the Licensed Material to the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> extent reasonably practicable;</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. indicate if You modified the Licensed Material and</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> retain an indication of any previous modifications; and</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> c. indicate the Licensed Material is licensed under this</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Public License, and include the text of, or the URI or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> hyperlink to, this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 2. You may satisfy the conditions in Section 3(a)(1) in any</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> reasonable manner based on the medium, means, and context in</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> which You Share the Licensed Material. For example, it may be</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> reasonable to satisfy the conditions by providing a URI or</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> hyperlink to a resource that includes the required</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> information.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 3. If requested by the Licensor, You must remove any of the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> information required by Section 3(a)(1)(A) to the extent</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> reasonably practicable.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 4. If You Share Adapted Material You produce, the Adapter\'s</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> License You apply must not prevent recipients of the Adapted</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Material from complying with this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 4 -- Sui Generis Database Rights.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Where the Licensed Rights include Sui Generis Database Rights that</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">apply to Your use of the Licensed Material:</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. for the avoidance of doubt, Section 2(a)(1) grants You the right</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> to extract, reuse, reproduce, and Share all or a substantial</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> portion of the contents of the database;</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. if You include all or a substantial portion of the database</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> contents in a database in which You have Sui Generis Database</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Rights, then the database in which You have Sui Generis Database</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Rights (but not its individual contents) is Adapted Material; and</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> c. You must comply with the conditions in Section 3(a) if You Share</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> all or a substantial portion of the contents of the database.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">For the avoidance of doubt, this Section 4 supplements and does not</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">replace Your obligations under this Public License where the Licensed</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Rights include other Copyright and Similar Rights.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 5 -- Disclaimer of Warranties and Limitation of Liability.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> c. The disclaimer of warranties and limitation of liability provided</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> above shall be interpreted in a manner that, to the extent</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> possible, most closely approximates an absolute disclaimer and</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> waiver of all liability.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 6 -- Term and Termination.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. This Public License applies for the term of the Copyright and</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Similar Rights licensed here. However, if You fail to comply with</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> this Public License, then Your rights under this Public License</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> terminate automatically.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. Where Your right to use the Licensed Material has terminated under</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Section 6(a), it reinstates:</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 1. automatically as of the date the violation is cured, provided</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> it is cured within 30 days of Your discovery of the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> violation; or</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> 2. upon express reinstatement by the Licensor.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> For the avoidance of doubt, this Section 6(b) does not affect any</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> right the Licensor may have to seek remedies for Your violations</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> of this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> c. For the avoidance of doubt, the Licensor may also offer the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensed Material under separate terms or conditions or stop</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> distributing the Licensed Material at any time; however, doing so</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> will not terminate this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> d. Sections 1, 5, 6, 7, and 8 survive termination of this Public</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 7 -- Other Terms and Conditions.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. The Licensor shall not be bound by any additional or different</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> terms or conditions communicated by You unless expressly agreed.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. Any arrangements, understandings, or agreements regarding the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensed Material not stated herein are separate from and</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> independent of the terms and conditions of this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\">Section 8 -- Interpretation.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> a. For the avoidance of doubt, this Public License does not, and</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> shall not be interpreted to, reduce, limit, restrict, or impose</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> conditions on any use of the Licensed Material that could lawfully</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> be made without permission under this Public License.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> b. To the extent possible, if any provision of this Public License is</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> deemed unenforceable, it shall be automatically reformed to the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> minimum extent necessary to make it enforceable. If the provision</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> cannot be reformed, it shall be severed from this Public License</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> without affecting the enforceability of the remaining terms and</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> conditions.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> c. No term or condition of this Public License will be waived and no</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> failure to comply consented to unless expressly agreed to by the</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> Licensor.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> d. Nothing in this Public License constitutes or may be interpreted</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> as a limitation upon, or waiver of, any privileges and immunities</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> that apply to the Licensor or You, including from the legal</span></p>\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:8pt; color:#9f9f9f;\"> processes of any jurisdiction or authority.</span></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p>\n"
"<p align=\"center\" style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-size:8pt; color:#9f9f9f;\"><br /></p></body></html>"))
self.pushButton_10.setText(_translate("Form", "time table"))
        self.pushButton_6.setText(_translate("Form", "attendance"))
self.pushButton_8.setText(_translate("Form", "daily homework"))
self._studentname.setText(_translate("Form", "ABHIRAJ RANJAN"))
self._dob.setToolTip(_translate("Form", "<html><head/><body><p><br/></p></body></html>"))
self._dob.setText(_translate("Form", "TextLabel"))
self.label_16.setText(_translate("Form", "Class"))
self.label_13.setText(_translate("Form", "Father\'s Name"))
self._cat.setText(_translate("Form", "TextLabel"))
self.label_52.setText(_translate("Form", "Religion"))
self.label_57.setText(_translate("Form", "Nationality"))
self._cast.setText(_translate("Form", "TextLabel"))
self._fathername.setText(_translate("Form", "TextLabel"))
self.label_54.setText(_translate("Form", "DOB"))
self.label_56.setText(_translate("Form", "Mobile No."))
self._mothername.setText(_translate("Form", "TextLabel"))
self.label_42.setText(_translate("Form", "Category"))
self.label_38.setText(_translate("Form", "perma. Address"))
self._doa.setText(_translate("Form", "TextLabel"))
self._permaadd.setText(_translate("Form", "TextLabel"))
        self.label_50.setText(_translate("Form", "Mother\'s Name"))
self._religion.setText(_translate("Form", "TextLabel"))
self._class.setText(_translate("Form", "TextLabel"))
self.label_22.setText(_translate("Form", "Cast"))
self.label_58.setText(_translate("Form", "DOA"))
self.label_41.setText(_translate("Form", "corr. Address"))
self._phoneno.setText(_translate("Form", "TextLabel"))
self._nationality.setText(_translate("Form", "TextLabel"))
self._corradd.setText(_translate("Form", "TextLabel"))
self.textEdit_2.setToolTip(_translate("Form", "notepad"))
self.textEdit_2.setHtml(_translate("Form", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Segoe UI Light\'; font-size:10pt; font-weight:400; font-style:normal;\">\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.textEdit_2.setPlaceholderText(_translate("Form", "your personal notepad"))
self.label_29.setText(_translate("Form", "<html><head/><body><p align=\"center\"><span style=\" font-size:26pt;\">Vishwa Bharti Public School Dwarka</span></p></body></html>"))
self.label_36.setText(_translate("Form", "<html><head/><body><p><span style=\" color:#f74f52;\">Login</span></p></body></html>"))
self.label_31.setText(_translate("Form", "<html><head/><body><p><span style=\" color:#55ff7f;\">Name</span></p></body></html>"))
self.label_34.setText(_translate("Form", "<html><head/><body><p><span style=\" color:#55ff7f;\">Password</span></p></body></html>"))
self.radioButton_4.setText(_translate("Form", "Students"))
self.radioButton_5.setText(_translate("Form", "Teachers"))
self.pushButton_4.setText(_translate("Form", "Submit"))
self.label_7.setText(_translate("Form", "My Classes"))
self.label_8.setText(_translate("Form", "My Assignments"))
self.label_credits.setText(_translate("Form", "<html><head/><body><p>Logged in as </p></body></html>"))
self.pushButton_7.setToolTip(_translate("Form", "logout"))
self.label_3.setText(_translate("Form", "Made with <3 by Abhiraj Ranjan and Roshita Kaushik"))
avg_line_length: 82.892513 | max_line_length: 282 | alphanum_fraction: 0.680141
hexsha: cf0dd6cbbaca6b19028e127983a8afb5345ba650 | size: 8865 | ext: py | lang: Python
max_stars_repo_path: tensorflow/mnist.py | max_stars_repo_name: ColumbiaDVMM/Heated_Up_Softmax_Embedding | max_stars_repo_head_hexsha: cb62d28e5faaf7fdb134b31c461125e3fef50d06 | max_stars_repo_licenses: ["BSD-2-Clause"] | max_stars_count: 44 | max_stars_repo_stars_event: 2018-09-13T01:26:47.000Z to 2022-01-23T17:15:50.000Z
max_issues_repo_path: tensorflow/mnist.py | max_issues_repo_name: mangye16/Heated_Up_Softmax_Embedding | max_issues_repo_head_hexsha: cb62d28e5faaf7fdb134b31c461125e3fef50d06 | max_issues_repo_licenses: ["BSD-2-Clause"] | max_issues_count: null
max_forks_repo_path: tensorflow/mnist.py | max_forks_repo_name: mangye16/Heated_Up_Softmax_Embedding | max_forks_repo_head_hexsha: cb62d28e5faaf7fdb134b31c461125e3fef50d06 | max_forks_repo_licenses: ["BSD-2-Clause"] | max_forks_count: 13 | max_forks_repo_forks_event: 2018-09-13T02:36:03.000Z to 2021-01-12T10:55:29.000Z
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for downloading and reading MNIST data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import numpy
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.contrib.learn.python.learn.datasets import base
from tensorflow.python.framework import dtypes
SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/'
def _read32(bytestream):
dt = numpy.dtype(numpy.uint32).newbyteorder('>')
return numpy.frombuffer(bytestream.read(4), dtype=dt)[0]
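# Hedged note (ours, not in the original file): the IDX headers in the MNIST
# files are big-endian, hence the '>' byte order above; e.g. _read32 over the
# bytes 00 00 08 03 returns 2051, the image-file magic number checked below.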
def extract_images(f):
"""Extract the images into a 4D uint8 numpy array [index, y, x, depth].
Args:
f: A file object that can be passed into a gzip reader.
Returns:
data: A 4D uint8 numpy array [index, y, x, depth].
Raises:
ValueError: If the bytestream does not start with 2051.
"""
print('Extracting', f.name)
with gzip.GzipFile(fileobj=f) as bytestream:
magic = _read32(bytestream)
if magic != 2051:
raise ValueError('Invalid magic number %d in MNIST image file: %s' %
(magic, f.name))
num_images = _read32(bytestream)
rows = _read32(bytestream)
cols = _read32(bytestream)
buf = bytestream.read(rows * cols * num_images)
data = numpy.frombuffer(buf, dtype=numpy.uint8)
data = data.reshape(num_images, rows, cols, 1)
return data
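# Hedged illustration (ours, not part of the original module): the IDX image
# format parsed above is just a big-endian header (magic 2051, image count,
# rows, cols) followed by raw uint8 pixels. This helper builds a tiny gzipped
# IDX stream in memory and round-trips it through extract_images; the names
# _DemoNamedBytesIO/_demo_extract_images and the 2x2 sizes are invented for
# the demo.
def _demo_extract_images():
  import io
  import struct
  class _DemoNamedBytesIO(io.BytesIO):
    name = 'demo-images.gz'  # extract_images prints f.name
  header = struct.pack('>IIII', 2051, 1, 2, 2)  # magic, count, rows, cols
  pixels = bytes(bytearray([0, 1, 2, 3]))       # one 2x2 image
  raw = _DemoNamedBytesIO()
  with gzip.GzipFile(fileobj=raw, mode='wb') as gz:
    gz.write(header + pixels)
  raw.seek(0)
  data = extract_images(raw)
  assert data.shape == (1, 2, 2, 1)
  return data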
def dense_to_one_hot(labels_dense, num_classes):
"""Convert class labels from scalars to one-hot vectors."""
num_labels = labels_dense.shape[0]
index_offset = numpy.arange(num_labels) * num_classes
labels_one_hot = numpy.zeros((num_labels, num_classes))
labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
return labels_one_hot
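# Hedged worked example (ours, not in the original file): the flat-index
# trick above sets all the ones in a single vectorized write. For labels
# [1, 0, 2] with num_classes=3, index_offset is [0, 3, 6], so flat positions
# 1, 3, and 8 of the (3, 3) zero array are set:
#   dense_to_one_hot(numpy.array([1, 0, 2]), 3)
#   -> [[0., 1., 0.],
#       [1., 0., 0.],
#       [0., 0., 1.]]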
def extract_labels(f, one_hot=False, num_classes=10):
"""Extract the labels into a 1D uint8 numpy array [index].
Args:
f: A file object that can be passed into a gzip reader.
    one_hot: If True, encode the labels as one-hot vectors.
num_classes: Number of classes for the one hot encoding.
Returns:
labels: a 1D uint8 numpy array.
Raises:
    ValueError: If the bytestream doesn't start with 2049.
"""
print('Extracting', f.name)
with gzip.GzipFile(fileobj=f) as bytestream:
magic = _read32(bytestream)
if magic != 2049:
raise ValueError('Invalid magic number %d in MNIST label file: %s' %
(magic, f.name))
num_items = _read32(bytestream)
buf = bytestream.read(num_items)
labels = numpy.frombuffer(buf, dtype=numpy.uint8)
if one_hot:
return dense_to_one_hot(labels, num_classes)
return labels
class DataSet(object):
def __init__(self,
images,
labels,
fake_data=False,
one_hot=False,
dtype=dtypes.float32,
reshape=True):
"""Construct a DataSet.
    one_hot arg is used only if fake_data is true. `dtype` can be either
    `uint8` to leave the input as `[0, 255]`, or `float32` to rescale into
    `[0, 1]` and then shift into `[-1, 1]` (zero-centered inputs).
"""
dtype = dtypes.as_dtype(dtype).base_dtype
if dtype not in (dtypes.uint8, dtypes.float32):
raise TypeError('Invalid image dtype %r, expected uint8 or float32' %
dtype)
if fake_data:
self._num_examples = 10000
self.one_hot = one_hot
else:
assert images.shape[0] == labels.shape[0], (
'images.shape: %s labels.shape: %s' % (images.shape, labels.shape))
self._num_examples = images.shape[0]
# Convert shape from [num examples, rows, columns, depth]
# to [num examples, rows*columns] (assuming depth == 1)
if reshape:
assert images.shape[3] == 1
images = images.reshape(images.shape[0],
images.shape[1] * images.shape[2])
if dtype == dtypes.float32:
      # Convert from [0, 255] to [0.0, 1.0], then shift to [-1.0, 1.0] so the
      # inputs are zero-centered (this differs from the stock TensorFlow code).
      images = images.astype(numpy.float32)
      images = numpy.multiply(images, 1.0 / 255.0)
      images = images * 2.0 - 1.0
self._images = images
self._labels = labels
self._epochs_completed = 0
self._index_in_epoch = 0
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
@property
def num_examples(self):
return self._num_examples
@property
def epochs_completed(self):
return self._epochs_completed
def next_batch(self, batch_size, fake_data=False):
"""Return the next `batch_size` examples from this data set."""
if fake_data:
fake_image = [1] * 784
if self.one_hot:
fake_label = [1] + [0] * 9
else:
fake_label = 0
return [fake_image for _ in xrange(batch_size)], [
fake_label for _ in xrange(batch_size)
]
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
# Finished epoch
self._epochs_completed += 1
# Shuffle the data
perm = numpy.arange(self._num_examples)
numpy.random.shuffle(perm)
self._images = self._images[perm]
self._labels = self._labels[perm]
# Start next epoch
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
return self._images[start:end], self._labels[start:end]
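  # Hedged illustration (ours, not in the original file): with 100 examples
  # and batch_size=32, successive calls return slices [0:32], [32:64],
  # [64:96]; the fourth call would overrun, so epochs_completed increments,
  # the data is reshuffled, the leftover tail [96:100] is skipped, and the
  # call returns slice [0:32] of the new permutation.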
def next_batch_test(self, batch_size, fake_data=False):
"""Return the next `batch_size` examples from this data set."""
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
# Finished epoch
self._epochs_completed += 1
# Start next epoch
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
return self._images[start:end], self._labels[start:end]
def read_data_sets(train_dir,
fake_data=False,
one_hot=False,
dtype=dtypes.float32,
reshape=True,
validation_size=5000):
if fake_data:
def fake():
return DataSet([], [], fake_data=True, one_hot=one_hot, dtype=dtype)
train = fake()
validation = fake()
test = fake()
return base.Datasets(train=train, validation=validation, test=test)
TRAIN_IMAGES = 'train-images-idx3-ubyte.gz'
TRAIN_LABELS = 'train-labels-idx1-ubyte.gz'
TEST_IMAGES = 't10k-images-idx3-ubyte.gz'
TEST_LABELS = 't10k-labels-idx1-ubyte.gz'
local_file = base.maybe_download(TRAIN_IMAGES, train_dir,
SOURCE_URL + TRAIN_IMAGES)
with open(local_file, 'rb') as f:
train_images = extract_images(f)
local_file = base.maybe_download(TRAIN_LABELS, train_dir,
SOURCE_URL + TRAIN_LABELS)
with open(local_file, 'rb') as f:
train_labels = extract_labels(f, one_hot=one_hot)
local_file = base.maybe_download(TEST_IMAGES, train_dir,
SOURCE_URL + TEST_IMAGES)
with open(local_file, 'rb') as f:
test_images = extract_images(f)
local_file = base.maybe_download(TEST_LABELS, train_dir,
SOURCE_URL + TEST_LABELS)
with open(local_file, 'rb') as f:
test_labels = extract_labels(f, one_hot=one_hot)
if not 0 <= validation_size <= len(train_images):
raise ValueError(
'Validation size should be between 0 and {}. Received: {}.'
.format(len(train_images), validation_size))
validation_images = train_images[:validation_size]
validation_labels = train_labels[:validation_size]
train_images = train_images[validation_size:]
train_labels = train_labels[validation_size:]
train = DataSet(train_images, train_labels, dtype=dtype, reshape=reshape)
validation = DataSet(validation_images,
validation_labels,
dtype=dtype,
reshape=reshape)
test = DataSet(test_images, test_labels, dtype=dtype, reshape=reshape)
return base.Datasets(train=train, validation=validation, test=test)
def load_mnist(train_dir='MNIST-data'):
return read_data_sets(train_dir)
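# A minimal usage sketch, assuming the MNIST archives can be downloaded (or
# already sit) under the given directory:
# >>> mnist = read_data_sets('MNIST-data', one_hot=True)
# >>> mnist.train.next_batch(100)[0].shape  # (100, 784) floats in [-1, 1]
# >>> mnist.train.next_batch(100)[1].shape  # (100, 10) one-hot labels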
| 33.327068
| 80
| 0.659109
|
6badeac5cc6a6f18405aaf63478d4baf9fa0166d
| 672
|
py
|
Python
|
backend/estruturas_de_dados/fila.py
|
cristianomg/RecomendacaoFilmesComGrafos
|
eaa78ccd07e7df122016951c53f6e1474b281231
|
[
"MIT"
] | null | null | null |
backend/estruturas_de_dados/fila.py
|
cristianomg/RecomendacaoFilmesComGrafos
|
eaa78ccd07e7df122016951c53f6e1474b281231
|
[
"MIT"
] | 3
|
2021-03-31T20:24:36.000Z
|
2021-12-13T20:26:37.000Z
|
backend/estruturas_de_dados/fila.py
|
cristianomg/RecomendacaoFilmesComGrafos
|
eaa78ccd07e7df122016951c53f6e1474b281231
|
[
"MIT"
] | null | null | null |
from estruturas_de_dados import lista_ligada
class Fila:
def __init__(self):
self.__elementos = lista_ligada.ListaLigada()
@property
def tamanho(self):
return self.__elementos.tamanho
@property
def inicio(self):
return self.__elementos.recuperar_elemento_no(0)
def enfilerar(self, elemento):
self.__elementos.inserir(elemento)
def desenfilerar(self):
if self.__elementos.tamanho >= 1:
elemento = self.__elementos.recuperar_elemento_no(0)
self.__elementos.remover_pos(0)
return elemento
else:
return None
def contem(self, elemento):
return self.__elementos.contem(elemento)
def __str__(self):
return self.__elementos.__str__()
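# A minimal usage sketch as doctest-style comments, assuming ListaLigada's
# inserir appends at the tail and recuperar_elemento_no returns the stored
# element (both assumptions inferred, not confirmed, from this file):
# >>> fila = Fila()
# >>> fila.enfilerar('a')
# >>> fila.enfilerar('b')
# >>> fila.tamanho
# 2
# >>> fila.desenfilerar()  # FIFO: first element in is first out
# 'a'
# >>> fila.desenfilerar()
# 'b'
# >>> fila.desenfilerar() is None  # an empty queue yields None
# True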
| 20.363636
| 55
| 0.763393
|
463a2003d35379dae8c79101b24b2dc3db306ca5
| 58
|
py
|
Python
|
hello.py
|
aschrad2/electron-app-python
|
47b7ae9977a5dc9264675dc4a31ae9d469c45802
|
[
"CC0-1.0"
] | null | null | null |
hello.py
|
aschrad2/electron-app-python
|
47b7ae9977a5dc9264675dc4a31ae9d469c45802
|
[
"CC0-1.0"
] | null | null | null |
hello.py
|
aschrad2/electron-app-python
|
47b7ae9977a5dc9264675dc4a31ae9d469c45802
|
[
"CC0-1.0"
] | null | null | null |
import sys
print('Hello from Python!')
sys.stdout.flush()
| 14.5
| 27
| 0.741379
|
136d3d14ce82dc33463f9120a0ab31de31119f41
| 8,410
|
py
|
Python
|
airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py
|
Daemonxiao/airbyte
|
34146564ba17423da8000e983722094f2426367e
|
[
"MIT"
] | null | null | null |
airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py
|
Daemonxiao/airbyte
|
34146564ba17423da8000e983722094f2426367e
|
[
"MIT"
] | null | null | null |
airbyte-integrations/connectors/source-amazon-ads/source_amazon_ads/streams/report_streams/products_report.py
|
Daemonxiao/airbyte
|
34146564ba17423da8000e983722094f2426367e
|
[
"MIT"
] | null | null | null |
#
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
from copy import copy
from .report_streams import RecordType, ReportStream
METRICS_MAP = {
"campaigns": [
"bidPlus",
"campaignName",
"campaignId",
"campaignStatus",
"campaignBudget",
"campaignRuleBasedBudget",
"applicableBudgetRuleId",
"applicableBudgetRuleName",
"impressions",
"clicks",
"cost",
"attributedConversions1d",
"attributedConversions7d",
"attributedConversions14d",
"attributedConversions30d",
"attributedConversions1dSameSKU",
"attributedConversions7dSameSKU",
"attributedConversions14dSameSKU",
"attributedConversions30dSameSKU",
"attributedUnitsOrdered1d",
"attributedUnitsOrdered7d",
"attributedUnitsOrdered14d",
"attributedUnitsOrdered30d",
"attributedSales1d",
"attributedSales7d",
"attributedSales14d",
"attributedSales30d",
"attributedSales1dSameSKU",
"attributedSales7dSameSKU",
"attributedSales14dSameSKU",
"attributedSales30dSameSKU",
"attributedUnitsOrdered1dSameSKU",
"attributedUnitsOrdered7dSameSKU",
"attributedUnitsOrdered14dSameSKU",
"attributedUnitsOrdered30dSameSKU",
],
"adGroups": [
"campaignName",
"campaignId",
"adGroupName",
"adGroupId",
"impressions",
"clicks",
"cost",
"attributedConversions1d",
"attributedConversions7d",
"attributedConversions14d",
"attributedConversions30d",
"attributedConversions1dSameSKU",
"attributedConversions7dSameSKU",
"attributedConversions14dSameSKU",
"attributedConversions30dSameSKU",
"attributedUnitsOrdered1d",
"attributedUnitsOrdered7d",
"attributedUnitsOrdered14d",
"attributedUnitsOrdered30d",
"attributedSales1d",
"attributedSales7d",
"attributedSales14d",
"attributedSales30d",
"attributedSales1dSameSKU",
"attributedSales7dSameSKU",
"attributedSales14dSameSKU",
"attributedSales30dSameSKU",
"attributedUnitsOrdered1dSameSKU",
"attributedUnitsOrdered7dSameSKU",
"attributedUnitsOrdered14dSameSKU",
"attributedUnitsOrdered30dSameSKU",
],
"keywords": [
"campaignName",
"campaignId",
"adGroupName",
"adGroupId",
"keywordId",
"keywordText",
"matchType",
"impressions",
"clicks",
"cost",
"attributedConversions1d",
"attributedConversions7d",
"attributedConversions14d",
"attributedConversions30d",
"attributedConversions1dSameSKU",
"attributedConversions7dSameSKU",
"attributedConversions14dSameSKU",
"attributedConversions30dSameSKU",
"attributedUnitsOrdered1d",
"attributedUnitsOrdered7d",
"attributedUnitsOrdered14d",
"attributedUnitsOrdered30d",
"attributedSales1d",
"attributedSales7d",
"attributedSales14d",
"attributedSales30d",
"attributedSales1dSameSKU",
"attributedSales7dSameSKU",
"attributedSales14dSameSKU",
"attributedSales30dSameSKU",
"attributedUnitsOrdered1dSameSKU",
"attributedUnitsOrdered7dSameSKU",
"attributedUnitsOrdered14dSameSKU",
"attributedUnitsOrdered30dSameSKU",
],
"productAds": [
"campaignName",
"campaignId",
"adGroupName",
"adGroupId",
"adId",
"impressions",
"clicks",
"cost",
"currency",
"asin",
"attributedConversions1d",
"attributedConversions7d",
"attributedConversions14d",
"attributedConversions30d",
"attributedConversions1dSameSKU",
"attributedConversions7dSameSKU",
"attributedConversions14dSameSKU",
"attributedConversions30dSameSKU",
"attributedUnitsOrdered1d",
"attributedUnitsOrdered7d",
"attributedUnitsOrdered14d",
"attributedUnitsOrdered30d",
"attributedSales1d",
"attributedSales7d",
"attributedSales14d",
"attributedSales30d",
"attributedSales1dSameSKU",
"attributedSales7dSameSKU",
"attributedSales14dSameSKU",
"attributedSales30dSameSKU",
"attributedUnitsOrdered1dSameSKU",
"attributedUnitsOrdered7dSameSKU",
"attributedUnitsOrdered14dSameSKU",
"attributedUnitsOrdered30dSameSKU",
],
"asins_keywords": [
"campaignName",
"campaignId",
"adGroupName",
"adGroupId",
"keywordId",
"keywordText",
"adId",
"asin",
"otherAsin",
"sku",
"currency",
"matchType",
"attributedUnitsOrdered1d",
"attributedUnitsOrdered7d",
"attributedUnitsOrdered14d",
"attributedUnitsOrdered30d",
"attributedUnitsOrdered1dOtherSKU",
"attributedUnitsOrdered7dOtherSKU",
"attributedUnitsOrdered14dOtherSKU",
"attributedUnitsOrdered30dOtherSKU",
"attributedSales1dOtherSKU",
"attributedSales7dOtherSKU",
"attributedSales14dOtherSKU",
"attributedSales30dOtherSKU",
],
"asins_targets": [
"campaignName",
"campaignId",
"adGroupName",
"adGroupId",
"adId",
"asin",
"otherAsin",
"sku",
"currency",
"matchType",
"attributedUnitsOrdered1d",
"attributedUnitsOrdered7d",
"attributedUnitsOrdered14d",
"attributedUnitsOrdered30d",
"attributedUnitsOrdered1dOtherSKU",
"attributedUnitsOrdered7dOtherSKU",
"attributedUnitsOrdered14dOtherSKU",
"attributedUnitsOrdered30dOtherSKU",
"attributedSales1dOtherSKU",
"attributedSales7dOtherSKU",
"attributedSales14dOtherSKU",
"attributedSales30dOtherSKU",
"targetId",
"targetingText",
"targetingType",
],
"targets": [
"campaignName",
"campaignId",
"adGroupName",
"adGroupId",
"targetId",
"targetingExpression",
"targetingText",
"targetingType",
"impressions",
"clicks",
"cost",
"attributedConversions1d",
"attributedConversions7d",
"attributedConversions14d",
"attributedConversions30d",
"attributedConversions1dSameSKU",
"attributedConversions7dSameSKU",
"attributedConversions14dSameSKU",
"attributedConversions30dSameSKU",
"attributedUnitsOrdered1d",
"attributedUnitsOrdered7d",
"attributedUnitsOrdered14d",
"attributedUnitsOrdered30d",
"attributedSales1d",
"attributedSales7d",
"attributedSales14d",
"attributedSales30d",
"attributedSales1dSameSKU",
"attributedSales7dSameSKU",
"attributedSales14dSameSKU",
"attributedSales30dSameSKU",
"attributedUnitsOrdered1dSameSKU",
"attributedUnitsOrdered7dSameSKU",
"attributedUnitsOrdered14dSameSKU",
"attributedUnitsOrdered30dSameSKU",
],
}
class SponsoredProductsReportStream(ReportStream):
"""
https://advertising.amazon.com/API/docs/en-us/sponsored-products/2-0/openapi#/Reports
"""
primary_key = ["profileId", "recordType", "reportDate"]
def report_init_endpoint(self, record_type: str) -> str:
return f"/v2/sp/{record_type}/report"
metrics_map = METRICS_MAP
def _get_init_report_body(self, report_date: str, record_type: str, profile):
metrics_list = self.metrics_map[record_type]
body = {
"reportDate": report_date,
}
if RecordType.ASINS in record_type:
body["campaignType"] = "sponsoredProducts"
if profile.accountInfo.type == "vendor":
metrics_list = copy(metrics_list)
metrics_list.remove("sku")
# adId is automatically added to the report by Amazon; requesting it explicitly causes an API error
if "adId" in metrics_list:
metrics_list.remove("adId")
return {**body, "metrics": ",".join(metrics_list)}
| 30.693431
| 96
| 0.62604
|
76f778ae23c140bc734436d62f6cb087460f8e7d
| 7,372
|
py
|
Python
|
field_application/field_application/campus_field/forms.py
|
tonylifepix/CryD-r
|
a38fdb003a44cd2905570424bea722316b923575
|
[
"Apache-2.0"
] | null | null | null |
field_application/field_application/campus_field/forms.py
|
tonylifepix/CryD-r
|
a38fdb003a44cd2905570424bea722316b923575
|
[
"Apache-2.0"
] | null | null | null |
field_application/field_application/campus_field/forms.py
|
tonylifepix/CryD-r
|
a38fdb003a44cd2905570424bea722316b923575
|
[
"Apache-2.0"
] | null | null | null |
#-*- coding: utf-8 -*-
from datetime import timedelta
from django import forms
from django.utils import timezone
from django.forms import ModelForm
from django.forms.extras.widgets import SelectDateWidget
from django.forms import Textarea, RadioSelect
from django.forms import CheckboxSelectMultiple
from django.db.models import Q
from field_application.campus_field.models import ExhibitApplication
from field_application.campus_field.models import PublicityApplication
# not necessary now
def check_exhibit_board_num(place_list, start_date, end_date,
time_list, exhibit_board_number):
board_num_upper_limit = \
{u'CD座文化长廊': 40, u'A座文化大厅': 30,
u'西南餐厅前空地': 45, u'荔山餐厅前空地': 45}
for place in place_list:
for i in range((end_date-start_date).days+1):
date = start_date + timedelta(days=i)
for time in time_list:
apps = ExhibitApplication.objects.filter(
Q(start_date__lte=date) & \
Q(end_date__gte=date),
place__contains=place,
time__contains=time).filter(approved=True)
used_num = \
sum((app.exhibit_board_number for app in apps))
if used_num + exhibit_board_number > \
board_num_upper_limit[place]:
msg = place + \
date.strftime(' %Y-%m-%d ') + \
time + \
u'只剩下%d个展板' % \
(board_num_upper_limit[place]-used_num)
return msg
return None
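# A minimal usage sketch (all argument values are illustrative); the helper
# returns an error message as soon as one place runs out of exhibit boards
# on any requested day/time slot, and None when every slot still has capacity:
# >>> msg = check_exhibit_board_num(
# ...     place_list=[u'A座文化大厅'],
# ...     start_date=date(2014, 5, 1), end_date=date(2014, 5, 3),
# ...     time_list=[u'...'], exhibit_board_number=10)
# >>> if msg is not None: ...  # surface msg as a form validation error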
class ExhibitApplicationForm(forms.ModelForm):
exhibit_board_number = forms.IntegerField(min_value=0, max_value=50)
class Meta:
model = ExhibitApplication
exclude = ['organization', 'approved', 'application_time']
widgets = {
'start_date': SelectDateWidget(),
'end_date': SelectDateWidget(),
'activity_summary': Textarea(),
'remarks': Textarea(),
}
def clean_exhibit_board_number(self):
num = self.cleaned_data.get('exhibit_board_number')
if num <= 0:
raise forms.ValidationError(u'展板数应为正数')
return num
def clean(self):
# In the Django validation workflow, clean_<field>() is not
# called for fields that were not supplied
if 'place' not in self.cleaned_data or \
'exhibit_board_number' not in self.cleaned_data or\
'start_date' not in self.cleaned_data or \
'end_date' not in self.cleaned_data or \
'time' not in self.cleaned_data:
return super(ExhibitApplicationForm, self).clean()
# check date
start_date = self.cleaned_data['start_date']
end_date = self.cleaned_data['end_date']
if end_date < start_date:
msg = u'结束时间不能早于开始时间'
self._errors['end_date'] = self.error_class([msg])
del self.cleaned_data['end_date']
return super(ExhibitApplicationForm, self).clean()
if end_date > start_date + timedelta(days=6):
msg = u'展览时间不得超过7天'
self._errors['end_date'] = self.error_class([msg])
del self.cleaned_data['end_date']
return super(ExhibitApplicationForm, self).clean()
return super(ExhibitApplicationForm, self).clean()
def clean_start_date(self):
start_date = self.cleaned_data['start_date']
now = timezone.now().date()
if start_date < now:
raise forms.ValidationError(u'所填日期已过')
if start_date >= now + timedelta(days=14):
raise forms.ValidationError(
u'申请的场地使用时间距离现在不能超过14天')
return start_date
def clean_end_date(self):
end_date = self.cleaned_data.get('end_date')
now = timezone.now().date()
if end_date < now:
raise forms.ValidationError(u'所填日期已过')
if end_date >= now + timedelta(days=14):
raise forms.ValidationError(u'申请的场地使用时间距离现在不能超过14天')
return end_date
# Check whether the place already has an approved application (not used now)
def check_publicity(place_list,
start_date, end_date, time_list):
for place in place_list:
for i in range((end_date-start_date).days+1):
date = start_date + timedelta(days=i)
for time in time_list:
if PublicityApplication.objects.filter(
Q(start_date__lte=date) & \
Q(end_date__gte=date),
place__contains=place,
time__contains=time).filter(approved=True):
msg = place + \
date.strftime(' %Y-%m-%d ') + \
time + \
u'已经被人使用'
return msg
return None
class PublicityApplicationForm(forms.ModelForm):
class Meta:
model = PublicityApplication
exclude = ['organization', 'approved', 'application_time']
widgets = {
'start_date': SelectDateWidget(),
'end_date': SelectDateWidget(),
'activity_summary': Textarea(),
'remarks': Textarea(),
}
def clean_start_date(self):
start_date = self.cleaned_data.get('start_date')
now = timezone.now().date()
if start_date < now:
raise forms.ValidationError(u'所填日期已过')
if start_date >= now + timedelta(days=14):
raise forms.ValidationError(u'申请的场地使用时间距离现在不能超过14天')
return start_date
def clean_end_date(self):
end_date = self.cleaned_data.get('end_date')
now = timezone.now().date()
if end_date < now:
raise forms.ValidationError(u'所填日期已过')
if end_date >= now + timedelta(days=14):
raise forms.ValidationError(u'申请的场地使用时间距离现在不能超过14天')
return end_date
def clean(self):
if 'place' not in self.cleaned_data or \
'start_date' not in self.cleaned_data or \
'end_date' not in self.cleaned_data or \
'time' not in self.cleaned_data:
return super(PublicityApplicationForm, self).clean()
start_date = self.cleaned_data.get('start_date')
end_date = self.cleaned_data.get('end_date')
# Check the start and end dates
if start_date and end_date and end_date < start_date:
msg = u'结束时间不能早于开始时间'
self._errors['end_date'] = self.error_class([msg])
del self.cleaned_data['end_date']
return super(PublicityApplicationForm, self).clean()
if start_date and end_date \
and end_date > start_date + timedelta(days=1):
msg = u'展览时间不得超过2天'
self._errors['end_date'] = self.error_class([msg])
del self.cleaned_data['end_date']
place = self.cleaned_data.get('place')
other_place = self.cleaned_data.get('other_place')
if u'其它' in place and not other_place:
msg = u"选择‘其它’场地时请在右栏输入框填入所申请的场地"
self._errors['place'] = self.error_class([msg])
elif not u'其它' in place and other_place:
msg = u"若要申请其它场地,请勾选‘其它’,否则右边输入框请留空"
self._errors['place'] = self.error_class([msg])
return super(PublicityApplicationForm, self).clean()
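# A minimal validation sketch, assuming a bound Django form in a view (the
# request.user.organization attribute is hypothetical, shown only because
# 'organization' is excluded from the form fields above):
# form = PublicityApplicationForm(request.POST)
# if form.is_valid():
#     application = form.save(commit=False)
#     application.organization = request.user.organization  # hypothetical
#     application.save()
# else:
#     errors = form.errors  # includes the date/place checks from clean()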
| 38.395833
| 72
| 0.591427
|
555ad46ebe74143571835e9815ed6869fefb2d6d
| 33,427
|
py
|
Python
|
isort/settings.py
|
viourr/isort
|
3dc4d89003d5aa2f1f0c511346fb5622b05702c4
|
[
"MIT"
] | null | null | null |
isort/settings.py
|
viourr/isort
|
3dc4d89003d5aa2f1f0c511346fb5622b05702c4
|
[
"MIT"
] | null | null | null |
isort/settings.py
|
viourr/isort
|
3dc4d89003d5aa2f1f0c511346fb5622b05702c4
|
[
"MIT"
] | null | null | null |
"""isort/settings.py.
Defines how the default settings for isort should be loaded
"""
import configparser
import fnmatch
import os
import posixpath
import re
import stat
import subprocess # nosec: Needed for gitignore support.
import sys
from functools import lru_cache
from pathlib import Path
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
FrozenSet,
Iterable,
List,
Optional,
Pattern,
Set,
Tuple,
Type,
Union,
)
from warnings import warn
from . import sorting, stdlibs
from ._future import dataclass, field
from .exceptions import (
FormattingPluginDoesNotExist,
InvalidSettingsPath,
ProfileDoesNotExist,
SortingFunctionDoesNotExist,
UnsupportedSettings,
)
from .profiles import profiles
from .sections import DEFAULT as SECTION_DEFAULTS
from .sections import FIRSTPARTY, FUTURE, LOCALFOLDER, STDLIB, THIRDPARTY
from .wrap_modes import WrapModes
from .wrap_modes import from_string as wrap_mode_from_string
if TYPE_CHECKING:
toml: Any
else:
from ._vendored import toml
_SHEBANG_RE = re.compile(br"^#!.*\bpython[23w]?\b")
CYTHON_EXTENSIONS = frozenset({"pyx", "pxd"})
SUPPORTED_EXTENSIONS = frozenset({"py", "pyi", *CYTHON_EXTENSIONS})
BLOCKED_EXTENSIONS = frozenset({"pex"})
FILE_SKIP_COMMENTS: Tuple[str, ...] = (
"isort:" + "skip_file",
"isort: " + "skip_file",
) # Concatenated to avoid this file being skipped
MAX_CONFIG_SEARCH_DEPTH: int = 25  # The number of parent directories to search for a config file within
STOP_CONFIG_SEARCH_ON_DIRS: Tuple[str, ...] = (".git", ".hg")
VALID_PY_TARGETS: Tuple[str, ...] = tuple(
target.replace("py", "") for target in dir(stdlibs) if not target.startswith("_")
)
CONFIG_SOURCES: Tuple[str, ...] = (
".isort.cfg",
"pyproject.toml",
"setup.cfg",
"tox.ini",
".editorconfig",
)
DEFAULT_SKIP: FrozenSet[str] = frozenset(
{
".venv",
"venv",
".tox",
".eggs",
".git",
".hg",
".mypy_cache",
".nox",
".svn",
".bzr",
"_build",
"buck-out",
"build",
"dist",
".pants.d",
".direnv",
"node_modules",
"__pypackages__",
}
)
CONFIG_SECTIONS: Dict[str, Tuple[str, ...]] = {
".isort.cfg": ("settings", "isort"),
"pyproject.toml": ("tool.isort",),
"setup.cfg": ("isort", "tool:isort"),
"tox.ini": ("isort", "tool:isort"),
".editorconfig": ("*", "*.py", "**.py", "*.{py}"),
}
FALLBACK_CONFIG_SECTIONS: Tuple[str, ...] = ("isort", "tool:isort", "tool.isort")
IMPORT_HEADING_PREFIX = "import_heading_"
KNOWN_PREFIX = "known_"
KNOWN_SECTION_MAPPING: Dict[str, str] = {
STDLIB: "STANDARD_LIBRARY",
FUTURE: "FUTURE_LIBRARY",
FIRSTPARTY: "FIRST_PARTY",
THIRDPARTY: "THIRD_PARTY",
LOCALFOLDER: "LOCAL_FOLDER",
}
RUNTIME_SOURCE = "runtime"
DEPRECATED_SETTINGS = ("not_skip", "keep_direct_and_as_imports")
_STR_BOOLEAN_MAPPING = {
"y": True,
"yes": True,
"t": True,
"on": True,
"1": True,
"true": True,
"n": False,
"no": False,
"f": False,
"off": False,
"0": False,
"false": False,
}
@dataclass(frozen=True)
class _Config:
"""Defines the data schema and defaults used for isort configuration.
NOTE: known lists, such as known_standard_library, are intentionally not complete as they are
dynamically determined later on.
"""
py_version: str = "3"
force_to_top: FrozenSet[str] = frozenset()
skip: FrozenSet[str] = DEFAULT_SKIP
extend_skip: FrozenSet[str] = frozenset()
skip_glob: FrozenSet[str] = frozenset()
extend_skip_glob: FrozenSet[str] = frozenset()
skip_gitignore: bool = False
line_length: int = 79
wrap_length: int = 0
line_ending: str = ""
sections: Tuple[str, ...] = SECTION_DEFAULTS
no_sections: bool = False
known_future_library: FrozenSet[str] = frozenset(("__future__",))
known_third_party: FrozenSet[str] = frozenset()
known_first_party: FrozenSet[str] = frozenset()
known_local_folder: FrozenSet[str] = frozenset()
known_standard_library: FrozenSet[str] = frozenset()
extra_standard_library: FrozenSet[str] = frozenset()
known_other: Dict[str, FrozenSet[str]] = field(default_factory=dict)
multi_line_output: WrapModes = WrapModes.GRID # type: ignore
forced_separate: Tuple[str, ...] = ()
indent: str = " " * 4
comment_prefix: str = " #"
length_sort: bool = False
length_sort_straight: bool = False
length_sort_sections: FrozenSet[str] = frozenset()
add_imports: FrozenSet[str] = frozenset()
remove_imports: FrozenSet[str] = frozenset()
append_only: bool = False
reverse_relative: bool = False
force_single_line: bool = False
single_line_exclusions: Tuple[str, ...] = ()
default_section: str = THIRDPARTY
import_headings: Dict[str, str] = field(default_factory=dict)
balanced_wrapping: bool = False
use_parentheses: bool = False
order_by_type: bool = True
atomic: bool = False
lines_after_imports: int = -1
lines_between_sections: int = 1
lines_between_types: int = 0
combine_as_imports: bool = False
combine_star: bool = False
include_trailing_comma: bool = False
from_first: bool = False
verbose: bool = False
quiet: bool = False
force_adds: bool = False
force_alphabetical_sort_within_sections: bool = False
force_alphabetical_sort: bool = False
force_grid_wrap: int = 0
force_sort_within_sections: bool = False
lexicographical: bool = False
group_by_package: bool = False
ignore_whitespace: bool = False
no_lines_before: FrozenSet[str] = frozenset()
no_inline_sort: bool = False
ignore_comments: bool = False
case_sensitive: bool = False
sources: Tuple[Dict[str, Any], ...] = ()
virtual_env: str = ""
conda_env: str = ""
ensure_newline_before_comments: bool = False
directory: str = ""
profile: str = ""
honor_noqa: bool = False
src_paths: Tuple[Path, ...] = ()
old_finders: bool = False
remove_redundant_aliases: bool = False
float_to_top: bool = False
filter_files: bool = False
formatter: str = ""
formatting_function: Optional[Callable[[str, str, object], str]] = None
color_output: bool = False
treat_comments_as_code: FrozenSet[str] = frozenset()
treat_all_comments_as_code: bool = False
supported_extensions: FrozenSet[str] = SUPPORTED_EXTENSIONS
blocked_extensions: FrozenSet[str] = BLOCKED_EXTENSIONS
constants: FrozenSet[str] = frozenset()
classes: FrozenSet[str] = frozenset()
variables: FrozenSet[str] = frozenset()
dedup_headings: bool = False
only_sections: bool = False
only_modified: bool = False
combine_straight_imports: bool = False
auto_identify_namespace_packages: bool = True
namespace_packages: FrozenSet[str] = frozenset()
follow_links: bool = True
indented_import_headings: bool = True
honor_case_in_force_sorted_sections: bool = False
sort_relative_in_force_sorted_sections: bool = False
overwrite_in_place: bool = False
reverse_sort: bool = False
star_first: bool = False
import_dependencies: Dict[str, str] = field(default_factory=dict)
git_ignore: Dict[Path, Set[Path]] = field(default_factory=dict)
format_error: str = "{error}: {message}"
format_success: str = "{success}: {message}"
sort_order: str = "natural"
def __post_init__(self) -> None:
py_version = self.py_version
if py_version == "auto": # pragma: no cover
if sys.version_info.major == 2 and sys.version_info.minor <= 6:
py_version = "2"
elif sys.version_info.major == 3 and (
sys.version_info.minor <= 5 or sys.version_info.minor >= 9
):
py_version = "3"
else:
py_version = f"{sys.version_info.major}{sys.version_info.minor}"
if py_version not in VALID_PY_TARGETS:
raise ValueError(
f"The python version {py_version} is not supported. "
"You can set a python version with the -py or --python-version flag. "
f"The following versions are supported: {VALID_PY_TARGETS}"
)
if py_version != "all":
object.__setattr__(self, "py_version", f"py{py_version}")
if not self.known_standard_library:
object.__setattr__(
self, "known_standard_library", frozenset(getattr(stdlibs, self.py_version).stdlib)
)
if self.multi_line_output == WrapModes.VERTICAL_GRID_GROUPED_NO_COMMA: # type: ignore
vertical_grid_grouped = WrapModes.VERTICAL_GRID_GROUPED # type: ignore
object.__setattr__(self, "multi_line_output", vertical_grid_grouped)
if self.force_alphabetical_sort:
object.__setattr__(self, "force_alphabetical_sort_within_sections", True)
object.__setattr__(self, "no_sections", True)
object.__setattr__(self, "lines_between_types", 1)
object.__setattr__(self, "from_first", True)
if self.wrap_length > self.line_length:
raise ValueError(
"wrap_length must be set lower than or equal to line_length: "
f"{self.wrap_length} > {self.line_length}."
)
def __hash__(self) -> int:
return id(self)
_DEFAULT_SETTINGS = {**vars(_Config()), "source": "defaults"}
class Config(_Config):
def __init__(
self,
settings_file: str = "",
settings_path: str = "",
config: Optional[_Config] = None,
**config_overrides: Any,
):
self._known_patterns: Optional[List[Tuple[Pattern[str], str]]] = None
self._section_comments: Optional[Tuple[str, ...]] = None
self._skips: Optional[FrozenSet[str]] = None
self._skip_globs: Optional[FrozenSet[str]] = None
self._sorting_function: Optional[Callable[..., List[str]]] = None
if config:
config_vars = vars(config).copy()
config_vars.update(config_overrides)
config_vars["py_version"] = config_vars["py_version"].replace("py", "")
config_vars.pop("_known_patterns")
config_vars.pop("_section_comments")
config_vars.pop("_skips")
config_vars.pop("_skip_globs")
config_vars.pop("_sorting_function")
super().__init__(**config_vars) # type: ignore
return
# We can't use self.quiet to conditionally show warnings before super.__init__() is called
# at the end of this method. _Config is also frozen so setting self.quiet isn't possible.
# Therefore we extract quiet early here in a variable and use that in warning conditions.
quiet = config_overrides.get("quiet", False)
sources: List[Dict[str, Any]] = [_DEFAULT_SETTINGS]
config_settings: Dict[str, Any]
project_root: str
if settings_file:
config_settings = _get_config_data(
settings_file,
CONFIG_SECTIONS.get(os.path.basename(settings_file), FALLBACK_CONFIG_SECTIONS),
)
project_root = os.path.dirname(settings_file)
if not config_settings and not quiet:
warn(
f"A custom settings file was specified: {settings_file} but no configuration "
"was found inside. This can happen when [settings] is used as the config "
"header instead of [isort]. "
"See: https://pycqa.github.io/isort/docs/configuration/config_files"
"/#custom_config_files for more information."
)
elif settings_path:
if not os.path.exists(settings_path):
raise InvalidSettingsPath(settings_path)
settings_path = os.path.abspath(settings_path)
project_root, config_settings = _find_config(settings_path)
else:
config_settings = {}
project_root = os.getcwd()
profile_name = config_overrides.get("profile", config_settings.get("profile", ""))
profile: Dict[str, Any] = {}
if profile_name:
if profile_name not in profiles:
import pkg_resources
for plugin in pkg_resources.iter_entry_points("isort.profiles"):
profiles.setdefault(plugin.name, plugin.load())
if profile_name not in profiles:
raise ProfileDoesNotExist(profile_name)
profile = profiles[profile_name].copy()
profile["source"] = f"{profile_name} profile"
sources.append(profile)
if config_settings:
sources.append(config_settings)
if config_overrides:
config_overrides["source"] = RUNTIME_SOURCE
sources.append(config_overrides)
combined_config = {**profile, **config_settings, **config_overrides}
if "indent" in combined_config:
indent = str(combined_config["indent"])
if indent.isdigit():
indent = " " * int(indent)
else:
indent = indent.strip("'").strip('"')
if indent.lower() == "tab":
indent = "\t"
combined_config["indent"] = indent
known_other = {}
import_headings = {}
for key, value in tuple(combined_config.items()):
# Collect all known sections beyond those that have direct entries
if key.startswith(KNOWN_PREFIX) and key not in (
"known_standard_library",
"known_future_library",
"known_third_party",
"known_first_party",
"known_local_folder",
):
import_heading = key[len(KNOWN_PREFIX) :].lower()
maps_to_section = import_heading.upper()
combined_config.pop(key)
if maps_to_section in KNOWN_SECTION_MAPPING:
section_name = f"known_{KNOWN_SECTION_MAPPING[maps_to_section].lower()}"
if section_name in combined_config and not quiet:
warn(
f"Can't set both {key} and {section_name} in the same config file.\n"
f"Default to {section_name} if unsure."
"\n\n"
"See: https://pycqa.github.io/isort/"
"#custom-sections-and-ordering."
)
else:
combined_config[section_name] = frozenset(value)
else:
known_other[import_heading] = frozenset(value)
if maps_to_section not in combined_config.get("sections", ()) and not quiet:
warn(
f"`{key}` setting is defined, but {maps_to_section} is not"
" included in `sections` config option:"
f" {combined_config.get('sections', SECTION_DEFAULTS)}.\n\n"
"See: https://pycqa.github.io/isort/"
"#custom-sections-and-ordering."
)
if key.startswith(IMPORT_HEADING_PREFIX):
import_headings[key[len(IMPORT_HEADING_PREFIX) :].lower()] = str(value)
# Coerce all provided config values into their correct type
default_value = _DEFAULT_SETTINGS.get(key, None)
if default_value is None:
continue
combined_config[key] = type(default_value)(value)
for section in combined_config.get("sections", ()):
if section in SECTION_DEFAULTS:
continue
if section.lower() not in known_other:
config_keys = ", ".join(known_other.keys())
warn(
f"`sections` setting includes {section}, but no known_{section.lower()} "
"is defined. "
f"The following known_SECTION config options are defined: {config_keys}."
)
if "directory" not in combined_config:
combined_config["directory"] = (
os.path.dirname(config_settings["source"])
if config_settings.get("source", None)
else os.getcwd()
)
path_root = Path(combined_config.get("directory", project_root)).resolve()
path_root = path_root if path_root.is_dir() else path_root.parent
if "src_paths" not in combined_config:
combined_config["src_paths"] = (path_root / "src", path_root)
else:
src_paths: Set[Path] = set()
for src_path in combined_config.get("src_paths", ()):
full_paths = (
path_root.glob(src_path) if "*" in str(src_path) else [path_root / src_path]
)
for path in full_paths:
src_paths.add(path)
combined_config["src_paths"] = tuple(src_paths)
if "formatter" in combined_config:
import pkg_resources
for plugin in pkg_resources.iter_entry_points("isort.formatters"):
if plugin.name == combined_config["formatter"]:
combined_config["formatting_function"] = plugin.load()
break
else:
raise FormattingPluginDoesNotExist(combined_config["formatter"])
# Remove any config values that are used for creating config object but
# aren't defined in dataclass
combined_config.pop("source", None)
combined_config.pop("sources", None)
combined_config.pop("runtime_src_paths", None)
deprecated_options_used = [
option for option in combined_config if option in DEPRECATED_SETTINGS
]
if deprecated_options_used:
for deprecated_option in deprecated_options_used:
combined_config.pop(deprecated_option)
if not quiet:
warn(
"W0503: Deprecated config options were used: "
f"{', '.join(deprecated_options_used)}."
"Please see the 5.0.0 upgrade guide: "
"https://pycqa.github.io/isort/docs/upgrade_guides/5.0.0.html"
)
if known_other:
combined_config["known_other"] = known_other
if import_headings:
for import_heading_key in import_headings:
combined_config.pop(f"{IMPORT_HEADING_PREFIX}{import_heading_key}")
combined_config["import_headings"] = import_headings
unsupported_config_errors = {}
for option in set(combined_config.keys()).difference(
getattr(_Config, "__dataclass_fields__", {}).keys()
):
for source in reversed(sources):
if option in source:
unsupported_config_errors[option] = {
"value": source[option],
"source": source["source"],
}
if unsupported_config_errors:
raise UnsupportedSettings(unsupported_config_errors)
super().__init__(sources=tuple(sources), **combined_config) # type: ignore
def is_supported_filetype(self, file_name: str) -> bool:
_root, ext = os.path.splitext(file_name)
ext = ext.lstrip(".")
if ext in self.supported_extensions:
return True
if ext in self.blocked_extensions:
return False
# Skip editor backup files.
if file_name.endswith("~"):
return False
try:
if stat.S_ISFIFO(os.stat(file_name).st_mode):
return False
except OSError:
pass
try:
with open(file_name, "rb") as fp:
line = fp.readline(100)
except OSError:
return False
else:
return bool(_SHEBANG_RE.match(line))
def _check_folder_gitignore(self, folder: str) -> Optional[Path]:
env = {"LANG": "C.UTF-8"}
try:
topfolder_result = subprocess.check_output( # nosec # skipcq: PYL-W1510
["git", "-C", folder, "rev-parse", "--show-toplevel"], encoding="utf-8", env=env
)
except subprocess.CalledProcessError:
return None
git_folder = Path(topfolder_result.rstrip()).resolve()
files: List[str] = []
# don't check symlinks; they are either part of the repo (and would be
# checked twice) or external to it, in which case git won't know
# anything about them
for root, _dirs, git_files in os.walk(git_folder, followlinks=False):
for git_file in git_files:
git_path = os.path.join(root, git_file)
# followlinks only disables walking into linked dirs
if not os.path.islink(git_path): # pragma: no cover
files.append(git_path)
git_options = ["-C", str(git_folder), "-c", "core.quotePath="]
try:
ignored = subprocess.check_output( # nosec # skipcq: PYL-W1510
["git", *git_options, "check-ignore", "-z", "--stdin"],
encoding="utf-8",
env=env,
input="\0".join(files),
)
except subprocess.CalledProcessError:
return None
self.git_ignore[git_folder] = {Path(f) for f in ignored.rstrip("\0").split("\0")}
return git_folder
def is_skipped(self, file_path: Path) -> bool:
"""Returns True if the file and/or folder should be skipped based on current settings."""
if self.directory and Path(self.directory) in file_path.resolve().parents:
file_name = os.path.relpath(file_path.resolve(), self.directory)
else:
file_name = str(file_path)
os_path = str(file_path)
normalized_path = os_path.replace("\\", "/")
if normalized_path[1:2] == ":":
normalized_path = normalized_path[2:]
for skip_path in self.skips:
if posixpath.abspath(normalized_path) == posixpath.abspath(
skip_path.replace("\\", "/")
):
return True
position = os.path.split(file_name)
while position[1]:
if position[1] in self.skips:
return True
position = os.path.split(position[0])
for sglob in self.skip_globs:
if fnmatch.fnmatch(file_name, sglob) or fnmatch.fnmatch("/" + file_name, sglob):
return True
if not (os.path.isfile(os_path) or os.path.isdir(os_path) or os.path.islink(os_path)):
return True
if self.skip_gitignore:
if file_path.name == ".git": # pragma: no cover
return True
git_folder = None
file_paths = [file_path, file_path.resolve()]
for folder in self.git_ignore:
if any(folder in path.parents for path in file_paths):
git_folder = folder
break
else:
git_folder = self._check_folder_gitignore(str(file_path.parent))
if git_folder and any(path in self.git_ignore[git_folder] for path in file_paths):
return True
return False
@property
def known_patterns(self) -> List[Tuple[Pattern[str], str]]:
if self._known_patterns is not None:
return self._known_patterns
self._known_patterns = []
pattern_sections = [STDLIB] + [section for section in self.sections if section != STDLIB]
for placement in reversed(pattern_sections):
known_placement = KNOWN_SECTION_MAPPING.get(placement, placement).lower()
config_key = f"{KNOWN_PREFIX}{known_placement}"
known_modules = getattr(self, config_key, self.known_other.get(known_placement, ()))
extra_modules = getattr(self, f"extra_{known_placement}", ())
all_modules = set(extra_modules).union(known_modules)
known_patterns = [
pattern
for known_pattern in all_modules
for pattern in self._parse_known_pattern(known_pattern)
]
for known_pattern in known_patterns:
regexp = "^" + known_pattern.replace("*", ".*").replace("?", ".?") + "$"
self._known_patterns.append((re.compile(regexp), placement))
return self._known_patterns
@property
def section_comments(self) -> Tuple[str, ...]:
if self._section_comments is not None:
return self._section_comments
self._section_comments = tuple(f"# {heading}" for heading in self.import_headings.values())
return self._section_comments
@property
def skips(self) -> FrozenSet[str]:
if self._skips is not None:
return self._skips
self._skips = self.skip.union(self.extend_skip)
return self._skips
@property
def skip_globs(self) -> FrozenSet[str]:
if self._skip_globs is not None:
return self._skip_globs
self._skip_globs = self.skip_glob.union(self.extend_skip_glob)
return self._skip_globs
@property
def sorting_function(self) -> Callable[..., List[str]]:
if self._sorting_function is not None:
return self._sorting_function
if self.sort_order == "natural":
self._sorting_function = sorting.naturally
elif self.sort_order == "native":
self._sorting_function = sorted
else:
available_sort_orders = ["natural", "native"]
import pkg_resources
for sort_plugin in pkg_resources.iter_entry_points("isort.sort_function"):
available_sort_orders.append(sort_plugin.name)
if sort_plugin.name == self.sort_order:
self._sorting_function = sort_plugin.load()
break
else:
raise SortingFunctionDoesNotExist(self.sort_order, available_sort_orders)
return self._sorting_function
def _parse_known_pattern(self, pattern: str) -> List[str]:
"""Expand pattern if identified as a directory and return found sub packages"""
if pattern.endswith(os.path.sep):
patterns = [
filename
for filename in os.listdir(os.path.join(self.directory, pattern))
if os.path.isdir(os.path.join(self.directory, pattern, filename))
]
else:
patterns = [pattern]
return patterns
def _get_str_to_type_converter(setting_name: str) -> Union[Callable[[str], Any], Type[Any]]:
type_converter: Union[Callable[[str], Any], Type[Any]] = type(
_DEFAULT_SETTINGS.get(setting_name, "")
)
if type_converter == WrapModes:
type_converter = wrap_mode_from_string
return type_converter
def _as_list(value: str) -> List[str]:
if isinstance(value, list):
return [item.strip() for item in value]
filtered = [item.strip() for item in value.replace("\n", ",").split(",") if item.strip()]
return filtered
def _abspaths(cwd: str, values: Iterable[str]) -> Set[str]:
paths = {
os.path.join(cwd, value)
if not value.startswith(os.path.sep) and value.endswith(os.path.sep)
else value
for value in values
}
return paths
@lru_cache()
def _find_config(path: str) -> Tuple[str, Dict[str, Any]]:
current_directory = path
tries = 0
while current_directory and tries < MAX_CONFIG_SEARCH_DEPTH:
for config_file_name in CONFIG_SOURCES:
potential_config_file = os.path.join(current_directory, config_file_name)
if os.path.isfile(potential_config_file):
config_data: Dict[str, Any]
try:
config_data = _get_config_data(
potential_config_file, CONFIG_SECTIONS[config_file_name]
)
except Exception:
warn(f"Failed to pull configuration information from {potential_config_file}")
config_data = {}
if config_data:
return (current_directory, config_data)
for stop_dir in STOP_CONFIG_SEARCH_ON_DIRS:
if os.path.isdir(os.path.join(current_directory, stop_dir)):
return (current_directory, {})
new_directory = os.path.split(current_directory)[0]
if new_directory == current_directory:
break
current_directory = new_directory
tries += 1
return (path, {})
@lru_cache()
def _get_config_data(file_path: str, sections: Tuple[str]) -> Dict[str, Any]:
settings: Dict[str, Any] = {}
with open(file_path, encoding="utf-8") as config_file:
if file_path.endswith(".toml"):
config = toml.load(config_file)
for section in sections:
config_section = config
for key in section.split("."):
config_section = config_section.get(key, {})
settings.update(config_section)
else:
if file_path.endswith(".editorconfig"):
line = "\n"
last_position = config_file.tell()
while line:
line = config_file.readline()
if "[" in line:
config_file.seek(last_position)
break
last_position = config_file.tell()
config = configparser.ConfigParser(strict=False)
config.read_file(config_file)
for section in sections:
if section.startswith("*.{") and section.endswith("}"):
extension = section[len("*.{") : -1]
for config_key in config.keys():
if (
config_key.startswith("*.{")
and config_key.endswith("}")
and extension
in map(
lambda text: text.strip(), config_key[len("*.{") : -1].split(",") # type: ignore # noqa
)
):
settings.update(config.items(config_key))
elif config.has_section(section):
settings.update(config.items(section))
if settings:
settings["source"] = file_path
if file_path.endswith(".editorconfig"):
indent_style = settings.pop("indent_style", "").strip()
indent_size = settings.pop("indent_size", "").strip()
if indent_size == "tab":
indent_size = settings.pop("tab_width", "").strip()
if indent_style == "space":
settings["indent"] = " " * (indent_size and int(indent_size) or 4)
elif indent_style == "tab":
settings["indent"] = "\t" * (indent_size and int(indent_size) or 1)
max_line_length = settings.pop("max_line_length", "").strip()
if max_line_length and (max_line_length == "off" or max_line_length.isdigit()):
settings["line_length"] = (
float("inf") if max_line_length == "off" else int(max_line_length)
)
settings = {
key: value
for key, value in settings.items()
if key in _DEFAULT_SETTINGS.keys() or key.startswith(KNOWN_PREFIX)
}
for key, value in settings.items():
existing_value_type = _get_str_to_type_converter(key)
if existing_value_type == tuple:
settings[key] = tuple(_as_list(value))
elif existing_value_type == frozenset:
settings[key] = frozenset(_as_list(settings.get(key))) # type: ignore
elif existing_value_type == bool:
# Only some configuration formats support native boolean values.
if not isinstance(value, bool):
value = _as_bool(value)
settings[key] = value
elif key.startswith(KNOWN_PREFIX):
settings[key] = _abspaths(os.path.dirname(file_path), _as_list(value))
elif key == "force_grid_wrap":
try:
result = existing_value_type(value)
except ValueError: # backwards compatibility for true / false force grid wrap
result = 0 if value.lower().strip() == "false" else 2
settings[key] = result
elif key == "comment_prefix":
settings[key] = str(value).strip("'").strip('"')
else:
settings[key] = existing_value_type(value)
return settings
def _as_bool(value: str) -> bool:
"""Given a string value that represents True or False, returns the Boolean equivalent.
Heavily inspired from distutils strtobool.
"""
try:
return _STR_BOOLEAN_MAPPING[value.lower()]
except KeyError:
raise ValueError(f"invalid truth value {value}")
DEFAULT_CONFIG = Config()
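# A minimal usage sketch as doctest-style comments (the override values are
# illustrative):
# >>> config = Config(profile="black", line_length=100)
# >>> config.line_length
# 100
# >>> config.is_supported_filetype("example.py")
# True
# >>> config.is_skipped(Path(".tox/example.py"))  # ".tox" is in DEFAULT_SKIP
# True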
| 38.202286
| 120
| 0.592724
|
c2b9875a5a3bf32065f5f0987dffbc74cf8673d8
| 7,381
|
py
|
Python
|
hackerrank_template_generator/test_functions.py
|
KaShing96/personal-projects
|
e20134ba694cd095caa62211b18396a011d4f4f2
|
[
"MIT"
] | null | null | null |
hackerrank_template_generator/test_functions.py
|
KaShing96/personal-projects
|
e20134ba694cd095caa62211b18396a011d4f4f2
|
[
"MIT"
] | null | null | null |
hackerrank_template_generator/test_functions.py
|
KaShing96/personal-projects
|
e20134ba694cd095caa62211b18396a011d4f4f2
|
[
"MIT"
] | null | null | null |
# === Script variables ===
TIME_LIMIT = 10 # Default time limit
# === Imports ===
import pytest
import warnings
import json
import os
from pathlib import Path
import functions as fnc
from func_timeout import func_timeout as fto
from func_timeout.exceptions import FunctionTimedOut
from datetime import datetime
# === Debug function ===
def DEBUG(*args, **kwargs):
"""
Debug function.
"""
print(*args, **kwargs)
# === Test function ===
def get_cases(f, sep="---"):
"""
Extracts inputs and outputs for each test/verification case within f, where f is a folder.
Params
======
f: str
The folder containing the cases to be extracted.
sep: str
The substring separating comments from the input from the output in each case file.
Returns
=======
cases: []
Array of dictionaries containing each case
Each case is a dictionary with the following fields:
- "filename": The name of the file
- "comments": Any comments in the folder
- "inputs": The inputs
- "outputs": The expected outputs
Raises
======
AssertionError:
If the given path is not a folder.
"""
# Initialise path
p = Path(f)
# Assert that target folder is a folder
assert p.is_dir()
# List of cases in the folder
cases = []
# Loop through all cases within the folder
for f in p.iterdir():
# Open each case file
with open(f) as fr:
# Obtain the contents of the case file
contents = fr.read()
# The case files are structured as INPUTS, followed by the separator substring, followed by the OUTPUTS, optionally followed by the separator substring and COMMENTS
# Instantiate case dictionary
c = {}
# Separate the contents by the separator, and then clean each individual element of newline/whitespace
contents = contents.split(sep)
contents = [c.strip() for c in contents]
# Populate dictionary
c["filename"] = f.with_suffix("").name
c["inputs"] = contents[0]
c["outputs"] = contents[1]
if len(contents) == 3:
c["comments"] = contents[2]
# Add dictionary to list of cases
cases.append(c)
# After all cases have been looped through, return cases
return cases
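# A minimal sketch of the case-file layout get_cases expects (the contents
# are illustrative): inputs first, then the separator, then outputs, then an
# optional separator plus comments:
#
#     1 2 3
#     ---
#     6
#     ---
#     Sums three space-separated integers.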
def run_test(f, dcstr=[], dcix=[], raise_errors=True):
"""
Runs the test required for all cases within the folder. Any cases within dcstr and dcix are ignored.
Params
======
f: str
The name of the test folder.
dcstr: []
Array of strings.
If the element of dcstr is an exact match with any of the case file names, the case is ignored.
dcix: []
Array of integers.
Less reliable 'ignore' method. This ignores the 0-indexed element of the collected cases.
raise_errors: bool
Whether any errors gathered while testing the cases should be raised. If false, only whether a case succeeded or failed is reported.
"""
# === Ensure that dcstr and dcix are lists ===
# Check if dcstr is a list
if type(dcstr) == type([]):
# If it is a list, we identify if all elements are of type 'str'
# Filter out all non-string types
other_types = [type(x) for x in dcstr]
other_types = list(set(other_types))
other_types = list(filter(lambda x: x != type(""), other_types))
# If there are non-string types, raise an exception
if other_types:
raise Exception(f"dcstr must be a list of strings. Elements of type {other_types} found.")
# If it's not a list, check if it's a string
elif type(dcstr) == type(""):
# Wrap the single string in a list
dcstr = [dcstr]
# If it is neither a string nor a list, we reject it.
else:
raise Exception(f"dcstr must be a string or a list of strings, not a {str(type(dcstr))}.")
# We do the same check for dcix
if type(dcix) == type([]):
# If it is a list, we identify if all elements are of type 'int'
# Filter out all non-integer types
other_types = [type(x) for x in dcix]
other_types = list(set(other_types))
other_types = list(filter(lambda x: x != type(1), other_types))
# If there are non-integer types, raise an exception
if other_types:
raise Exception(f"dcix must be a list of integers. Elements of type {other_types} found.")
# If it's not a list, check if it's an integer
elif type(dcix) == type(1):
# Wrap the single integer in a list
dcix = [dcix]
# If it is neither an integer nor a list, we reject it.
else:
raise Exception(f"dcix must be an integer or a list of integers, not a {str(type(dcix))}.")
# === Get cases from the folder ===
cases = None
try:
# Obtain cases
cases = get_cases(f)
except AssertionError:
raise Exception(f"The path '{f}' is not a valid folder.")
# Ensure there are cases to run through
if not cases:
raise Exception(f"There are no test cases in '{f}'.")
# === Loop through each case ===
for cx, c in enumerate(cases):
# If cx is in dcix, ignore this case
# If the name of the case is in dcstr, we ignore the case
if cx in dcix or c["filename"] in dcstr:
continue
# Print out test case
print(f"({f}) test case {cx} '{c['filename']}': ", end="")
# Instantiate fnc arguments
fnc.set_inputs(c["inputs"])
# Get start time
start_time = datetime.now()
# Run function
try:
fto(
TIME_LIMIT,
fnc.main,
(c["inputs"], )
)
# If the function times out, add it as an error
except FunctionTimedOut as e:
c["errors"] = e
# For any other exception, we also add it as an error
# The reason we separate FunctionTimedOut from Exception is that FunctionTimedOut inherits from BaseException, not Exception, so an "except Exception" clause alone would not catch it
except Exception as e:
c["errors"] = e
# Here, we check if there are errors
if "errors" in c.keys():
# If there are errors, print the error out
print(c["errors"])
else:
# If there are no exceptions, we check that the answer is correct
try:
assert "\n".join(fnc.fptr.get_answers()).strip() == c["outputs"].strip()
print("Success")
except Exception as e:
# There are, as of now, no errors
# We set the errors to the assertion error
c["errors"] = e
# Print the error
print(e)
# Finally, we raise all errors so py.test recognises that this test case failed
for c in cases:
if "errors" in c.keys():
raise c["errors"]
# === Run test ===
def test_test_values():
"""
Runs test on test values.
"""
run_test("test_cases")
def test_ver_values():
"""
Runs test on verification values.
"""
run_test("verification_cases")
| 28.388462
| 204
| 0.58339
|
3737c4194f1dd1d8d90dc27ea4e24b635d78f94c
| 3,180
|
py
|
Python
|
test/test_sms_campaigns_api.py
|
tarraschk/APIv3-python-library
|
440883d3a7ca503a655f16bf69cef6c122a95e01
|
[
"MIT"
] | 46
|
2018-12-18T21:37:18.000Z
|
2022-03-30T20:38:29.000Z
|
test/test_sms_campaigns_api.py
|
tarraschk/APIv3-python-library
|
440883d3a7ca503a655f16bf69cef6c122a95e01
|
[
"MIT"
] | 41
|
2018-03-02T13:22:48.000Z
|
2021-11-25T04:32:03.000Z
|
test/test_sms_campaigns_api.py
|
tarraschk/APIv3-python-library
|
440883d3a7ca503a655f16bf69cef6c122a95e01
|
[
"MIT"
] | 45
|
2018-01-22T14:42:32.000Z
|
2021-12-16T19:58:45.000Z
|
# coding: utf-8
"""
SendinBlue API
SendinBlue provide a RESTFul API that can be used with any languages. With this API, you will be able to : - Manage your campaigns and get the statistics - Manage your contacts - Send transactional Emails and SMS - and much more... You can download our wrappers at https://github.com/orgs/sendinblue **Possible responses** | Code | Message | | :-------------: | ------------- | | 200 | OK. Successful Request | | 201 | OK. Successful Creation | | 202 | OK. Request accepted | | 204 | OK. Successful Update/Deletion | | 400 | Error. Bad Request | | 401 | Error. Authentication Needed | | 402 | Error. Not enough credit, plan upgrade needed | | 403 | Error. Permission denied | | 404 | Error. Object does not exist | | 405 | Error. Method not allowed | | 406 | Error. Not Acceptable | # noqa: E501
OpenAPI spec version: 3.0.0
Contact: contact@sendinblue.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import sib_api_v3_sdk
from sib_api_v3_sdk.api.sms_campaigns_api import SMSCampaignsApi # noqa: E501
from sib_api_v3_sdk.rest import ApiException
class TestSMSCampaignsApi(unittest.TestCase):
"""SMSCampaignsApi unit test stubs"""
def setUp(self):
self.api = sib_api_v3_sdk.api.sms_campaigns_api.SMSCampaignsApi() # noqa: E501
def tearDown(self):
pass
def test_create_sms_campaign(self):
"""Test case for create_sms_campaign
Creates an SMS campaign # noqa: E501
"""
pass
def test_delete_sms_campaign(self):
"""Test case for delete_sms_campaign
Delete an SMS campaign # noqa: E501
"""
pass
def test_get_sms_campaign(self):
"""Test case for get_sms_campaign
Get an SMS campaign # noqa: E501
"""
pass
def test_get_sms_campaigns(self):
"""Test case for get_sms_campaigns
Returns the information for all your created SMS campaigns # noqa: E501
"""
pass
def test_request_sms_recipient_export(self):
"""Test case for request_sms_recipient_export
Export an SMS campaign's recipients # noqa: E501
"""
pass
def test_send_sms_campaign_now(self):
"""Test case for send_sms_campaign_now
Send your SMS campaign immediately # noqa: E501
"""
pass
def test_send_sms_report(self):
"""Test case for send_sms_report
Send an SMS campaign's report # noqa: E501
"""
pass
def test_send_test_sms(self):
"""Test case for send_test_sms
Send a test SMS campaign # noqa: E501
"""
pass
def test_update_sms_campaign(self):
"""Test case for update_sms_campaign
Update an SMS campaign # noqa: E501
"""
pass
def test_update_sms_campaign_status(self):
"""Test case for update_sms_campaign_status
Update a campaign's status # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| 30.285714
| 856
| 0.636164
|
39de768835d34ad1fec421b133be92c256ee2d37
| 4,164
|
py
|
Python
|
app/project_configuration/settings/base.py
|
michel-rodrigues/viggio_backend
|
f419f0b939209722e1eb1e272f33de172cd5c1f1
|
[
"MIT"
] | null | null | null |
app/project_configuration/settings/base.py
|
michel-rodrigues/viggio_backend
|
f419f0b939209722e1eb1e272f33de172cd5c1f1
|
[
"MIT"
] | null | null | null |
app/project_configuration/settings/base.py
|
michel-rodrigues/viggio_backend
|
f419f0b939209722e1eb1e272f33de172cd5c1f1
|
[
"MIT"
] | null | null | null |
import os
from datetime import timedelta
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
SECRET_KEY = os.environ.get('SECRET_KEY')
DEBUG = int(os.environ.get('DEBUG', default=0))
ALLOWED_HOSTS = ['viggio.com.br', 'local.viggio.com.br', 'localhost', '127.0.0.1']
# https://ubuntu.com/blog/django-behind-a-proxy-fixing-absolute-urls
USE_X_FORWARDED_HOST = True
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Application definition
DEFAULT_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
THIRD_PARTY_APPS = [
'rest_framework',
]
PROJECT_APPS = [
'admin_configuration',
'accounts',
'categories',
'customers',
'message_bus',
'orders',
'post_office',
'request_shoutout',
'shoutouts',
'talents',
'transcoder',
'wirecard',
]
INSTALLED_APPS = DEFAULT_APPS + THIRD_PARTY_APPS + PROJECT_APPS
AUTHENTICATION_BACKENDS = [
'django.contrib.auth.backends.ModelBackend',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'utils.middlewares.DisableCSRFMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project_configuration.urls'
AUTH_USER_MODEL = 'accounts.User'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'admin_configuration/templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'admin_configuration.context_processors.from_environment',
],
},
},
]
WSGI_APPLICATION = 'project_configuration.wsgi.application'
DATABASES = {
'default': {
'ENGINE': os.environ.get('DATABASE_ENGINE'),
'NAME': os.environ.get('DATABASE_NAME'),
'USER': os.environ.get('DATABASE_USER'),
'PASSWORD': os.environ.get('DATABASE_PASSWORD'),
'HOST': os.environ.get('DATABASE_HOST'),
'PORT': os.environ.get('DATABASE_PORT'),
}
}
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework_simplejwt.authentication.JWTAuthentication',
'rest_framework.authentication.SessionAuthentication',
],
}
SIMPLE_JWT = {
'ACCESS_TOKEN_LIFETIME': timedelta(days=int(os.environ['ACCESS_TOKEN_LIFETIME_DAYS'])),
'REFRESH_TOKEN_LIFETIME': timedelta(days=int(os.environ['REFRESH_TOKEN_LIFETIME_DAYS'])),
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
SP_TZ = 3
SAFE_PERIOD = 1 # Expire the order 1 hour before the third-party payment processor would
EXPIRATION_DAYS = 5 # Number of days until an Order expires
USE_I18N = True
USE_L10N = True
USE_TZ = True
FILE_UPLOAD_MAX_MEMORY_SIZE = 209715200 # value in bytes (200 MB)
REDIS_HOST = os.environ.get('REDIS_HOST')
REDIS_PORT = os.environ.get('REDIS_PORT')
CELERY_BROKER_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}'
CELERY_RESULT_BACKEND = f'redis://{REDIS_HOST}:{REDIS_PORT}'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
| 27.576159
| 93
| 0.696686
|
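The settings above are driven almost entirely by environment variables. A minimal sketch of how the SIMPLE_JWT lifetimes resolve at import time; the variable names come from the file, the example values are hypothetical:

import os
from datetime import timedelta

os.environ.setdefault('ACCESS_TOKEN_LIFETIME_DAYS', '1')    # hypothetical value
os.environ.setdefault('REFRESH_TOKEN_LIFETIME_DAYS', '30')  # hypothetical value

SIMPLE_JWT = {
    'ACCESS_TOKEN_LIFETIME': timedelta(days=int(os.environ['ACCESS_TOKEN_LIFETIME_DAYS'])),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=int(os.environ['REFRESH_TOKEN_LIFETIME_DAYS'])),
}
assert SIMPLE_JWT['ACCESS_TOKEN_LIFETIME'] == timedelta(days=1)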
19ea4ffd9ce35fa33a457cd3e4aaed4f362407b1
| 5,553
|
py
|
Python
|
xfel/merging/refine.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
xfel/merging/refine.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
xfel/merging/refine.py
|
rimmartin/cctbx_project
|
644090f9432d9afc22cfb542fc3ab78ca8e15e5d
|
[
"BSD-3-Clause-LBNL"
] | null | null | null |
# -*- mode: python; coding: utf-8; indent-tabs-mode: nil; python-indent: 2 -*-
#
# $Id$
from __future__ import division, print_function
import math
from cctbx.array_family import flex
from scitbx.lbfgs.tst_curvatures import lbfgs_with_curvatures_mix_in
class find_scale(lbfgs_with_curvatures_mix_in):
def __init__(self, scaler, params):
"""This function is largely redundant, because it duplicates what is
done during mark1 scaling.
@param scaler Database structure of scaling input
@param params work_params
"""
# Extract an ordered union of all Miller indices observed on all
# frames, and the database structure of observations.
self._millers = scaler.millers['merged_asu_hkl']
self._observations = scaler._observations
# XXX Could be more clever about this here, because this will
# determine scale factors for rejected frames as well! Better
# named selected_frames?
self._subset = scaler.frames['data_subset']
self._data = self._observations.get_double('i')
self._hkl = self._observations.get_int('hkl_id')
self._sigmas = self._observations.get_double('sigi')
self._frames = self._observations.get_int('frame_id')
# XXX Useless assert?
assert len(self._hkl) == len(self._data) \
and len(self._hkl) == len(self._sigmas)
# Initialise all per-frame scale factors to one.
n_frames = len(self._subset)
self.x = flex.double(n_frames + len(self._millers))
for i in range(n_frames):
self.x[i] = 1
# For each Miller index, the weighted (XXX) average intensity of
# all the observations serves as an initial estimate of the merged
# intensity. This is all Monte Carlo scaling would do.
assert len(self._millers) == len(scaler.summed_wt_I) \
and len(self._millers) == len(scaler.summed_weight)
for i in range(len(self._millers)):
if scaler.summed_weight[i] > 0:
self.x[n_frames + i] = scaler.summed_wt_I[i] / scaler.summed_weight[i]
# The weight of each observation is (1 / sigma)**2, where sigma is
# the standard deviation of the observation as determined during
# integration. An observation is assigned a weight of zero if
#
# The observation was made on a rejected frame
#
# The integrated intensity of the observation is non-positive
#
# The variance of the observation, s**2, as determined during
# integration, is non-positive
#
# The d-spacing of the observation lies outside the
# user-supplied resolution limits
#
# XXX Check Bolotovsky et al.: use sigma**2 or sigma for the
# weighting?
self.w = flex.double(len(self._hkl))
for i in range(len(self.w)):
if not self._subset[self._frames[i]]:
continue
if not params.include_negatives and self._data[i] <= 0:
continue
# XXX Should compare against sqrt(eps) instead? See also
# scales_non_positive below.
v = self._sigmas[i]**2
if v <= 0:
continue
# Test d_min first, because it is more likely to have a lower
# resolution limit than an upper resolution limit. XXX Is this
# ever enforced in practice, i.e. is this the first time the
# limits are applied?
d = scaler.params.target_unit_cell.d(self._millers[self._hkl[i]])
if (params.d_min is not None and d < params.d_min) or \
(params.d_max is not None and d > params.d_max):
continue
self.w[i] = 1 / v
# Should be the last call in the application-specific minimizer
# class. This will call lbfgs's run() function and perform
# optimization.
super(find_scale, self).__init__() #max_iterations=2000
def compute_functional_and_gradients(self):
"""The compute_functional_and_gradients() function
@return Two-tuple of the value of the functional, and an
<code>n</code>-long vector with the values of the
gradients at the current position
"""
#from libtbx.development.timers import Profiler
from xfel import compute_functional_and_gradients
n_frames = len(self._subset)
#p = Profiler("compute_functional_and_gradients [C++]")
(f, g) = compute_functional_and_gradients(
self.x, self.w, n_frames, self._observations)
#del p
# XXX Only output this every 100 iterations or so.
scales = self.x[0:len(self._subset)]
stats = flex.mean_and_variance(scales)
print "* f =% 10.4e, g =% f+/-%f" % (
math.sqrt(f),
stats.mean(),
stats.unweighted_sample_standard_deviation())
# Warn if there are non-positive per-frame scaling factors.
scales_non_positive = scales.select(scales <= 1e-6) # XXX Or just zero!
if len(scales_non_positive) > 0:
stats = flex.mean_and_variance(scales_non_positive)
if len(scales_non_positive) > 1:
sigma = stats.unweighted_sample_standard_deviation()
else:
sigma = 0
print "Have %d non-positive per-frame scaling factors: " \
"%f+/-%f [%f, %f]" % (
len(scales_non_positive),
stats.mean(),
sigma,
flex.min(scales_non_positive),
flex.max(scales_non_positive))
return (f, g)
def curvatures(self):
from xfel import curvatures
n_frames = len(self._subset)
return curvatures(
self.x, self.w, n_frames, self._observations)
def get_scaling_results(self, results, scaler):
from xfel import get_scaling_results_mark2
return get_scaling_results_mark2(
self.x, self.w, results, scaler.params.target_unit_cell)
| 34.277778
| 78
| 0.676751
|
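The comment block in find_scale.__init__ spells out exactly when an observation's weight is zeroed. A standalone sketch of that rule, assuming plain Python scalars instead of the cctbx flex arrays (the argument names are inventions for illustration):

def observation_weight(sigma, intensity, d, frame_ok,
                       include_negatives=False, d_min=None, d_max=None):
    """Return 1 / sigma**2, or 0.0 if any rejection rule applies."""
    if not frame_ok:                                  # rejected frame
        return 0.0
    if not include_negatives and intensity <= 0:      # non-positive intensity
        return 0.0
    v = sigma ** 2
    if v <= 0:                                        # non-positive variance
        return 0.0
    if (d_min is not None and d < d_min) or (d_max is not None and d > d_max):
        return 0.0                                    # outside resolution limits
    return 1.0 / v

assert observation_weight(2.0, 10.0, 1.5, True) == 0.25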
0c47e68c497b8b965e8ad64cc16c45656f6e90c7
| 990
|
py
|
Python
|
Chapter04/code/chapter4_02.py
|
sTone3/Tkinter-GUI-Application-Development-Cookbook
|
6c42edaa66c30d13e1903f3a3a66f609f20783d9
|
[
"MIT"
] | 43
|
2018-05-09T23:29:07.000Z
|
2022-03-05T02:03:32.000Z
|
Chapter04/code/chapter4_02.py
|
wenxuefeng3930/Tkinter-GUI-Application-Development-Cookbook
|
6e22bc472e07a9aaa017ef66bfd32644578a873a
|
[
"MIT"
] | null | null | null |
Chapter04/code/chapter4_02.py
|
wenxuefeng3930/Tkinter-GUI-Application-Development-Cookbook
|
6e22bc472e07a9aaa017ef66bfd32644578a873a
|
[
"MIT"
] | 40
|
2018-06-18T14:44:13.000Z
|
2022-03-12T09:11:50.000Z
|
import tkinter as tk
import tkinter.messagebox as mb
class App(tk.Tk):
def __init__(self):
super().__init__()
self.create_button(mb.askyesno, "Ask Yes/No",
"Returns True or False")
self.create_button(mb.askquestion, "Ask a question",
"Returns 'yes' or 'no'")
self.create_button(mb.askokcancel, "Ask Ok/Cancel",
"Returns True or False")
self.create_button(mb.askretrycancel, "Ask Retry/Cancel",
"Returns True or False")
self.create_button(mb.askyesnocancel, "Ask Yes/No/Cancel",
"Returns True, False or None")
def create_button(self, dialog, title, message):
command = lambda: print(dialog(title, message))
btn = tk.Button(self, text=title, command=command)
btn.pack(padx=40, pady=5, expand=True, fill=tk.BOTH)
if __name__ == "__main__":
app = App()
app.mainloop()
| 38.076923
| 66
| 0.576768
|
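Each button label in the demo documents the dialog's return type. The same calls work outside the class; a small self-contained sketch:

import tkinter as tk
import tkinter.messagebox as mb

root = tk.Tk()
root.withdraw()                               # the dialogs need a root, not a visible window
print(mb.askyesno("Confirm", "Proceed?"))     # True or False
print(mb.askquestion("Confirm", "Proceed?"))  # 'yes' or 'no'
root.destroy()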
546a093192f3d5db023e68edf7793cbb256c0621
| 406
|
py
|
Python
|
src/evgen/latency_sources/__init__.py
|
smdsbz/latency-sim
|
192fb0da89be718a312ea5400902aa6e99b7a174
|
[
"MIT"
] | null | null | null |
src/evgen/latency_sources/__init__.py
|
smdsbz/latency-sim
|
192fb0da89be718a312ea5400902aa6e99b7a174
|
[
"MIT"
] | null | null | null |
src/evgen/latency_sources/__init__.py
|
smdsbz/latency-sim
|
192fb0da89be718a312ea5400902aa6e99b7a174
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from .latency_source import *
from .constant_sources import *
from .sampling_sources import *
def get_latency_source(type_: str, name: str, *_, **kwarg):
if type_ == 'constant':
return ConstantSource(name, **kwarg)
if type_ == 'normal-distribution':
return NormalDistributionSource(name, **kwarg)
raise RuntimeError(f'unknown type {type_} for {name}')
| 31.230769
| 59
| 0.682266
|
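get_latency_source is a plain string-dispatch factory. A hedged usage sketch; the keyword arguments accepted by ConstantSource and NormalDistributionSource are assumptions, since their definitions are not shown here:

src = get_latency_source('constant', 'disk-read', value=0.5)                    # kwargs assumed
jittery = get_latency_source('normal-distribution', 'net', mu=1.0, sigma=0.1)   # kwargs assumed
try:
    get_latency_source('pareto', 'unknown')
except RuntimeError as exc:
    print(exc)  # unknown type pareto for unknown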
b87e9a8475a77aedc607bd92a1da2bb8be1ab676
| 28,620
|
py
|
Python
|
saleor/plugins/manager.py
|
largotuan/sonjay
|
9826dc7dfe9acd5a03aaa136020f105309805402
|
[
"CC-BY-4.0"
] | 1
|
2021-03-23T20:27:50.000Z
|
2021-03-23T20:27:50.000Z
|
saleor/plugins/manager.py
|
largotuan/sonjay
|
9826dc7dfe9acd5a03aaa136020f105309805402
|
[
"CC-BY-4.0"
] | 18
|
2021-03-23T20:32:14.000Z
|
2022-03-12T01:04:06.000Z
|
saleor/plugins/manager.py
|
largotuan/sonjay
|
9826dc7dfe9acd5a03aaa136020f105309805402
|
[
"CC-BY-4.0"
] | null | null | null |
from decimal import Decimal
from typing import TYPE_CHECKING, Any, Dict, Iterable, List, Optional, Tuple, Union
import opentracing
from django.conf import settings
from django.core.handlers.wsgi import WSGIRequest
from django.http import HttpResponse, HttpResponseNotFound
from django.utils.module_loading import import_string
from django_countries.fields import Country
from prices import Money, TaxedMoney
from ..checkout import base_calculations
from ..core.payments import PaymentInterface
from ..core.prices import quantize_price
from ..core.taxes import TaxType, zero_taxed_money
from ..discount import DiscountInfo
from .base_plugin import ExternalAccessTokens
from .models import PluginConfiguration
if TYPE_CHECKING:
# flake8: noqa
from ..account.models import Address, User
from ..channel.models import Channel
from ..checkout.fetch import CheckoutInfo, CheckoutLineInfo
from ..checkout.models import Checkout
from ..invoice.models import Invoice
from ..order.models import Fulfillment, Order, OrderLine
from ..page.models import Page
from ..payment.interface import (
CustomerSource,
GatewayResponse,
InitializedPaymentResponse,
PaymentData,
PaymentGateway,
TokenConfig,
)
from ..product.models import (
Collection,
Product,
ProductType,
ProductVariant,
ProductVariantChannelListing,
)
from .base_plugin import BasePlugin
class PluginsManager(PaymentInterface):
"""Base manager for handling plugins logic."""
plugins: List["BasePlugin"] = []
def __init__(self, plugins: List[str]):
with opentracing.global_tracer().start_active_span("PluginsManager.__init__"):
self.plugins = []
all_configs = self._get_all_plugin_configs()
for plugin_path in plugins:
with opentracing.global_tracer().start_active_span(f"{plugin_path}"):
PluginClass = import_string(plugin_path)
if PluginClass.PLUGIN_ID in all_configs:
existing_config = all_configs[PluginClass.PLUGIN_ID]
plugin_config = existing_config.configuration
active = existing_config.active
else:
plugin_config = PluginClass.DEFAULT_CONFIGURATION
active = PluginClass.get_default_active()
plugin = PluginClass(configuration=plugin_config, active=active)
self.plugins.append(plugin)
def __run_method_on_plugins(
self, method_name: str, default_value: Any, *args, **kwargs
):
"""Try to run a method with the given name on each declared plugin."""
with opentracing.global_tracer().start_active_span(
f"PluginsManager.{method_name}"
):
value = default_value
for plugin in self.plugins:
value = self.__run_method_on_single_plugin(
plugin, method_name, value, *args, **kwargs
)
return value
def __run_method_on_single_plugin(
self,
plugin: Optional["BasePlugin"],
method_name: str,
previous_value: Any,
*args,
**kwargs,
) -> Any:
"""Run method_name on plugin.
Method will return value returned from plugin's
method. If plugin doesn't have own implementation of expected method_name, it
will return previous_value.
"""
plugin_method = getattr(plugin, method_name, NotImplemented)
if plugin_method is NotImplemented:
return previous_value
returned_value = plugin_method(*args, **kwargs, previous_value=previous_value)
if returned_value is NotImplemented:
return previous_value
return returned_value
def change_user_address(
self, address: "Address", address_type: Optional[str], user: Optional["User"]
) -> "Address":
default_value = address
return self.__run_method_on_plugins(
"change_user_address", default_value, address, address_type, user
)
def calculate_checkout_total(
self,
checkout_info: "CheckoutInfo",
lines: Iterable["CheckoutLineInfo"],
address: Optional["Address"],
discounts: Iterable[DiscountInfo],
) -> TaxedMoney:
default_value = base_calculations.base_checkout_total(
subtotal=self.calculate_checkout_subtotal(
checkout_info, lines, address, discounts
),
shipping_price=self.calculate_checkout_shipping(
checkout_info, lines, address, discounts
),
discount=checkout_info.checkout.discount,
currency=checkout_info.checkout.currency,
)
return quantize_price(
self.__run_method_on_plugins(
"calculate_checkout_total",
default_value,
checkout_info,
lines,
address,
discounts,
),
checkout_info.checkout.currency,
)
def calculate_checkout_subtotal(
self,
checkout_info: "CheckoutInfo",
lines: Iterable["CheckoutLineInfo"],
address: Optional["Address"],
discounts: Iterable[DiscountInfo],
) -> TaxedMoney:
line_totals = [
self.calculate_checkout_line_total(
checkout_info,
lines,
line_info,
address,
discounts,
)
for line_info in lines
]
default_value = base_calculations.base_checkout_subtotal(
line_totals, checkout_info.checkout.currency
)
return quantize_price(
self.__run_method_on_plugins(
"calculate_checkout_subtotal",
default_value,
checkout_info,
lines,
address,
discounts,
),
checkout_info.checkout.currency,
)
def calculate_checkout_shipping(
self,
checkout_info: "CheckoutInfo",
lines: Iterable["CheckoutLineInfo"],
address: Optional["Address"],
discounts: Iterable[DiscountInfo],
) -> TaxedMoney:
default_value = base_calculations.base_checkout_shipping_price(
checkout_info, lines
)
return quantize_price(
self.__run_method_on_plugins(
"calculate_checkout_shipping",
default_value,
checkout_info,
lines,
address,
discounts,
),
checkout_info.checkout.currency,
)
def calculate_order_shipping(self, order: "Order") -> TaxedMoney:
if not order.shipping_method:
return zero_taxed_money(order.currency)
shipping_price = order.shipping_method.channel_listings.get(
channel_id=order.channel_id
).price
default_value = quantize_price(
TaxedMoney(net=shipping_price, gross=shipping_price),
shipping_price.currency,
)
return quantize_price(
self.__run_method_on_plugins(
"calculate_order_shipping", default_value, order
),
order.currency,
)
def get_checkout_shipping_tax_rate(
self,
checkout_info: "CheckoutInfo",
lines: Iterable["CheckoutLineInfo"],
address: Optional["Address"],
discounts: Iterable[DiscountInfo],
shipping_price: TaxedMoney,
):
default_value = base_calculations.base_tax_rate(shipping_price)
return self.__run_method_on_plugins(
"get_checkout_shipping_tax_rate",
default_value,
checkout_info,
lines,
address,
discounts,
).quantize(Decimal(".0001"))
def get_order_shipping_tax_rate(self, order: "Order", shipping_price: TaxedMoney):
default_value = base_calculations.base_tax_rate(shipping_price)
return self.__run_method_on_plugins(
"get_order_shipping_tax_rate", default_value, order
).quantize(Decimal(".0001"))
def calculate_checkout_line_total(
self,
checkout_info: "CheckoutInfo",
lines: Iterable["CheckoutLineInfo"],
checkout_line_info: "CheckoutLineInfo",
address: Optional["Address"],
discounts: Iterable["DiscountInfo"],
):
default_value = base_calculations.base_checkout_line_total(
checkout_line_info,
checkout_info.channel,
discounts,
)
return quantize_price(
self.__run_method_on_plugins(
"calculate_checkout_line_total",
default_value,
checkout_info,
lines,
checkout_line_info,
address,
discounts,
),
checkout_info.checkout.currency,
)
def calculate_checkout_line_unit_price(
self,
total_line_price: TaxedMoney,
quantity: int,
checkout_info: "CheckoutInfo",
lines: Iterable["CheckoutLineInfo"],
checkout_line_info: "CheckoutLineInfo",
address: Optional["Address"],
discounts: Iterable["DiscountInfo"],
):
default_value = base_calculations.base_checkout_line_unit_price(
total_line_price, quantity
)
return quantize_price(
self.__run_method_on_plugins(
"calculate_checkout_line_unit_price",
default_value,
checkout_info,
lines,
checkout_line_info,
address,
discounts,
),
total_line_price.currency,
)
def calculate_order_line_unit(
self,
order: "Order",
order_line: "OrderLine",
variant: "ProductVariant",
product: "Product",
) -> TaxedMoney:
unit_price = order_line.unit_price
default_value = quantize_price(unit_price, unit_price.currency)
return quantize_price(
self.__run_method_on_plugins(
"calculate_order_line_unit",
default_value,
order,
order_line,
variant,
product,
),
order_line.currency,
)
def get_checkout_line_tax_rate(
self,
checkout_info: "CheckoutInfo",
lines: Iterable["CheckoutLineInfo"],
checkout_line_info: "CheckoutLineInfo",
address: Optional["Address"],
discounts: Iterable[DiscountInfo],
unit_price: TaxedMoney,
) -> Decimal:
default_value = base_calculations.base_tax_rate(unit_price)
return self.__run_method_on_plugins(
"get_checkout_line_tax_rate",
default_value,
checkout_info,
lines,
checkout_line_info,
address,
discounts,
).quantize(Decimal(".0001"))
def get_order_line_tax_rate(
self,
order: "Order",
product: "Product",
address: Optional["Address"],
unit_price: TaxedMoney,
) -> Decimal:
default_value = base_calculations.base_tax_rate(unit_price)
return self.__run_method_on_plugins(
"get_order_line_tax_rate", default_value, order, product, address
).quantize(Decimal(".0001"))
def get_tax_rate_type_choices(self) -> List[TaxType]:
default_value: list = []
return self.__run_method_on_plugins("get_tax_rate_type_choices", default_value)
def show_taxes_on_storefront(self) -> bool:
default_value = False
return self.__run_method_on_plugins("show_taxes_on_storefront", default_value)
def apply_taxes_to_product(
self, product: "Product", price: Money, country: Country
):
default_value = quantize_price(
TaxedMoney(net=price, gross=price), price.currency
)
return quantize_price(
self.__run_method_on_plugins(
"apply_taxes_to_product", default_value, product, price, country
),
price.currency,
)
def apply_taxes_to_shipping(
self, price: Money, shipping_address: "Address"
) -> TaxedMoney:
default_value = quantize_price(
TaxedMoney(net=price, gross=price), price.currency
)
return quantize_price(
self.__run_method_on_plugins(
"apply_taxes_to_shipping", default_value, price, shipping_address
),
price.currency,
)
def preprocess_order_creation(
self,
checkout_info: "CheckoutInfo",
discounts: Iterable[DiscountInfo],
lines: Optional[Iterable["CheckoutLineInfo"]] = None,
):
default_value = None
return self.__run_method_on_plugins(
"preprocess_order_creation", default_value, checkout_info, discounts, lines
)
def customer_created(self, customer: "User"):
default_value = None
return self.__run_method_on_plugins("customer_created", default_value, customer)
def customer_updated(self, customer: "User"):
default_value = None
return self.__run_method_on_plugins("customer_updated", default_value, customer)
def product_created(self, product: "Product"):
default_value = None
return self.__run_method_on_plugins("product_created", default_value, product)
def product_updated(self, product: "Product"):
default_value = None
return self.__run_method_on_plugins("product_updated", default_value, product)
def product_deleted(self, product: "Product", variants: List[int]):
default_value = None
return self.__run_method_on_plugins(
"product_deleted", default_value, product, variants
)
def product_variant_created(self, product_variant: "ProductVariant"):
default_value = None
return self.__run_method_on_plugins(
"product_variant_created", default_value, product_variant
)
def product_variant_updated(self, product_variant: "ProductVariant"):
default_value = None
return self.__run_method_on_plugins(
"product_variant_updated", default_value, product_variant
)
def product_variant_deleted(self, product_variant: "ProductVariant"):
default_value = None
return self.__run_method_on_plugins(
"product_variant_deleted", default_value, product_variant
)
def order_created(self, order: "Order"):
default_value = None
return self.__run_method_on_plugins("order_created", default_value, order)
def order_confirmed(self, order: "Order"):
default_value = None
return self.__run_method_on_plugins("order_confirmed", default_value, order)
def invoice_request(
self, order: "Order", invoice: "Invoice", number: Optional[str]
):
default_value = None
return self.__run_method_on_plugins(
"invoice_request", default_value, order, invoice, number
)
def invoice_delete(self, invoice: "Invoice"):
default_value = None
return self.__run_method_on_plugins("invoice_delete", default_value, invoice)
def invoice_sent(self, invoice: "Invoice", email: str):
default_value = None
return self.__run_method_on_plugins(
"invoice_sent", default_value, invoice, email
)
def order_fully_paid(self, order: "Order"):
default_value = None
return self.__run_method_on_plugins("order_fully_paid", default_value, order)
def order_updated(self, order: "Order"):
default_value = None
return self.__run_method_on_plugins("order_updated", default_value, order)
def order_cancelled(self, order: "Order"):
default_value = None
return self.__run_method_on_plugins("order_cancelled", default_value, order)
def order_fulfilled(self, order: "Order"):
default_value = None
return self.__run_method_on_plugins("order_fulfilled", default_value, order)
def fulfillment_created(self, fulfillment: "Fulfillment"):
default_value = None
return self.__run_method_on_plugins(
"fulfillment_created", default_value, fulfillment
)
def checkout_created(self, checkout: "Checkout"):
default_value = None
return self.__run_method_on_plugins("checkout_created", default_value, checkout)
def checkout_updated(self, checkout: "Checkout"):
default_value = None
return self.__run_method_on_plugins("checkout_updated", default_value, checkout)
def page_created(self, page: "Page"):
default_value = None
return self.__run_method_on_plugins("page_created", default_value, page)
def page_updated(self, page: "Page"):
default_value = None
return self.__run_method_on_plugins("page_updated", default_value, page)
def page_deleted(self, page: "Page"):
default_value = None
return self.__run_method_on_plugins("page_deleted", default_value, page)
def initialize_payment(
self, gateway, payment_data: dict
) -> Optional["InitializedPaymentResponse"]:
method_name = "initialize_payment"
default_value = None
gtw = self.get_plugin(gateway)
if not gtw:
return None
return self.__run_method_on_single_plugin(
gtw,
method_name,
previous_value=default_value,
payment_data=payment_data,
)
def authorize_payment(
self, gateway: str, payment_information: "PaymentData"
) -> "GatewayResponse":
method_name = "authorize_payment"
return self.__run_payment_method(gateway, method_name, payment_information)
def capture_payment(
self, gateway: str, payment_information: "PaymentData"
) -> "GatewayResponse":
method_name = "capture_payment"
return self.__run_payment_method(gateway, method_name, payment_information)
def refund_payment(
self, gateway: str, payment_information: "PaymentData"
) -> "GatewayResponse":
method_name = "refund_payment"
return self.__run_payment_method(gateway, method_name, payment_information)
def void_payment(
self, gateway: str, payment_information: "PaymentData"
) -> "GatewayResponse":
method_name = "void_payment"
return self.__run_payment_method(gateway, method_name, payment_information)
def confirm_payment(
self, gateway: str, payment_information: "PaymentData"
) -> "GatewayResponse":
method_name = "confirm_payment"
return self.__run_payment_method(gateway, method_name, payment_information)
def process_payment(
self, gateway: str, payment_information: "PaymentData"
) -> "GatewayResponse":
method_name = "process_payment"
return self.__run_payment_method(gateway, method_name, payment_information)
def token_is_required_as_payment_input(self, gateway) -> bool:
method_name = "token_is_required_as_payment_input"
default_value = True
gtw = self.get_plugin(gateway)
if gtw is not None:
return self.__run_method_on_single_plugin(
gtw,
method_name,
previous_value=default_value,
)
return default_value
def get_client_token(self, gateway, token_config: "TokenConfig") -> str:
method_name = "get_client_token"
default_value = None
gtw = self.get_plugin(gateway)
return self.__run_method_on_single_plugin(
gtw, method_name, default_value, token_config=token_config
)
def list_payment_sources(
self, gateway: str, customer_id: str
) -> List["CustomerSource"]:
default_value: list = []
gtw = self.get_plugin(gateway)
if gtw is not None:
return self.__run_method_on_single_plugin(
gtw, "list_payment_sources", default_value, customer_id=customer_id
)
raise Exception(f"Payment plugin {gateway} is inaccessible!")
def get_active_plugins(self, plugins=None) -> List["BasePlugin"]:
if plugins is None:
plugins = self.plugins
return [plugin for plugin in plugins if plugin.active]
def list_payment_plugin(self, active_only: bool = False) -> Dict[str, "BasePlugin"]:
payment_method = "process_payment"
plugins = self.plugins
if active_only:
plugins = self.get_active_plugins()
return {
plugin.PLUGIN_ID: plugin
for plugin in plugins
if payment_method in type(plugin).__dict__
}
def list_payment_gateways(
self, currency: Optional[str] = None, active_only: bool = True
) -> List["PaymentGateway"]:
payment_plugins = self.list_payment_plugin(active_only=active_only)
# if currency is given return only gateways which support given currency
gateways = []
for plugin in payment_plugins.values():
gateway = plugin.get_payment_gateway(currency=currency, previous_value=None)
if gateway:
gateways.append(gateway)
return gateways
def list_external_authentications(self, active_only: bool = True) -> List[dict]:
plugins = self.plugins
auth_basic_method = "external_obtain_access_tokens"
if active_only:
plugins = self.get_active_plugins()
return [
{"id": plugin.PLUGIN_ID, "name": plugin.PLUGIN_NAME}
for plugin in plugins
if auth_basic_method in type(plugin).__dict__
]
def checkout_available_payment_gateways(
self,
checkout: "Checkout",
) -> List["PaymentGateway"]:
payment_plugins = self.list_payment_plugin(active_only=True)
gateways = []
for plugin in payment_plugins.values():
gateway = plugin.get_payment_gateway_for_checkout(
checkout, previous_value=None
)
if gateway:
gateways.append(gateway)
return gateways
def __run_payment_method(
self,
gateway: str,
method_name: str,
payment_information: "PaymentData",
**kwargs,
) -> "GatewayResponse":
default_value = None
gtw = self.get_plugin(gateway)
if gtw is not None:
resp = self.__run_method_on_single_plugin(
gtw,
method_name,
previous_value=default_value,
payment_information=payment_information,
**kwargs,
)
if resp is not None:
return resp
raise Exception(
f"Payment plugin {gateway} for {method_name}"
" payment method is inaccessible!"
)
def _get_all_plugin_configs(self):
with opentracing.global_tracer().start_active_span("_get_all_plugin_configs"):
if not hasattr(self, "_plugin_configs"):
self._plugin_configs = {
pc.identifier: pc for pc in PluginConfiguration.objects.all()
}
return self._plugin_configs
# FIXME these methods should be more generic
def assign_tax_code_to_object_meta(
self, obj: Union["Product", "ProductType"], tax_code: Optional[str]
):
default_value = None
return self.__run_method_on_plugins(
"assign_tax_code_to_object_meta", default_value, obj, tax_code
)
def get_tax_code_from_object_meta(
self, obj: Union["Product", "ProductType"]
) -> TaxType:
default_value = TaxType(code="", description="")
return self.__run_method_on_plugins(
"get_tax_code_from_object_meta", default_value, obj
)
def get_tax_rate_percentage_value(
self, obj: Union["Product", "ProductType"], country: Country
) -> Decimal:
default_value = Decimal("0").quantize(Decimal("1."))
return self.__run_method_on_plugins(
"get_tax_rate_percentage_value", default_value, obj, country
).quantize(Decimal("1."))
def save_plugin_configuration(self, plugin_id, cleaned_data: dict):
for plugin in self.plugins:
if plugin.PLUGIN_ID == plugin_id:
plugin_configuration, _ = PluginConfiguration.objects.get_or_create(
identifier=plugin_id,
defaults={"configuration": plugin.configuration},
)
configuration = plugin.save_plugin_configuration(
plugin_configuration, cleaned_data
)
configuration.name = plugin.PLUGIN_NAME
configuration.description = plugin.PLUGIN_DESCRIPTION
return configuration
def get_plugin(self, plugin_id: str) -> Optional["BasePlugin"]:
for plugin in self.plugins:
if plugin.PLUGIN_ID == plugin_id:
return plugin
return None
def fetch_taxes_data(self) -> bool:
default_value = False
return self.__run_method_on_plugins("fetch_taxes_data", default_value)
def webhook(self, request: WSGIRequest, plugin_id: str) -> HttpResponse:
split_path = request.path.split(plugin_id, maxsplit=1)
path = None
if len(split_path) == 2:
path = split_path[1]
default_value = HttpResponseNotFound()
plugin = self.get_plugin(plugin_id)
if not plugin:
return default_value
return self.__run_method_on_single_plugin(
plugin, "webhook", default_value, request, path
)
def external_obtain_access_tokens(
self, plugin_id: str, data: dict, request: WSGIRequest
) -> Optional["ExternalAccessTokens"]:
"""Obtain access tokens from authentication plugin."""
default_value = ExternalAccessTokens()
plugin = self.get_plugin(plugin_id)
return self.__run_method_on_single_plugin(
plugin, "external_obtain_access_tokens", default_value, data, request
)
def external_authentication_url(
self, plugin_id: str, data: dict, request: WSGIRequest
) -> dict:
"""Handle authentication request."""
default_value = {} # type: ignore
plugin = self.get_plugin(plugin_id)
return self.__run_method_on_single_plugin(
plugin, "external_authentication_url", default_value, data, request
)
def external_refresh(
self, plugin_id: str, data: dict, request: WSGIRequest
) -> Optional["ExternalAccessTokens"]:
"""Handle authentication refresh request."""
default_value = ExternalAccessTokens()
plugin = self.get_plugin(plugin_id)
return self.__run_method_on_single_plugin(
plugin, "external_refresh", default_value, data, request
)
def authenticate_user(self, request: WSGIRequest) -> Optional["User"]:
"""Authenticate user which should be assigned to the request."""
default_value = None
return self.__run_method_on_plugins("authenticate_user", default_value, request)
def external_logout(self, plugin_id: str, data: dict, request: WSGIRequest) -> dict:
"""Logout the user."""
default_value: Dict[str, str] = {}
plugin = self.get_plugin(plugin_id)
return self.__run_method_on_single_plugin(
plugin, "external_logout", default_value, data, request
)
def external_verify(
self, plugin_id: str, data: dict, request: WSGIRequest
) -> Tuple[Optional["User"], dict]:
"""Verify the provided authentication data."""
default_data: Dict[str, str] = dict()
default_user: Optional["User"] = None
default_value = default_user, default_data
plugin = self.get_plugin(plugin_id)
return self.__run_method_on_single_plugin(
plugin, "external_verify", default_value, data, request
)
def get_plugins_manager() -> PluginsManager:
with opentracing.global_tracer().start_active_span("get_plugins_manager"):
return PluginsManager(settings.PLUGINS)
| 36.319797
| 88
| 0.633438
|
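PluginsManager.__run_method_on_plugins threads one accumulator through every plugin via the previous_value keyword. A minimal sketch of that chaining, with hypothetical plugin classes:

from decimal import Decimal

class TaxPlugin:
    def calculate_price(self, previous_value):
        return previous_value * Decimal("1.20")   # add 20% tax

class DiscountPlugin:
    def calculate_price(self, previous_value):
        return previous_value - Decimal("5")

value = Decimal("100")                            # plays the role of default_value
for plugin in (TaxPlugin(), DiscountPlugin()):
    value = plugin.calculate_price(previous_value=value)
print(value)                                      # 120.00 - 5 -> 115.00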
981082d07568624d286a512da73b098274ed0a9d
| 29,756
|
py
|
Python
|
Blender 2.91/2.91/scripts/addons/archimesh/achm_gltools.py
|
calculusrobotics/RNNs-for-Bayesian-State-Estimation
|
2aacf86d2e447e10c840b4926d4de7bc5e46d9bc
|
[
"MIT"
] | 3
|
2019-09-16T10:29:19.000Z
|
2022-02-11T14:43:18.000Z
|
release/scripts/addons/archimesh/achm_gltools.py
|
BlazesRus/Bforartists
|
126bdd9e47cc984fd97ba5299bfb92ec5278e754
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
release/scripts/addons/archimesh/achm_gltools.py
|
BlazesRus/Bforartists
|
126bdd9e47cc984fd97ba5299bfb92ec5278e754
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# ----------------------------------------------------------
# support routines for OpenGL
# Author: Antonio Vazquez (antonioya)
#
# ----------------------------------------------------------
# noinspection PyUnresolvedReferences
import bpy
# noinspection PyUnresolvedReferences
import blf
from math import fabs, sqrt, sin, cos
# noinspection PyUnresolvedReferences
from mathutils import Vector
# noinspection PyUnresolvedReferences
from bpy_extras import view3d_utils
from .achm_room_maker import get_wall_points
# GPU
import bgl
import gpu
from gpu_extras.batch import batch_for_shader
shader = gpu.shader.from_builtin('2D_UNIFORM_COLOR') if not bpy.app.background else None
# -------------------------------------------------------------
# Handle all draw routines (OpenGL main entry point)
#
# -------------------------------------------------------------
def draw_main(context):
region = context.region
rv3d = context.space_data.region_3d
scene = context.scene
rgba = scene.archimesh_text_color
rgbaw = scene.archimesh_walltext_color
fsize = scene.archimesh_font_size
wfsize = scene.archimesh_wfont_size
space = scene.archimesh_hint_space
measure = scene.archimesh_gl_measure
dspname = scene.archimesh_gl_name
bgl.glEnable(bgl.GL_BLEND)
# Display selected or all
if scene.archimesh_gl_ghost is False:
objlist = context.selected_objects
else:
objlist = context.view_layer.objects
# ---------------------------------------
# Generate all OpenGL calls
# ---------------------------------------
for myobj in objlist:
if myobj.visible_get() is True:
# -----------------------------------------------------
# Rooms
# -----------------------------------------------------
if 'RoomGenerator' in myobj:
op = myobj.RoomGenerator[0]
draw_room_data(myobj, op, region, rv3d, rgba, rgbaw, fsize, wfsize, space, measure, dspname)
# -----------------------------------------------------
# Doors
# -----------------------------------------------------
if 'DoorObjectGenerator' in myobj:
op = myobj.DoorObjectGenerator[0]
draw_door_data(myobj, op, region, rv3d, rgba, fsize, space, measure)
# -----------------------------------------------------
# Window (Rail)
# -----------------------------------------------------
if 'WindowObjectGenerator' in myobj:
op = myobj.WindowObjectGenerator[0]
draw_window_rail_data(myobj, op, region, rv3d, rgba, fsize, space, measure)
# -----------------------------------------------------
# Window (Panel)
# -----------------------------------------------------
if 'WindowPanelGenerator' in myobj:
op = myobj.WindowPanelGenerator[0]
draw_window_panel_data(myobj, op, region, rv3d, rgba, fsize, space, measure)
# -----------------------
# restore opengl defaults
# -----------------------
bgl.glLineWidth(1)
bgl.glDisable(bgl.GL_BLEND)
# -------------------------------------------------------------
# Create OpenGL text
#
# right: Align to right
# -------------------------------------------------------------
def draw_text(x_pos, y_pos, display_text, rgba, fsize, right=False):
gap = 12
font_id = 0
blf.size(font_id, fsize, 72)
text_width, text_height = blf.dimensions(font_id, display_text)
if right is True:
newx = x_pos - text_width - gap
else:
newx = x_pos
blf.position(font_id, newx, y_pos, 0)
blf.color(font_id, rgba[0], rgba[1], rgba[2], rgba[3])
blf.draw(font_id, display_text)
return
# -------------------------------------------------------------
# Draw an OpenGL line
#
# -------------------------------------------------------------
def draw_line(v1, v2, rgba):
coords = [(v1[0], v1[1]), (v2[0], v2[1])]
batch = batch_for_shader(shader, 'LINES', {"pos": coords})
# noinspection PyBroadException
try:
if v1 is not None and v2 is not None:
shader.bind()
shader.uniform_float("color", rgba)
batch.draw(shader)
except:
pass
# -------------------------------------------------------------
# Draw room information
#
# rgba: Color
# fsize: Font size
# -------------------------------------------------------------
def draw_room_data(myobj, op, region, rv3d, rgba, rgbaw, fsize, wfsize, space, measure, dspname):
verts, activefaces, activenormals = get_wall_points(myobj)
# --------------------------
# Get line points and draw
# --------------------------
for face in activefaces:
a1 = None
b1 = None
a2 = None
b2 = None
# Bottom
for e in face:
if verts[e][2] == 0:
if a1 is None:
a1 = e
else:
b1 = e
# Top
for e in face:
if verts[e][2] != 0:
if round(verts[a1][0], 5) == round(verts[e][0], 5) and round(verts[a1][1], 5) == round(verts[e][1], 5):
a2 = e
else:
b2 = e
# Points
# a1_p = get_point((verts[a1][0], verts[a1][1], verts[a1][2]), myobj) # bottom
a2_p = get_point((verts[a2][0], verts[a2][1], verts[a2][2] + space), myobj) # top
a2_s1 = get_point((verts[a2][0], verts[a2][1], verts[a2][2]), myobj) # vertical line
a2_s2 = get_point((verts[a2][0], verts[a2][1], verts[a2][2] + space + fsize / 200), myobj) # vertical line
# b1_p = get_point((verts[b1][0], verts[b1][1], verts[b1][2]), myobj) # bottom
b2_p = get_point((verts[b2][0], verts[b2][1], verts[b2][2] + space), myobj) # top
b2_s1 = get_point((verts[b2][0], verts[b2][1], verts[b2][2]), myobj) # vertical line
b2_s2 = get_point((verts[b2][0], verts[b2][1], verts[b2][2] + space + fsize / 200), myobj) # vertical line
# converting to screen coordinates
screen_point_a = view3d_utils.location_3d_to_region_2d(region, rv3d, a2_p)
screen_point_b = view3d_utils.location_3d_to_region_2d(region, rv3d, b2_p)
screen_point_a1 = view3d_utils.location_3d_to_region_2d(region, rv3d, a2_s1)
screen_point_b1 = view3d_utils.location_3d_to_region_2d(region, rv3d, b2_s1)
screen_point_a2 = view3d_utils.location_3d_to_region_2d(region, rv3d, a2_s2)
screen_point_b2 = view3d_utils.location_3d_to_region_2d(region, rv3d, b2_s2)
# colour + line setup
bgl.glEnable(bgl.GL_BLEND)
bgl.glLineWidth(1)
# --------------------------------
# Measures
# --------------------------------
if measure is True:
# Draw text
dist = distance(a2_p, b2_p)
txtpoint3d = interpolate3d(a2_p, b2_p, fabs(dist / 2))
# add a gap
gap3d = (txtpoint3d[0], txtpoint3d[1], txtpoint3d[2] + 0.05)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
# Draw horizontal line
draw_line(screen_point_a, screen_point_b, rgba)
# Draw vertical line 1 (upper vertical)
draw_line(screen_point_a1, screen_point_a2, rgba)
# Draw vertical line 2 (upper vertical)
draw_line(screen_point_b1, screen_point_b2, rgba)
# Draw vertical line 1
draw_line(screen_point_a, screen_point_a1, rgba)
# Draw vertical line 2
draw_line(screen_point_b, screen_point_b1, rgba)
# --------------------------------
# Wall Number
# --------------------------------
if dspname is True:
for i in range(0, op.wall_num):
ap = get_point((op.walls[i].glpoint_a[0], op.walls[i].glpoint_a[1], op.walls[i].glpoint_a[2]), myobj)
bp = get_point((op.walls[i].glpoint_b[0], op.walls[i].glpoint_b[1], op.walls[i].glpoint_b[2]), myobj)
dist = distance(ap, bp)
txtpoint3d = interpolate3d(ap, bp, fabs(dist / 2))
# add a gap
gap3d = (txtpoint3d[0], txtpoint3d[1], txtpoint3d[2]) # + op.room_height / 2)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
txt = "Wall: "
if op.walls[i].a is True:
txt = "Advance: "
if op.walls[i].curved is True:
txt = "Curved: "
draw_text(txtpoint2d[0], txtpoint2d[1], txt + str(i + 1), rgbaw, wfsize)
return
# -------------------------------------------------------------
# Draw door information
#
# rgba: Color
# fsize: Font size
# -------------------------------------------------------------
def draw_door_data(myobj, op, region, rv3d, rgba, fsize, space, measure):
# Points
a_p1 = get_point(op.glpoint_a, myobj)
a_p2 = get_point((op.glpoint_a[0] - space, op.glpoint_a[1], op.glpoint_a[2]), myobj)
a_p3 = get_point((op.glpoint_a[0] - space - fsize / 200, op.glpoint_a[1], op.glpoint_a[2]), myobj)
t_p1 = get_point(op.glpoint_b, myobj)
t_p2 = get_point((op.glpoint_b[0] - space, op.glpoint_b[1], op.glpoint_b[2]), myobj)
t_p3 = get_point((op.glpoint_b[0] - space - fsize / 200, op.glpoint_b[1], op.glpoint_b[2]), myobj)
b_p1 = get_point(op.glpoint_b, myobj)
b_p2 = get_point((op.glpoint_b[0], op.glpoint_b[1], op.glpoint_b[2] + space), myobj)
b_p3 = get_point((op.glpoint_b[0], op.glpoint_b[1], op.glpoint_b[2] + space + fsize / 200), myobj)
c_p1 = get_point(op.glpoint_c, myobj)
c_p2 = get_point((op.glpoint_c[0], op.glpoint_c[1], op.glpoint_c[2] + space), myobj)
c_p3 = get_point((op.glpoint_c[0], op.glpoint_c[1], op.glpoint_c[2] + space + fsize / 200), myobj)
d_p1 = get_point(op.glpoint_d, myobj)
d_p2 = get_point((op.glpoint_d[0], op.glpoint_d[1], op.glpoint_b[2] + space + fsize / 300), myobj)
d_p3 = get_point((op.glpoint_d[0], op.glpoint_d[1], op.glpoint_d[2] - fsize / 250), myobj)
e_p1 = get_point(op.glpoint_e, myobj)
e_p2 = get_point((op.glpoint_e[0], op.glpoint_e[1], op.glpoint_b[2] + space + fsize / 300), myobj)
e_p3 = get_point((op.glpoint_e[0], op.glpoint_e[1], op.glpoint_e[2] - fsize / 250), myobj)
# converting to screen coordinates
screen_point_ap1 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p1)
screen_point_bp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p1)
screen_point_cp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p1)
screen_point_tp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p1)
screen_point_ap2 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p2)
screen_point_bp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p2)
screen_point_cp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p2)
screen_point_tp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p2)
screen_point_ap3 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p3)
screen_point_bp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p3)
screen_point_cp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p3)
screen_point_tp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p3)
screen_point_dp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, d_p1)
screen_point_dp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, d_p2)
screen_point_dp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, d_p3)
screen_point_ep1 = view3d_utils.location_3d_to_region_2d(region, rv3d, e_p1)
screen_point_ep2 = view3d_utils.location_3d_to_region_2d(region, rv3d, e_p2)
screen_point_ep3 = view3d_utils.location_3d_to_region_2d(region, rv3d, e_p3)
# colour + line setup
bgl.glEnable(bgl.GL_BLEND)
bgl.glLineWidth(1)
# --------------------------------
# Measures
# --------------------------------
if measure is True:
# Vertical
dist = distance(a_p1, t_p1)
txtpoint3d = interpolate3d(a_p1, t_p1, fabs(dist / 2))
gap3d = (a_p2[0], txtpoint3d[1], txtpoint3d[2])
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize, True)
draw_line(screen_point_ap2, screen_point_tp2, rgba)
draw_line(screen_point_ap3, screen_point_ap1, rgba)
draw_line(screen_point_tp3, screen_point_tp1, rgba)
# Horizontal
dist = distance(b_p1, c_p1)
txtpoint3d = interpolate3d(b_p1, c_p1, fabs(dist / 2))
gap3d = (txtpoint3d[0], txtpoint3d[1], b_p2[2] + 0.02)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_bp2, screen_point_cp2, rgba)
draw_line(screen_point_bp3, screen_point_bp1, rgba)
draw_line(screen_point_cp3, screen_point_cp1, rgba)
# Door size
dist = distance(d_p1, e_p1)
txtpoint3d = interpolate3d(d_p1, e_p1, fabs(dist / 2))
gap3d = (txtpoint3d[0], txtpoint3d[1], txtpoint3d[2] + 0.02)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_dp1, screen_point_ep1, rgba)
draw_line(screen_point_dp2, screen_point_dp3, rgba)
draw_line(screen_point_ep2, screen_point_ep3, rgba)
return
# -------------------------------------------------------------
# Draw window rail information
#
# rgba: Color
# fsize: Font size
# -------------------------------------------------------------
def draw_window_rail_data(myobj, op, region, rv3d, rgba, fsize, space, measure):
# Points
a_p1 = get_point(op.glpoint_a, myobj)
a_p2 = get_point((op.glpoint_a[0] - space, op.glpoint_a[1], op.glpoint_a[2]), myobj)
a_p3 = get_point((op.glpoint_a[0] - space - fsize / 200, op.glpoint_a[1], op.glpoint_a[2]), myobj)
t_p1 = get_point(op.glpoint_b, myobj)
t_p2 = get_point((op.glpoint_b[0] - space, op.glpoint_b[1], op.glpoint_b[2]), myobj)
t_p3 = get_point((op.glpoint_b[0] - space - fsize / 200, op.glpoint_b[1], op.glpoint_b[2]), myobj)
b_p1 = get_point(op.glpoint_b, myobj)
b_p2 = get_point((op.glpoint_b[0], op.glpoint_b[1], op.glpoint_b[2] + space), myobj)
b_p3 = get_point((op.glpoint_b[0], op.glpoint_b[1], op.glpoint_b[2] + space + fsize / 200), myobj)
c_p1 = get_point(op.glpoint_c, myobj)
c_p2 = get_point((op.glpoint_c[0], op.glpoint_c[1], op.glpoint_c[2] + space), myobj)
c_p3 = get_point((op.glpoint_c[0], op.glpoint_c[1], op.glpoint_c[2] + space + fsize / 200), myobj)
# converting to screen coordinates
screen_point_ap1 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p1)
screen_point_bp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p1)
screen_point_cp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p1)
screen_point_tp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p1)
screen_point_ap2 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p2)
screen_point_bp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p2)
screen_point_cp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p2)
screen_point_tp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p2)
screen_point_ap3 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p3)
screen_point_bp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p3)
screen_point_cp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p3)
screen_point_tp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p3)
# colour + line setup
bgl.glEnable(bgl.GL_BLEND)
bgl.glLineWidth(1)
# --------------------------------
# Measures
# --------------------------------
if measure is True:
# Vertical
dist = distance(a_p1, t_p1)
txtpoint3d = interpolate3d(a_p1, t_p1, fabs(dist / 2))
gap3d = (a_p2[0], txtpoint3d[1], txtpoint3d[2])
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize, True)
draw_line(screen_point_ap2, screen_point_tp2, rgba)
draw_line(screen_point_ap3, screen_point_ap1, rgba)
draw_line(screen_point_tp3, screen_point_tp1, rgba)
# Horizontal
dist = distance(b_p1, c_p1)
txtpoint3d = interpolate3d(b_p1, c_p1, fabs(dist / 2))
gap3d = (txtpoint3d[0], txtpoint3d[1], b_p2[2] + 0.02)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_bp2, screen_point_cp2, rgba)
draw_line(screen_point_bp3, screen_point_bp1, rgba)
draw_line(screen_point_cp3, screen_point_cp1, rgba)
return
# -------------------------------------------------------------
# Draw window panel information
#
# rgba: Color
# fsize: Font size
# -------------------------------------------------------------
def draw_window_panel_data(myobj, op, region, rv3d, rgba, fsize, space, measure):
# Points
a_p1 = get_point(op.glpoint_a, myobj)
a_p2 = get_point((op.glpoint_a[0] - space, op.glpoint_a[1], op.glpoint_a[2]), myobj)
a_p3 = get_point((op.glpoint_a[0] - space - fsize / 200, op.glpoint_a[1], op.glpoint_a[2]), myobj)
f_p1 = get_point((op.glpoint_c[0], op.glpoint_c[1], op.glpoint_a[2]), myobj)
f_p2 = get_point((op.glpoint_c[0] + space, op.glpoint_c[1], op.glpoint_a[2]), myobj)
f_p3 = get_point((op.glpoint_c[0] + space + fsize / 200, op.glpoint_c[1], op.glpoint_a[2]), myobj)
t_p1 = get_point(op.glpoint_b, myobj)
t_p2 = get_point((op.glpoint_b[0] - space, op.glpoint_b[1], op.glpoint_b[2]), myobj)
t_p3 = get_point((op.glpoint_b[0] - space - fsize / 200, op.glpoint_b[1], op.glpoint_b[2]), myobj)
b_p1 = get_point(op.glpoint_b, myobj)
b_p2 = get_point((op.glpoint_b[0], op.glpoint_b[1], op.glpoint_b[2] + space), myobj)
b_p3 = get_point((op.glpoint_b[0], op.glpoint_b[1], op.glpoint_b[2] + space + fsize / 200), myobj)
c_p1 = get_point(op.glpoint_c, myobj)
c_p2 = get_point((op.glpoint_c[0], op.glpoint_c[1], op.glpoint_c[2] + space), myobj)
c_p3 = get_point((op.glpoint_c[0], op.glpoint_c[1], op.glpoint_c[2] + space + fsize / 200), myobj)
d_p1 = get_point(op.glpoint_c, myobj)
d_p2 = get_point((op.glpoint_c[0] + space, op.glpoint_c[1], op.glpoint_c[2]), myobj)
d_p3 = get_point((op.glpoint_c[0] + space + fsize / 200, op.glpoint_c[1], op.glpoint_c[2]), myobj)
g_p2 = get_point((op.glpoint_d[0], op.glpoint_d[1], 0), myobj)
g_p3 = get_point((op.glpoint_d[0], op.glpoint_d[1], op.glpoint_d[2]), myobj)
g_p4 = get_point((op.glpoint_d[0], op.glpoint_d[1], op.glpoint_d[2] + space), myobj)
g_p5 = get_point((op.glpoint_d[0], op.glpoint_d[1], op.glpoint_d[2] + space + fsize / 200), myobj)
h_p1 = get_point((op.glpoint_a[0], op.glpoint_a[1], op.glpoint_a[2] - space), myobj)
h_p2 = get_point((op.glpoint_a[0], op.glpoint_a[1], op.glpoint_a[2] - space - fsize / 200), myobj)
h_p3 = get_point((op.glpoint_c[0], op.glpoint_a[1], op.glpoint_a[2]), myobj)
h_p4 = get_point((op.glpoint_c[0], op.glpoint_a[1], op.glpoint_a[2] - space), myobj)
h_p5 = get_point((op.glpoint_c[0], op.glpoint_a[1], op.glpoint_a[2] - space - fsize / 200), myobj)
# converting to screen coordinates
screen_point_ap1 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p1)
screen_point_bp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p1)
screen_point_cp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p1)
screen_point_tp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p1)
screen_point_ap2 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p2)
screen_point_bp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p2)
screen_point_cp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p2)
screen_point_tp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p2)
screen_point_ap3 = view3d_utils.location_3d_to_region_2d(region, rv3d, a_p3)
screen_point_bp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, b_p3)
screen_point_cp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, c_p3)
screen_point_tp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, t_p3)
screen_point_dp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, d_p1)
screen_point_dp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, d_p2)
screen_point_dp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, d_p3)
screen_point_fp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, f_p1)
screen_point_fp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, f_p2)
screen_point_fp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, f_p3)
screen_point_gp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, g_p2)
screen_point_gp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, g_p3)
screen_point_gp4 = view3d_utils.location_3d_to_region_2d(region, rv3d, g_p4)
screen_point_gp5 = view3d_utils.location_3d_to_region_2d(region, rv3d, g_p5)
# colour + line setup
bgl.glEnable(bgl.GL_BLEND)
bgl.glLineWidth(1)
# --------------------------------
# Measures
# --------------------------------
if measure is True:
# Vertical (right)
dist = distance(a_p1, t_p1)
txtpoint3d = interpolate3d(a_p1, t_p1, fabs(dist / 2))
gap3d = (a_p2[0], txtpoint3d[1], txtpoint3d[2])
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize, True)
draw_line(screen_point_ap2, screen_point_tp2, rgba)
draw_line(screen_point_ap3, screen_point_ap1, rgba)
draw_line(screen_point_tp3, screen_point_tp1, rgba)
# Vertical (Left)
dist = distance(f_p1, d_p1)
txtpoint3d = interpolate3d(f_p1, d_p1, fabs(dist / 2))
gap3d = (f_p2[0], txtpoint3d[1], txtpoint3d[2])
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_fp2, screen_point_dp2, rgba)
draw_line(screen_point_fp1, screen_point_fp3, rgba)
draw_line(screen_point_dp1, screen_point_dp3, rgba)
# Horizontal (not triangle nor arch)
if op.UST != "4" and op.UST != "2":
dist = distance(b_p1, c_p1)
txtpoint3d = interpolate3d(b_p2, c_p2, fabs(dist / 2))
gap3d = (txtpoint3d[0], txtpoint3d[1], txtpoint3d[2] + 0.05)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_bp2, screen_point_cp2, rgba)
draw_line(screen_point_bp3, screen_point_bp1, rgba)
draw_line(screen_point_cp3, screen_point_cp1, rgba)
else:
dist = distance(b_p1, g_p3)
txtpoint3d = interpolate3d(b_p2, g_p4, fabs(dist / 2))
gap3d = (txtpoint3d[0], txtpoint3d[1], txtpoint3d[2] + 0.05)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize, True)
dist = distance(g_p3, c_p1)
txtpoint3d = interpolate3d(g_p4, c_p2, fabs(dist / 2))
gap3d = (txtpoint3d[0], txtpoint3d[1], txtpoint3d[2] + 0.05)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_bp2, screen_point_gp4, rgba)
draw_line(screen_point_gp4, screen_point_cp2, rgba)
draw_line(screen_point_bp3, screen_point_bp1, rgba)
draw_line(screen_point_cp3, screen_point_cp1, rgba)
draw_line(screen_point_gp3, screen_point_gp5, rgba)
# Only for Triangle or arch
if op.UST == "2" or op.UST == "4":
dist = distance(g_p2, g_p3)
txtpoint3d = interpolate3d(g_p2, g_p3, fabs(dist / 2))
gap3d = (txtpoint3d[0] + 0.05, txtpoint3d[1], txtpoint3d[2])
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_gp2, screen_point_gp3, rgba)
# Only for Triangle and Inclines or arch
if op.UST == "3" or op.UST == "4" or op.UST == "2":
screen_point_hp1 = view3d_utils.location_3d_to_region_2d(region, rv3d, h_p1)
screen_point_hp2 = view3d_utils.location_3d_to_region_2d(region, rv3d, h_p2)
screen_point_hp3 = view3d_utils.location_3d_to_region_2d(region, rv3d, h_p3)
screen_point_hp4 = view3d_utils.location_3d_to_region_2d(region, rv3d, h_p4)
screen_point_hp5 = view3d_utils.location_3d_to_region_2d(region, rv3d, h_p5)
dist = distance(h_p1, h_p3)
txtpoint3d = interpolate3d(h_p1, h_p3, fabs(dist / 2))
gap3d = (txtpoint3d[0], txtpoint3d[1], txtpoint3d[2] - space - 0.05)
txtpoint2d = view3d_utils.location_3d_to_region_2d(region, rv3d, gap3d)
draw_text(txtpoint2d[0], txtpoint2d[1], "%6.2f" % dist, rgba, fsize)
draw_line(screen_point_ap1, screen_point_hp2, rgba)
draw_line(screen_point_hp3, screen_point_hp5, rgba)
draw_line(screen_point_hp1, screen_point_hp4, rgba)
return
# --------------------------------------------------------------------
# Distance between 2 points in 3D space
# v1: first point
# v2: second point
# return: distance
# --------------------------------------------------------------------
def distance(v1, v2):
return sqrt((v2[0] - v1[0]) ** 2 + (v2[1] - v1[1]) ** 2 + (v2[2] - v1[2]) ** 2)
# --------------------------------------------------------------------
# Interpolate 2 points in 3D space
# v1: first point
# v2: second point
# d1: distance
# return: interpolate point
# --------------------------------------------------------------------
def interpolate3d(v1, v2, d1):
# calculate vector
v = (v2[0] - v1[0], v2[1] - v1[1], v2[2] - v1[2])
# calculate distance between points
d0 = distance(v1, v2)
# calculate interpolate factor (distance from origin / distance total)
# if d1 > d0, the point is extrapolated beyond v2 along the same line
if d0 > 0:
x = d1 / d0
else:
x = d1
final = (v1[0] + (v[0] * x), v1[1] + (v[1] * x), v1[2] + (v[2] * x))
return final
# --------------------------------------------------------------------
# Get point rotated and relative to parent
# v1: point
# mainobject
# --------------------------------------------------------------------
def get_point(v1, mainobject):
# Using World Matrix
vt = Vector((v1[0], v1[1], v1[2], 1))
m4 = mainobject.matrix_world
vt2 = m4 @ vt
v2 = [vt2[0], vt2[1], vt2[2]]
return v2
# --------------------------------------------------------------------
# rotate point EULER X
# v1: point
# rad: Angles of rotation in Radians
# --------------------------------------------------------------------
def rotate_x(v1, rot):
v2 = [0, 0, 0]
radx = rot[0]
# X axis
v2[0] = v1[0]
v2[1] = v1[1] * cos(radx) - v1[2] * sin(radx)
v2[2] = v1[1] * sin(radx) + v1[2] * cos(radx)
return v2
# --------------------------------------------------------------------
# rotate point EULER Y
# v1: point
# rad: Angles of rotation in Radians
# --------------------------------------------------------------------
def rotate_y(v1, rot):
v2 = [0, 0, 0]
rady = rot[1]
# Y axis
v2[0] = v1[0] * cos(rady) + v1[2] * sin(rady)
v2[1] = v1[1]
v2[2] = v1[2] * cos(rady) - v1[0] * sin(rady)
return v2
# --------------------------------------------------------------------
# rotate point EULER Z
# v1: point
# rad: Angles of rotation in Radians
# --------------------------------------------------------------------
def rotate_z(v1, rot):
v2 = [0, 0, 0]
radz = rot[2]
# Z axis
v2[0] = v1[0] * cos(radz) - v1[1] * sin(radz)
v2[1] = v1[0] * sin(radz) + v1[1] * cos(radz)
v2[2] = v1[2]
return v2
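# Illustrative sketch (comments only): composing the three helpers applies an
# XYZ Euler rotation; e.g. for rot = (0, 0, pi / 2):
#   rotate_z(rotate_y(rotate_x([1, 0, 0], rot), rot), rot)  # -> ~[0, 1, 0]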
| 43.376093
| 119
| 0.594838
|
d8203fe107803869f28bfee8d1f192c89e6dbddf
| 1,011
|
py
|
Python
|
RAISoft/CELIV/ControlPanel.py
|
daveraees/EMA_Test_Lab
|
a3073c5ec205d6ee327a993b38e92698c12cb0a6
|
[
"MIT"
] | null | null | null |
RAISoft/CELIV/ControlPanel.py
|
daveraees/EMA_Test_Lab
|
a3073c5ec205d6ee327a993b38e92698c12cb0a6
|
[
"MIT"
] | null | null | null |
RAISoft/CELIV/ControlPanel.py
|
daveraees/EMA_Test_Lab
|
a3073c5ec205d6ee327a993b38e92698c12cb0a6
|
[
"MIT"
] | null | null | null |
import wx, wx.html
import sys
aboutText = """<p>Sorry, there is no information about this program. It is
running on version %(wxpy)s of <b>wxPython</b> and %(python)s of <b>Python</b>.
See <a href="http://wiki.wxpython.org">wxPython Wiki</a></p>"""
class CELIVcontrolDialog(wx.Dialog):
def __init__(self, parent, script, **kwargs):
self.script = script
wx.Dialog.__init__(self, None, -1, "About <<project>>",
style=wx.DEFAULT_DIALOG_STYLE|wx.THICK_FRAME|wx.RESIZE_BORDER|
wx.TAB_TRAVERSAL)
hwin = wx.html.HtmlWindow(self, -1, size=(400,200))
vers = {}
vers["python"] = sys.version.split()[0]
vers["wxpy"] = wx.VERSION_STRING
hwin.SetPage(aboutText % vers)
btn = hwin.FindWindowById(wx.ID_OK)
irep = hwin.GetInternalRepresentation()
hwin.SetSize((irep.GetWidth()+25, irep.GetHeight()+10))
self.SetClientSize(hwin.GetSize())
self.CentreOnParent(wx.BOTH)
self.SetFocus()
| 34.862069
| 79
| 0.630069
|
254085522fb1ef09e0c55d729a444b13bd2931e5
| 11,047
|
py
|
Python
|
pip_services3_mongodb/persistence/IdentifiableMongoDbPersistence.py
|
pip-services3-python/pip-services3-mongodb-python
|
81ff75d6eab96afb7065a403e902b7f2992b8662
|
[
"MIT"
] | null | null | null |
pip_services3_mongodb/persistence/IdentifiableMongoDbPersistence.py
|
pip-services3-python/pip-services3-mongodb-python
|
81ff75d6eab96afb7065a403e902b7f2992b8662
|
[
"MIT"
] | 1
|
2020-11-03T00:57:01.000Z
|
2020-11-10T20:17:49.000Z
|
pip_services3_mongodb/persistence/IdentifiableMongoDbPersistence.py
|
pip-services3-python/pip-services3-mongodb-python
|
81ff75d6eab96afb7065a403e902b7f2992b8662
|
[
"MIT"
] | 1
|
2020-03-19T21:37:43.000Z
|
2020-03-19T21:37:43.000Z
|
# -*- coding: utf-8 -*-
"""
pip_services3_mongodb.persistence.IdentifiableMongoDbPersistence
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Identifiable MongoDb persistence implementation
:copyright: Conceptual Vision Consulting LLC 2018-2019, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from copy import deepcopy
from typing import Any, Optional, List, TypeVar
import pymongo
from pip_services3_commons.data import AnyValueMap, IdGenerator
from .MongoDbPersistence import MongoDbPersistence
T = TypeVar('T') # Declare type variable
class IdentifiableMongoDbPersistence(MongoDbPersistence):
"""
Abstract persistence component that stores data in MongoDB
and implements a number of CRUD operations over data items with unique ids.
The data items must implement IIdentifiable interface.
In basic scenarios child classes shall only override :func:`get_page_by_filter <pip_services3_mongodb.persistence.MongoDbPersistence.get_page_by_filter>`,
:func:`get_list_by_filter` or :func:`delete_by_filter` operations with a specific filter function.
All other operations can be used out of the box.
In complex scenarios child classes can implement additional operations by
accessing the **self._collection** property directly.
### Configuration parameters ###
- connection(s):
- discovery_key: (optional) a key to retrieve the connection from :class:`IDiscovery <pip_services3_components.connect.IDiscovery.IDiscovery>`
- host: host name or IP address
- port: port number (default: 27017)
- uri: resource URI or connection string with all parameters in it
- credential(s):
- store_key: (optional) a key to retrieve the credentials from :class:`ICredentialStore <pip_services3_components.auth.ICredentialStore.ICredentialStore>`
- username: (optional) user name
- password: (optional) user password
- options:
- max_pool_size: (optional) maximum connection pool size (default: 2)
- keep_alive: (optional) enable connection keep alive (default: true)
- connect_timeout: (optional) connection timeout in milliseconds (default: 5000)
- socket_timeout: (optional) socket timeout in milliseconds (default: 360000)
- auto_reconnect: (optional) enable auto reconnection (default: true)
- reconnect_interval: (optional) reconnection interval in milliseconds (default: 1000)
- max_page_size: (optional) maximum page size (default: 100)
- replica_set: (optional) name of replica set
- ssl: (optional) enable SSL connection (default: false)
- auth_source: (optional) authentication source
- debug: (optional) enable debug output (default: false).
### References ###
- `*:logger:*:*:1.0` (optional) :class:`ILogger <pip_services3_components.log.ILogger.ILogger>` components to pass log messages
- `*:discovery:*:*:1.0` (optional) :class:`IDiscovery <pip_services3_components.connect.IDiscovery.IDiscovery>` services
- `*:credential-store:*:*:1.0` (optional) :class:`ICredentialStore <pip_services3_components.auth.ICredentialStore.ICredentialStore>` stores to resolve credentials
Example:
.. code-block:: python
class MyMongoDbPersistence(MongoDbPersistence):
def __init__(self):
super(MyMongoDbPersistence, self).__init__("mydata", MyData)
def get_page_by_filter(self, correlation_id, filter, paging, sort = None, select = None):
return super().get_page_by_filter(correlation_id, filter, paging, None, None)
persistence = MyMongoDbPersistence()
persistence.configure(ConfigParams.from_tuples("host", "localhost", "port", 27017))
persistence.open("123")
persistence.create("123", {'id': "1", 'name': "ABC"})
mydata = persistence.get_page_by_filter("123", FilterParams.from_tuples("name", "ABC"), None, None)
print(mydata)
persistence.delete_by_id("123", "1")
# ...
"""
def __init__(self, collection: str = None):
"""
Creates a new instance of the persistence component.
:param collection: (optional) a collection name.
"""
super(IdentifiableMongoDbPersistence, self).__init__(collection)
# Flag to turn on automated string ID generation
self._auto_generate_id: bool = True
def _convert_from_public_partial(self, value: Any) -> Any:
"""
Converts the given object from the public partial format.
:param value: the object to convert from the public partial format.
:return: the initial object.
"""
return self._convert_from_public(value)
def get_list_by_ids(self, correlation_id: Optional[str], ids: List[Any]) -> List[T]:
"""
Gets a list of data items retrieved by given unique ids.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param ids: ids of data items to be retrieved
:return: a data list of results by ids.
"""
filters = {'_id': {'$in': ids}}
return self.get_list_by_filter(correlation_id, filters)
def get_one_by_id(self, correlation_id: Optional[str], id: Any) -> T:
"""
Gets a data item by its unique id.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param id: an id of data item to be retrieved.
:return: data item by id.
"""
item = self._collection.find_one({'_id': id})
if item is None:
self._logger.trace(correlation_id, "Nothing found from %s with id = %s", self._collection_name, id)
else:
self._logger.trace(correlation_id, "Retrieved from %s with id = %s", self._collection_name, id)
item = self._convert_to_public(item)
return item
def create(self, correlation_id: Optional[str], item: T) -> T:
"""
Creates a data item.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param item: an item to be created.
:return: a created item
"""
item = self._convert_from_public(item)
new_item = dict(item)
# Replace _id or generate a new one
new_item['_id'] = new_item.pop('id', None)
if new_item['_id'] is None and self._auto_generate_id:
new_item['_id'] = IdGenerator.next_long()
return super().create(correlation_id, new_item)
def set(self, correlation_id: Optional[str], item: T) -> T:
"""
Sets a data item. If the data item exists it updates it, otherwise it creates a new data item.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param item: an item to be set.
:return: an updated item
"""
item = self._convert_from_public(item)
new_item = dict(item)
# Replace _id or generate a new one
new_item['_id'] = new_item.pop('id', None)
if new_item['_id'] is None and self._auto_generate_id:
new_item['_id'] = IdGenerator.next_long()
_id = new_item['_id']
new_item = self._convert_from_public(new_item)
item = self._collection.find_one_and_update(
{'_id': _id}, {'$set': new_item},
return_document=pymongo.ReturnDocument.AFTER,
upsert=True
)
if item is not None:
self._logger.trace(correlation_id, "Set in %s with id = %s", self._collection_name, item['id'])
item = self._convert_to_public(item)
return item
def update(self, correlation_id: Optional[str], item: T) -> Optional[T]:
"""
Updates a data item.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param item: an item to be updated.
:return: an updated item.
"""
if item is None or item.id is None:
return
new_item = deepcopy(item)
new_item = self._convert_from_public(new_item)
_id = item.id
result = self._collection.find_one_and_update(
{'_id': _id}, {'$set': new_item},
return_document=pymongo.ReturnDocument.AFTER
)
new_item = self._convert_to_public(result)
self._logger.trace(correlation_id, "Updated in %s with id = %s", self._collection_name, new_item.id)
return new_item
def update_partially(self, correlation_id: Optional[str], id: Any, data: AnyValueMap) -> T:
"""
Updates only few selected fields in a data item.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param id: an id of data item to be updated.
:param data: a map with fields to be updated.
:return: an updated item.
"""
new_item = data.get_as_object() if isinstance(data, AnyValueMap) else dict(data)
new_item.pop('_id', None)
new_item.pop('id', None)
new_item = self._convert_from_public_partial(new_item)
item = self._collection.find_one_and_update(
{'_id': id}, {'$set': new_item},
return_document=pymongo.ReturnDocument.AFTER
)
self._logger.trace(correlation_id, "Updated partially in %s with id = %s", self._collection_name, id)
item = self._convert_to_public(item)
return item
# The method must return deleted value to be able to do clean up like removing references
def delete_by_id(self, correlation_id: Optional[str], id: Any) -> T:
"""
Deletes a data item by its unique id.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param id: an id of the item to be deleted
:return: a deleted item.
"""
item = self._collection.find_one_and_delete({'_id': id})
self._logger.trace(correlation_id, "Deleted from %s with id = %s", self._collection_name, id)
item = self._convert_to_public(item)
return item
def delete_by_ids(self, correlation_id: Optional[str], ids: List[Any]):
"""
Deletes multiple data items by their unique ids.
:param correlation_id: (optional) transaction id to trace execution through call chain.
:param ids: ids of data items to be deleted.
"""
filter = {'_id': {'$in': ids}}
self.delete_by_filter(correlation_id, filter)
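# Illustrative sketch (comments only, not part of the library): the id-based
# helpers above translate a list of ids into a single MongoDB "$in" filter, so
#   persistence.get_list_by_ids("123", ["1", "2"])
# queries the collection with {'_id': {'$in': ["1", "2"]}}, and
#   persistence.delete_by_ids("123", ["1", "2"])
# deletes documents matching the same filter.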
| 40.170909
| 182
| 0.631755
|
9c35006ca79179056d8898a34bff925c742ed73b
| 4,793
|
py
|
Python
|
isi_sdk_7_2/isi_sdk_7_2/models/snapshot_aliases_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_7_2/isi_sdk_7_2/models/snapshot_aliases_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_7_2/isi_sdk_7_2/models/snapshot_aliases_extended.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 2
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from isi_sdk_7_2.models.snapshot_alias_extended import SnapshotAliasExtended # noqa: F401,E501
class SnapshotAliasesExtended(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'resume': 'str',
'snapshots': 'list[SnapshotAliasExtended]',
'total': 'int'
}
attribute_map = {
'resume': 'resume',
'snapshots': 'snapshots',
'total': 'total'
}
def __init__(self, resume=None, snapshots=None, total=None): # noqa: E501
"""SnapshotAliasesExtended - a model defined in Swagger""" # noqa: E501
self._resume = None
self._snapshots = None
self._total = None
self.discriminator = None
if resume is not None:
self.resume = resume
if snapshots is not None:
self.snapshots = snapshots
if total is not None:
self.total = total
@property
def resume(self):
"""Gets the resume of this SnapshotAliasesExtended. # noqa: E501
Resume token value to use in subsequent calls for continuation. # noqa: E501
:return: The resume of this SnapshotAliasesExtended. # noqa: E501
:rtype: str
"""
return self._resume
@resume.setter
def resume(self, resume):
"""Sets the resume of this SnapshotAliasesExtended.
Resume token value to use in subsequent calls for continuation. # noqa: E501
:param resume: The resume of this SnapshotAliasesExtended. # noqa: E501
:type: str
"""
self._resume = resume
@property
def snapshots(self):
"""Gets the snapshots of this SnapshotAliasesExtended. # noqa: E501
:return: The snapshots of this SnapshotAliasesExtended. # noqa: E501
:rtype: list[SnapshotAliasExtended]
"""
return self._snapshots
@snapshots.setter
def snapshots(self, snapshots):
"""Sets the snapshots of this SnapshotAliasesExtended.
:param snapshots: The snapshots of this SnapshotAliasesExtended. # noqa: E501
:type: list[SnapshotAliasExtended]
"""
self._snapshots = snapshots
@property
def total(self):
"""Gets the total of this SnapshotAliasesExtended. # noqa: E501
Total number of items available. # noqa: E501
:return: The total of this SnapshotAliasesExtended. # noqa: E501
:rtype: int
"""
return self._total
@total.setter
def total(self, total):
"""Sets the total of this SnapshotAliasesExtended.
Total number of items available. # noqa: E501
:param total: The total of this SnapshotAliasesExtended. # noqa: E501
:type: int
"""
self._total = total
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SnapshotAliasesExtended):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 28.02924
| 95
| 0.586898
|
74ef8d86b2d9d2c640b8495b8a5a9f76ad5344e2
| 6,501
|
py
|
Python
|
kubernetes_asyncio/client/models/v1alpha1_subject.py
|
aK0nshin/kubernetes_asyncio
|
aef9edcc1f8671a5b1bba9f4684bde890176b19c
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/models/v1alpha1_subject.py
|
aK0nshin/kubernetes_asyncio
|
aef9edcc1f8671a5b1bba9f4684bde890176b19c
|
[
"Apache-2.0"
] | null | null | null |
kubernetes_asyncio/client/models/v1alpha1_subject.py
|
aK0nshin/kubernetes_asyncio
|
aef9edcc1f8671a5b1bba9f4684bde890176b19c
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: v1.14.7
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
class V1alpha1Subject(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'kind': 'str',
'name': 'str',
'namespace': 'str'
}
attribute_map = {
'api_version': 'apiVersion',
'kind': 'kind',
'name': 'name',
'namespace': 'namespace'
}
def __init__(self, api_version=None, kind=None, name=None, namespace=None): # noqa: E501
"""V1alpha1Subject - a model defined in OpenAPI""" # noqa: E501
self._api_version = None
self._kind = None
self._name = None
self._namespace = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
self.kind = kind
self.name = name
if namespace is not None:
self.namespace = namespace
@property
def api_version(self):
"""Gets the api_version of this V1alpha1Subject. # noqa: E501
APIVersion holds the API group and version of the referenced subject. Defaults to \"v1\" for ServiceAccount subjects. Defaults to \"rbac.authorization.k8s.io/v1alpha1\" for User and Group subjects. # noqa: E501
:return: The api_version of this V1alpha1Subject. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1alpha1Subject.
APIVersion holds the API group and version of the referenced subject. Defaults to \"v1\" for ServiceAccount subjects. Defaults to \"rbac.authorization.k8s.io/v1alpha1\" for User and Group subjects. # noqa: E501
:param api_version: The api_version of this V1alpha1Subject. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def kind(self):
"""Gets the kind of this V1alpha1Subject. # noqa: E501
Kind of object being referenced. Values defined by this API group are \"User\", \"Group\", and \"ServiceAccount\". If the Authorizer does not recognize the kind value, the Authorizer should report an error. # noqa: E501
:return: The kind of this V1alpha1Subject. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1alpha1Subject.
Kind of object being referenced. Values defined by this API group are \"User\", \"Group\", and \"ServiceAccount\". If the Authorizer does not recognize the kind value, the Authorizer should report an error. # noqa: E501
:param kind: The kind of this V1alpha1Subject. # noqa: E501
:type: str
"""
if kind is None:
raise ValueError("Invalid value for `kind`, must not be `None`") # noqa: E501
self._kind = kind
@property
def name(self):
"""Gets the name of this V1alpha1Subject. # noqa: E501
Name of the object being referenced. # noqa: E501
:return: The name of this V1alpha1Subject. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this V1alpha1Subject.
Name of the object being referenced. # noqa: E501
:param name: The name of this V1alpha1Subject. # noqa: E501
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def namespace(self):
"""Gets the namespace of this V1alpha1Subject. # noqa: E501
Namespace of the referenced object. If the object kind is non-namespace, such as \"User\" or \"Group\", and this value is not empty the Authorizer should report an error. # noqa: E501
:return: The namespace of this V1alpha1Subject. # noqa: E501
:rtype: str
"""
return self._namespace
@namespace.setter
def namespace(self, namespace):
"""Sets the namespace of this V1alpha1Subject.
Namespace of the referenced object. If the object kind is non-namespace, such as \"User\" or \"Group\", and this value is not empty the Authorizer should report an error. # noqa: E501
:param namespace: The namespace of this V1alpha1Subject. # noqa: E501
:type: str
"""
self._namespace = namespace
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1alpha1Subject):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 32.343284
| 229
| 0.599446
|
4738d4a01f86b126304d06043d5766d50e75b7d2
| 1,993
|
py
|
Python
|
project3/problem-4/sort_012.py
|
jurayev/algorithms-datastructures-udacity
|
e29149d8fdf65c27a8ceed205b6640e6a4c36e5a
|
[
"MIT"
] | 1
|
2020-05-08T17:52:19.000Z
|
2020-05-08T17:52:19.000Z
|
project3/problem-4/sort_012.py
|
jurayev/algorithms-datastructures-udacity
|
e29149d8fdf65c27a8ceed205b6640e6a4c36e5a
|
[
"MIT"
] | null | null | null |
project3/problem-4/sort_012.py
|
jurayev/algorithms-datastructures-udacity
|
e29149d8fdf65c27a8ceed205b6640e6a4c36e5a
|
[
"MIT"
] | null | null | null |
"""
This problem is solved with a single traversal using a three-pointer approach, which gives a runtime of O(n).
The sorting is performed in-place, so the space complexity is O(1).
Complexities:
* `Runtime O(n)`, where n is the input list size. This is achieved as follows: sort_012(input_list) implements a while loop
that traverses the input list exactly n times by keeping track of two index pointers, front_index and next_two.
Exactly one pointer moves by one position (front_index and next_zero are incremented; next_two is decremented) on each
iteration of the loop. Once front_index passes next_two, the single n-step traversal is done.
* `Space O(1)`, in-place sorting requires no additional space
"""
def sort_012(input_list):
"""
Given an input array consisting of only 0, 1, and 2, sort the array in a single traversal.
Args:
input_list(list): List to be sorted
"""
next_zero = 0
next_two = len(input_list) - 1
front_index = 0
while front_index <= next_two:
if input_list[front_index] == 0:
input_list[front_index], input_list[next_zero] = input_list[next_zero], 0
next_zero += 1
front_index += 1
elif input_list[front_index] == 2:
input_list[front_index], input_list[next_two] = input_list[next_two], 2
next_two -= 1
else:
front_index += 1
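# Illustrative trace (comments only) of the three-pointer sweep on [2, 0, 1]:
#   front_index=0: value 2 -> swap with next_two=2 -> [1, 0, 2], next_two=1
#   front_index=0: value 1 -> advance front_index to 1
#   front_index=1: value 0 -> swap with next_zero=0 -> [0, 1, 2], next_zero=1, front_index=2
#   front_index (2) > next_two (1) -> traversal complete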
def _test_function(test_case):
sort_012(test_case)
if test_case == sorted(test_case):
print("TEST PASSED")
else:
print("TEST FAILED")
print("-"*10, "BEGIN TESTING", "-"*10, "\n")
_test_function([0, 0, 2, 2, 2, 1, 1, 1, 2, 0, 2])
_test_function([2, 1, 2, 0, 0, 2, 1, 0, 1, 0, 0, 2, 2, 2, 1, 2, 0, 0, 0, 2, 1, 0, 2, 0, 0, 1])
_test_function([0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2])
_test_function([2, 0, 1])
_test_function([2, 2, 2, 2, 2, 0, 1, 2, 2, 2, 2])
print("\n", "-"*10, "TESTING COMPLETED", "-"*10)
| 39.078431
| 133
| 0.646262
|
a7e324ea8e7384cb8acef4baa32fca09e8e560d9
| 649
|
py
|
Python
|
lambda_cron/__init__.py
|
MediaMath/lambda-cron
|
2545e9fdeced7ebeaba2f98d02891cc6db7546e2
|
[
"Apache-2.0"
] | 22
|
2017-10-27T11:37:58.000Z
|
2021-11-09T09:35:37.000Z
|
lambda_cron/__init__.py
|
MediaMath/lambda-cron
|
2545e9fdeced7ebeaba2f98d02891cc6db7546e2
|
[
"Apache-2.0"
] | 1
|
2018-03-21T18:31:01.000Z
|
2018-03-21T18:31:01.000Z
|
lambda_cron/__init__.py
|
MediaMath/lambda-cron
|
2545e9fdeced7ebeaba2f98d02891cc6db7546e2
|
[
"Apache-2.0"
] | 3
|
2017-10-27T16:49:42.000Z
|
2018-11-03T04:14:10.000Z
|
# Copyright (C) 2016 MediaMath <http://www.mediamath.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.2.1'
__all__ = ["cli", "aws"]
| 36.055556
| 74
| 0.747304
|
bbca3b76d699a530d41f1f982b30658f4dc44d5a
| 557
|
py
|
Python
|
05_scrapy/python_04/python_04/items.py
|
2018-B-GR1-Python/Velasco-Yepez-Andres-David
|
0c017d6e5f169f31207ddec5ceffc8dd82d327eb
|
[
"MIT"
] | null | null | null |
05_scrapy/python_04/python_04/items.py
|
2018-B-GR1-Python/Velasco-Yepez-Andres-David
|
0c017d6e5f169f31207ddec5ceffc8dd82d327eb
|
[
"MIT"
] | null | null | null |
05_scrapy/python_04/python_04/items.py
|
2018-B-GR1-Python/Velasco-Yepez-Andres-David
|
0c017d6e5f169f31207ddec5ceffc8dd82d327eb
|
[
"MIT"
] | 1
|
2019-10-21T19:27:12.000Z
|
2019-10-21T19:27:12.000Z
|
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
from scrapy.loader.processors import MapCompose
def shorten_amazon_link(link):
id_producto = link.split('/')[-1]  # last element of the URL path
short_link = 'https://www.amazon.com/dp/' + id_producto
return short_link
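# Illustrative example (hypothetical URL): for a product link whose last path
# element is the product id, e.g. 'https://www.amazon.com/Some-Product/dp/B00EXAMPLE',
# split('/')[-1] yields 'B00EXAMPLE' and the function returns
# 'https://www.amazon.com/dp/B00EXAMPLE'.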
class ProductoItem(scrapy.Item):
titulo = scrapy.Field()
precio = scrapy.Field()
link = scrapy.Field(
input_processor=MapCompose(shorten_amazon_link)
)
| 25.318182
| 57
| 0.712747
|
ae7fd7e24c799ac776f1f4de1e7421261821df8f
| 33,041
|
py
|
Python
|
lib/capablerobot_usbhub.py
|
d-c-d/CapableRobot_USBHub_Firmware
|
ec66a90dfc572a701b62129fdfa1774e5d732c8e
|
[
"MIT"
] | 1
|
2019-05-14T17:48:03.000Z
|
2019-05-14T17:48:03.000Z
|
lib/capablerobot_usbhub.py
|
d-c-d/CapableRobot_USBHub_Firmware
|
ec66a90dfc572a701b62129fdfa1774e5d732c8e
|
[
"MIT"
] | null | null | null |
lib/capablerobot_usbhub.py
|
d-c-d/CapableRobot_USBHub_Firmware
|
ec66a90dfc572a701b62129fdfa1774e5d732c8e
|
[
"MIT"
] | null | null | null |
# The MIT License (MIT)
#
# Copyright (c) 2019 Chris Osterwood for Capable Robot Components
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import time
import os, sys
import board
import digitalio
import analogio
import supervisor
import microcontroller
from micropython import const
from adafruit_bus_device.i2c_device import I2CDevice
import capablerobot_eeprom
import capablerobot_ucs2113
# pylint: disable=bad-whitespace
__version__ = const(0x02)
_I2C_ADDR_USB = const(0x2D)
_REVISION = const(0x0000)
_RMT_IDENT = const(0x0904)
_RMT_READ = const(0x0914)
_RMT_WRITE = const(0x0924)
_VENDOR_ID = const(0x3000)
_PRODUCT_ID = const(0x3002)
_DEVICE_ID = const(0x3004)
_HUB_CONFIG_1 = const(0x3006)
_HUB_CONFIG_2 = const(0x3007)
_HUB_CONFIG_3 = const(0x3008)
_PORT_SWAP = const(0x30FA)
_HUB_CTRL = const(0x3104)
_SUSPEND = const(0x3197)
_POWER_STATUS = const(0x30E5)
_REMAP_12 = const(0x30FB)
_REMAP_34 = const(0x30FC)
_POWER_STATE = const(0x3100)
_CONNECTION = const(0x3194)
_DEVICE_SPEED = const(0x3195)
_POWER_SELECT_1 = const(0x3C00)
_POWER_SELECT_2 = const(0x3C04)
_POWER_SELECT_3 = const(0x3C08)
_POWER_SELECT_4 = const(0x3C0C)
_CHARGE_CONFIG = const(0x343C)
## Registers here are under BFD2 base address
_MANUF_DESC = const(0x3204)
_PRODUCT_DESC = const(0x3244)
_MANUF_DESC_LEN = const(0x346A)
_PRODUCT_DESC_LEN = const(0x3472)
_MANUF_STRING = "Capable Robot Components"
_CFG_REG_CMD = bytearray([0x99, 0x37, 0x00])
_DEFAULT_PORT_MAP = [1, 2, 3, 4]
_CUSTOM_PORT_MAP = [2, 4, 1, 3]
_I2C_ADDR_MCP = const(0x20)
_GPIO = const(0x09)
_CRC8_POLYNOMIAL = const(0x31)
_CRC8_INIT = const(0xFF)
_CMD_NOOP = const(0b000)
_CMD_GET = const(0b001)
_CMD_SET = const(0b010)
_CMD_SAVE = const(0b100)
_CMD_RESET = const(0b111)
_CMD_RESET_USB = const(0x00)
_CMD_RESET_MCU = const(0x01)
_CMD_RESET_BOOTLOADER = const(0x02)
## Floats are transmitted and persisted as integers with the following
## multiplier. Any configuration key in this array will be scaled
## by the value here upon receipt / transmission.
_FLOAT_SCALE = 100
_FLOAT_KEYS = ["loop_delay"]
_STRING_KEYS = ["descriptor_custom"]
_CFG_DATA_STATE = const(0x05)
# _CFG_POWER_ERRORS = const(0x06)
_CFG_POWER_LIMITS = const(0x07)
_CFG_POWER_MEASURE_12 = const(0x08)
_CFG_POWER_MEASURE_34 = const(0x09)
_CFG_HIGHSPEED_DISABLE = const(0x10)
_CFG_LOOP_DELAY = const(0x11)
_CFG_EXTERNAL_HEARTBEAT = const(0x12)
_CFG_RESET_ON_DELAY = const(0x13)
_CFG_RESET_ON_LINK_LOSS = const(0x14)
_CFG_LINK_LOSS_DELAY = const(0x15)
_CFG_DESCRIPTOR_CONFIG = const(0x16)
_CFG_DESCRIPTOR_CUSTOM = const(0x17)
_DESCRIPTOR_MPN_REV_SERIAL = const(0x00)
_DESCRIPTOR_MPN_REV = const(0x01)
_DESCRIPTOR_CUSTOM = const(0x02)
_PCBREV_VALUES = [None, None, 32767]
_PCBREV_TOLERANCE = 1000
# pylint: enable=bad-whitespace
def stdout(*args):
if supervisor.runtime.serial_connected:
print(" ", *args)
def _process_find_key(name):
if name == _CFG_HIGHSPEED_DISABLE:
return "highspeed_disable"
if name == _CFG_LOOP_DELAY:
return "loop_delay"
if name == _CFG_EXTERNAL_HEARTBEAT:
return "external_heartbeat"
if name == _CFG_RESET_ON_DELAY:
return "reset_on_delay"
## Flag to set if the Hub will reset the USB4715 when an upstream link loss is detected
if name == _CFG_RESET_ON_LINK_LOSS:
return "reset_on_link_loss"
## Seconds that upstream link can be down before resetting the hub
if name == _CFG_LINK_LOSS_DELAY:
return "link_loss_delay"
if name == _CFG_DESCRIPTOR_CONFIG:
return "descriptor_config"
if name == _CFG_DESCRIPTOR_CUSTOM:
return "descriptor_custom"
return None
def _register_length(addr):
if addr in [_REVISION, _RMT_IDENT, _RMT_READ, _RMT_WRITE]:
return 4
if addr in [_VENDOR_ID, _PRODUCT_ID, _DEVICE_ID, _POWER_STATE]:
return 2
return 1
def _register_base_address_lsb(addr):
if addr in [_MANUF_DESC, _PRODUCT_DESC, _MANUF_DESC_LEN, _PRODUCT_DESC_LEN]:
return 0xD2
return 0x80
def _generate_crc(data):
crc = _CRC8_INIT
for byte in data:
crc ^= byte
for _ in range(8):
if crc & 0x80:
crc = (crc << 1) ^ _CRC8_POLYNOMIAL
else:
crc <<= 1
return crc & 0xFF
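# Sanity-check sketch (comment only): with polynomial 0x31 and init 0xFF (no
# reflection, no final XOR) this matches the Sensirion-style CRC-8, e.g.
#   _generate_crc([0xBE, 0xEF])  # -> 0x92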
def _string_to_utf16le(string):
out = bytearray(len(string)*2)
for idx,c in enumerate(string):
out[idx*2] = ord(c)
return list(out)
def _utf16le_to_string(data):
out = ""
for idx in range(len(data) // 2):
out += chr(data[idx*2])
return out
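# Round-trip sketch (comments only): for ASCII input the two helpers are
# inverses, e.g. _string_to_utf16le("Hub") -> [72, 0, 117, 0, 98, 0] and
# _utf16le_to_string([72, 0, 117, 0, 98, 0]) -> "Hub".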
def _bytearry_to_bcd(b):
bcd = list(b)
return "{:02x}.{:02x}".format(bcd[1], bcd[0])
def _bytearry_to_int(b, lsb_first=True):
if lsb_first:
x = 0
shift = 0
for char in b:
x |= (char << shift*8)
shift += 1
else:
x = 0
for char in b:
x <<= 8
x |= char
return x
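# Endianness sketch (comments only):
#   _bytearry_to_int([0x34, 0x12])                   # -> 0x1234 (LSB first, default)
#   _bytearry_to_int([0x34, 0x12], lsb_first=False)  # -> 0x3412 (MSB first)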
def set_bit(value, bit):
return value | (1<<bit)
def clear_bit(value, bit):
return value & ~(1<<bit)
def get_bit(value, bit):
return (value & (1<<bit)) > 0
class USBHub:
# Class-level buffer to reduce memory usage and allocations.
# Note this is NOT thread-safe or re-entrant by design.
_BUFFER = bytearray(9)
def __init__(self, i2c1_bus, i2c2_bus, force=False):
## Set up pins so that the state upon switching to output
## is identical to the board's electrical default. This
## allows the object to be created with no state change occurring.
self.pin_rst = digitalio.DigitalInOut(board.USBRESET)
self.pin_rst.switch_to_output(value=True)
self.pin_hen = digitalio.DigitalInOut(board.USBHOSTEN)
self.pin_hen.switch_to_output(value=False)
try:
self.pin_bcen = digitalio.DigitalInOut(board.USBBCEN)
self.pin_bcen.switch_to_output(value=False)
except AttributeError:
stdout("WARN : Firmware does not define pin for battery charge configuration")
self.pin_bcen = None
self.vlim = analogio.AnalogIn(board.ANVLIM)
self.vlogic = analogio.AnalogIn(board.AN5V)
self.i2c_device = I2CDevice(i2c2_bus, _I2C_ADDR_USB, probe=False)
self.mcp_device = I2CDevice(i2c1_bus, _I2C_ADDR_MCP, probe=False)
self.eeprom = capablerobot_eeprom.EEPROM(i2c1_bus, '24AA025E48')
self.ucs = capablerobot_ucs2113.Ports(i2c1_bus)
## Load default configuration and then check filesystem for INI file to update it
self.config = dict(
highspeed_disable = False,
loop_delay = 0.1,
external_heartbeat = False,
force = False,
reset_on_delay = False,
reset_on_link_loss = True,
link_loss_delay = 30,
descriptor_config = _DESCRIPTOR_MPN_REV_SERIAL,
descriptor_custom = ''
)
self._update_config_from_ini()
self._last_poll_time = None
self._pcb_revision = None
self._power_control_registers = None
## Here we are using the port remapping to determine if the hub IC
## has been previously configured. If so, we don't need to reset
## it or configure it and can just control it as-is.
##
## If the hub has not been configured (e.g. when the board is first
## powered on), this call will raise an OSError. That will then trigger
## the normal reset & configure process.
try:
self.remap = self.get_port_remap()
stdout("Hub IC has been configured")
except OSError:
self.remap = _DEFAULT_PORT_MAP
stdout("Hub IC is in default state")
if self.remap == _DEFAULT_PORT_MAP or force or self.config['force']:
stdout("Resetting and configuring Hub IC")
self.reset()
self.configure()
self.set_mcp_config()
self.setup_host_comms()
def _write_register(self, address, xbytes, do_register_length_check=True):
if do_register_length_check and len(xbytes) != _register_length(address):
raise ValueError("Incorrect payload length for register %d" % address)
## 2 bytes for 'write' and count
## 4 bytes for address (base + offset)
## num bytes actually get written
length = 2+4+len(xbytes)
## Prepare the pre-amble
out = [
0x00,
0x00,
length, # Length of rest of packet
0x00, # Write configuration register
len(xbytes) & 0xFF, # Will be writing N bytes (later)
0xBF, _register_base_address_lsb(address),
(address >> 8) & 0xFF, address & 0xFF
]
with self.i2c_device as i2c:
## Write the pre-amble and then the payload
i2c.write(bytearray(out+xbytes))
## Execute the Configuration Register Access command
i2c.write(_CFG_REG_CMD)
def _read_register(self, address, length=0):
if length == 0:
length = _register_length(address)
## Prepare the pre-amble
out = [
0x00,
0x00,
0x06, # Length of rest of packet
0x01, # Read configuration register
length & 0xFF, # Will be reading N bytes (later)
0xBF, _register_base_address_lsb(address),
(address >> 8) & 0xFF, address & 0xFF
]
inbuf = bytearray(length+1)
with self.i2c_device as i2c:
## Write the pre-amble
i2c.write(bytearray(out))
## Execute the Configuration Register Access command
i2c.write(_CFG_REG_CMD)
## Access the part of memory where our data is
i2c.write_then_readinto(bytearray([0x00, 0x06]), inbuf)
## First byte is the length of the rest of the message.
## We don't want to return that to the caller
return inbuf[1:length+1]
# pylint: disable=invalid-name
@property
def id(self):
buf = self._read_register(_REVISION)
device_id = (buf[3] << 8) + buf[2]
revision_id = buf[0]
return device_id, revision_id
@property
def vendor_id(self):
return _bytearry_to_int(self._read_register(_VENDOR_ID))
@property
def product_id(self):
return _bytearry_to_int(self._read_register(_PRODUCT_ID))
## bcdDevice is R/W during CFG_SOC, but is written to during
## the CFG_OTP stage of the boot process, meaning that any
## writes to this register prior to attach will be lost.
@property
def device_id(self):
return _bytearry_to_bcd(self._read_register(_DEVICE_ID))
@property
def unit_mpn(self):
return self.eeprom.sku
@property
def unit_sku(self):
return self.eeprom.sku
@property
def unit_revision(self):
if self._pcb_revision is None:
try:
pin_rev = analogio.AnalogIn(board.PCBREV)
values = [pin_rev.value for _ in range(10)]
for idx, target in enumerate(_PCBREV_VALUES):
if target is None:
continue
if min(values) > target - _PCBREV_TOLERANCE and max(values) < target + _PCBREV_TOLERANCE:
# stdout("PCBREV pin reads {} to {}, REV is {}".format(min(values), max(values), idx))
self._pcb_revision = idx
except AttributeError:
# stdout("No PCBREV pin defined")
pass
if self._pcb_revision is None:
# stdout("Falling back to EEPROM lookup")
self._pcb_revision = self.eeprom.revision
return self._pcb_revision
@property
def unit_serial(self):
return self.eeprom.serial
@property
def speeds(self):
conn = _bytearry_to_int(self._read_register(_CONNECTION))
speed = _bytearry_to_int(self._read_register(_DEVICE_SPEED))
out = [0]*5
## Have to follow logical to physical remapping
for idx, port in enumerate(self.remap):
out[port] = (speed >> (idx*2)) & 0b11
## Upstream port is not in the speed register, so take data from
## the connection register. Unfortunately, no way to know speed.
out[0] = (conn & 0b1)*3
return out
def attach(self):
## 0xAA 0x55 : Exit SOC_CONFIG and Enter HUB_CONFIG stage
## 0xAA 0x56 : Exit SOC_CONFIG and Enter HUB_CONFIG stage with SMBus slave enabled
out = [0xAA, 0x56, 0x00]
with self.i2c_device as i2c:
i2c.write(bytearray(out))
def reset(self):
time.sleep(0.05)
if self.pin_bcen is not None:
# Turn on 10 ohm resistor for charge strapping
self.pin_bcen.value = True
# Put in reset for at least 10 ms
self.pin_rst.value = False
time.sleep(0.05)
# Must wait at least 1 ms after RESET_N deassertion for straps to be read
# Testing has found this delay must be MUCH longer than 1 ms for subsequent
# I2C calls to succeed.
self.pin_rst.value = True
time.sleep(0.05)
if self.pin_bcen is not None:
# Turn 10 ohm resistor off, so that SPI bus can operate properly
self.pin_bcen.value = False
def configure(self, **opts):
highspeed_disable = False
if "highspeed_disable" in self.config.keys():
highspeed_disable = self.config["highspeed_disable"]
if "highspeed_disable" in opts.keys():
highspeed_disable = opts["highspeed_disable"]
self.set_hub_config_1(highspeed_disable=highspeed_disable, multitt_enable=True)
## Reverse DP/DM pins of upstream port and ports 3 & 4
self.set_port_swap(values=[True, False, False, True, True])
self.set_hub_control(lpm_disable=True)
self.set_hub_config_3(port_map_enable=True, string_descriptor_enable=True)
## Remap ports so that the physical case markings match the USB port numbering
self.set_port_remap(ports=_CUSTOM_PORT_MAP)
self.set_charging_config()
self.set_hub_descriptors()
self.attach()
## Sleep here is needed to allow the I2C2 bus to resume normal state.
## If communications are attempted immediately, the MCU will see a low SCL pin.
##
## After reporting this unexpected behavior to Microchip, they recommended increasing
## the delay (prior to starting I2C traffic) to 100 ms. This guarantees the controller
## inside the USB4715 completes its internal processes and will not hang the I2C bus.
time.sleep(0.1)
## Set the device id (available via the bcdDevice USB descriptor) to "C.REV" where REV
## is pulled from the Hub's EEPROM. This allows the host-side driver to determine the
## Hub hardware revision, which is needed for some monitoring functions and operations.
self.set_device_id()
def upstream(self, state):
self.pin_hen.value = not state
def _update_config_from_ini(self):
## If the INI file does not exist, the 'open' will
## raise an OSError, so we catch that here.
try:
with open("/config.ini", 'r') as f:
stdout("Loading config.ini from filesystem")
line = f.readline()
while line != '':
## Skip commented out lines
if line[0] == ';':
line = f.readline()
continue
key, value = [w.strip() for w in line.split("=")]
if key in _FLOAT_KEYS:
value = float(value) / _FLOAT_SCALE
elif key not in _STRING_KEYS:
try:
value = int(value)
except ValueError:
stdout("Error in unpacking key {}".format(key))
stdout(" ", key, "=", value)
self.config[key] = value
line = f.readline()
except OSError:
stdout("Default configuration")
def _save_config_to_ini(self):
import storage
## If MCU is mounted on a host, this call will fail
## and we report the error via returning False (0) to the caller.
##
## CircuitPython 5.2.0 or greater is needed for this process to work.
try:
storage.remount("/", readonly=False)
stdout("Remounted filesystem RW")
except RuntimeError as e:
stdout("Remount filesystem RW failed")
stdout(e)
return 0
with open("/config.ini", 'w') as f:
for key, value in self.config.items():
if key in _FLOAT_KEYS:
value = round(value * _FLOAT_SCALE)
elif key not in _STRING_KEYS:
value = int(value)
f.write(key + " = " + str(value) + "\r\n")
stdout("INI file written")
storage.remount("/", readonly=True)
stdout("Remounted filesystem RO")
return 1
def set_memory(self, cmd, name=0, value=0):
buf = [cmd << 5 | name, (value >> 8) & 0xFF, value & 0xFF]
crc = _generate_crc(buf)
self._write_register(_RMT_WRITE, buf + [crc])
def get_memory(self):
buf = self._read_register(_RMT_READ)
crc = _generate_crc(buf[0:3])
## Check that the data is intact
if crc == buf[3]:
## Clear the register so the host knows we got the request
self._write_register(_RMT_READ, [0,0,0,0])
## Parse request and pass to caller
cmd = buf[0] >> 5
name = buf[0] & 0b11111
value = buf[1] << 8 | buf[2]
return cmd, name, value
return None, None, None
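## Packet-layout sketch (comments only): set_memory / get_memory exchange four
## bytes via the _RMT_WRITE / _RMT_READ registers:
##   [cmd << 5 | name, value high byte, value low byte, CRC-8 of the first three]
## e.g. a GET (_CMD_GET = 0b001) of _CFG_LOOP_DELAY (0x11) starts with
## (0b001 << 5) | 0x11 == 0x31.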
def _process_host_get(self, name):
value = None
if name == _CFG_DATA_STATE:
value = self._read_mcp_register(_GPIO)
elif name == _CFG_POWER_MEASURE_12:
data = self.power_measure(ports=[1,2], raw=True, total=False)
value = data[0] + (data[1] << 8)
elif name == _CFG_POWER_MEASURE_34:
data = self.power_measure(ports=[3,4], raw=True, total=False)
value = data[0] + (data[1] << 8)
elif name == _CFG_POWER_LIMITS:
data = self.ucs.get_power_limits()
value = data[0] + (data[1] << 8)
else:
key = _process_find_key(name)
if key is None:
return
value = self.config[key]
## Convert from internal representation to external
if key in _FLOAT_KEYS:
value = round(value * _FLOAT_SCALE)
## Expose the value to the host
self.set_memory(_CMD_GET, name, value)
def _process_host_set(self, name, value):
if name == _CFG_DATA_STATE:
value = self._write_mcp_register(_GPIO, value)
elif name == _CFG_POWER_LIMITS:
port12 = value & 0xFF
port34 = (value >> 8) & 0xFF
self.ucs.set_power_limits(port12, port34)
else:
key = _process_find_key(name)
if key is None:
return
## Convert from external representation to internal
if key in _FLOAT_KEYS:
value_internal = float(value) / _FLOAT_SCALE
else:
value_internal = value
self.config[key] = value_internal
## Expose the value to the host, so it knows the set is complete
self.set_memory(_CMD_SET, name, value)
def setup_host_comms(self):
self._write_register(_RMT_IDENT, [__version__] + list(sys.implementation.version))
def poll_for_host_comms(self):
poll_time = time.monotonic()
cmd, name, value = self.get_memory()
if cmd == _CMD_GET:
self._process_host_get(name)
elif cmd == _CMD_SET:
self._process_host_set(name, value)
elif cmd == _CMD_RESET:
if value == _CMD_RESET_USB:
stdout("Host resetting USB IC")
self.reset()
self.configure()
elif value == _CMD_RESET_MCU:
stdout("Host rebooting MCU")
microcontroller.reset()
elif value == _CMD_RESET_BOOTLOADER:
stdout("Host rebooting MCU into bootloader mode")
microcontroller.on_next_reset(microcontroller.RunMode.BOOTLOADER)
microcontroller.reset()
elif cmd == _CMD_SAVE:
stdout("Saving configuration to INI file")
result = self._save_config_to_ini()
self.set_memory(_CMD_SAVE, 0, result)
if self._last_poll_time is not None and self.config["reset_on_delay"]:
if poll_time - self._last_poll_time > self.config["loop_delay"] * 10:
## Still need to reset history, otherwise RuntimeError will be
## continously raised once the first delay occurs.
self._last_poll_time = poll_time
raise RuntimeError("Excessive loop delay")
self._last_poll_time = poll_time
def set_last_poll_time(self, poll_time):
self._last_poll_time = poll_time
def set_port_swap(self, values=[False, False, False, False, False]):
value = 0
for idx, bit in enumerate(values):
if bit:
value = set_bit(value, idx)
self._write_register(_PORT_SWAP, [value])
def set_hub_control(self, lpm_disable=False, reset=False):
value = lpm_disable << 1 | \
reset
self._write_register(_HUB_CTRL, [value])
def set_hub_config_1(self, self_powered=True, vsm_disable=False, highspeed_disable=False, multitt_enable=False,
eop_disable=False, individual_current_sense=True, individual_port_power=True):
## individual_current_sense : 0 -> ganged sensing, 1 -> individual, 2 or 3 -> current sense not supported
value = self_powered << 7 | \
vsm_disable << 6 | \
highspeed_disable << 5 | \
multitt_enable << 4 | \
eop_disable << 3 | \
individual_current_sense << 1 | \
individual_port_power
self._write_register(_HUB_CONFIG_1, [value])
def set_hub_config_3(self, port_map_enable=True, string_descriptor_enable=False):
value = port_map_enable << 3 | \
string_descriptor_enable
self._write_register(_HUB_CONFIG_3, [value])
def get_hub_string_descriptor(self, address):
length = self._read_register(address)[0]
value = self._read_register(address, length)
return _utf16le_to_string(value[2:])
def set_hub_descriptors(self):
## Set the manufacturer name
string_data = _string_to_utf16le(_MANUF_STRING)
data = [len(string_data)+2, 0x03] + string_data
self._write_register(_MANUF_DESC, data, do_register_length_check=False)
self._write_register(_MANUF_DESC_LEN, [len(string_data)+2])
if self.config["descriptor_config"] == _DESCRIPTOR_CUSTOM:
product_string = self.config["descriptor_custom"]
elif self.config["descriptor_config"] == _DESCRIPTOR_MPN_REV:
product_string = self.eeprom.sku + "." + str(self.unit_revision)
else:
product_string = self.eeprom.sku + "." + str(self.unit_revision) + " " + self.eeprom.serial
## Set the product name based on the EEPROM data / data from config file
string_data = _string_to_utf16le(product_string)
data = [len(string_data)+2, 0x03] + string_data
self._write_register(_PRODUCT_DESC, data, do_register_length_check=False)
self._write_register(_PRODUCT_DESC_LEN, [len(string_data)+2])
def set_device_id(self):
self._write_register(_DEVICE_ID, [self.unit_revision, ord("C")])
def set_port_remap(self, ports=[1, 2, 3, 4]):
self.remap = ports
port12 = ((ports[1] << 4) & 0xFF) | (ports[0] & 0xFF)
port34 = ((ports[3] << 4) & 0xFF) | (ports[2] & 0xFF)
self._write_register(_REMAP_12, [port12])
self._write_register(_REMAP_34, [port34])
def set_charging_config(self, ports=[1,2,3,4], ucs_lim=0b11, enable=True, dcp=True, se1=0b00, china_mode=False):
## ucs_lim : When controlling UCS through I2C, this sets the current limit.
## 0b00 : 500 mA
## 0b01 : 1000 mA
## 0b10 : 1500 mA
## 0b11 : 2000 mA
## 'dcp' enables Dedicated Charging Port (DCP) mode. Ignored if china_mode is enabled.
## This mode only active when a USB Host is not present. When a host is
## present, CDP mode is used.
## Bits 1 & 2 are SE1. Enables SE1 charging mode for certain devices.
## This mode is only activated when a USB host is not present. When a
## host is present, the mode of operation is CDP. When SE1 mode and DCP
## mode are both enabled, the hub toggles between the two modes of
## operation as necessary to ensure the device can charge.
##
## 0b00 : Mode Disabled
## 0b01 : 1A mode (D-: 2.7V, D+: 2.0V)
## 0b10 : 2A mode (D-: 2.0V, D+: 2.7V)
## 0b11 : 2.5A mode enabled (D-: 2.7V, D+: 2.7V)
## Bit 0 is Battery Charging Support Enable. This bit enables CDP and
## must be set for any battery charging functions to be enabled. Other
## functions in addition to CDP are enabled by setting their
## respective bits in addition to this bit.
value = (ucs_lim & 0b11) << 6 | \
dcp << 5 | \
china_mode << 4 | \
(se1 & 0b11) << 1 | \
enable
for port in ports:
## Register address is based on the port number
self._write_register(_CHARGE_CONFIG+port-1, [value])
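## Worked example (comment only): the default arguments above pack to
##   value = (0b11 << 6) | (True << 5) | (False << 4) | (0b00 << 1) | True
##         = 0xC0 | 0x20 | 0x01 = 0xE1
## i.e. 2000 mA limit, DCP enabled, china mode off, SE1 disabled, charging enabled.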
def set_mcp_config(self, inputs=[False, False, False, False]):
"""Set direction on MCP IO pins. 'inputs' list will set GP0 thru GP4 to inputs, if respective position is true"""
## Bits 7 thru 4 control USB data enables on downstream ports 1 thru 4, respectively.
## They must be set to 0 to make them outputs.
value = 0b00000000 | \
inputs[3] << 3 | \
inputs[2] << 2 | \
inputs[1] << 1 | \
inputs[0]
with self.mcp_device as i2c:
## Write to IODIR register and defaults to other registers.
## 0x09 (GPIO) register has to be 0b0000_0000 so that downstream ports default to enabled
i2c.write(bytearray([0x00, value, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
def _read_mcp_register(self, addr, max_attempts=5):
attempts = 0
while attempts < max_attempts:
attempts += 1
try:
with self.mcp_device as i2c:
self._BUFFER[0] = addr
i2c.write_then_readinto(self._BUFFER, self._BUFFER, out_end=1, in_end=1)
break
except OSError:
time.sleep(0.01)
if attempts >= max_attempts:
return None
return self._BUFFER[0]
def _write_mcp_register(self, addr, value, max_attempts=5):
attempts = 0
while attempts < max_attempts:
attempts += 1
try:
with self.mcp_device as i2c:
self._BUFFER[0] = addr
self._BUFFER[1] = value
i2c.write(self._BUFFER, end=2)
break
except OSError:
time.sleep(0.01)
if attempts >= max_attempts:
return None
return self._BUFFER[1]
def data_state(self):
value = self._read_mcp_register(_GPIO)
if value is None:
return None
return [not get_bit(value, 7), not get_bit(value, 6), not get_bit(value, 5), not get_bit(value, 4)]
def data_enable(self, ports=[]):
with self.mcp_device as i2c:
self._BUFFER[0] = _GPIO
i2c.write_then_readinto(self._BUFFER, self._BUFFER, out_end=1, in_start=1, in_end=2)
for port in ports:
self._BUFFER[1] = clear_bit(self._BUFFER[1], 8-port)
i2c.write(self._BUFFER, end=2)
def data_disable(self, ports=[]):
with self.mcp_device as i2c:
self._BUFFER[0] = _GPIO
i2c.write_then_readinto(self._BUFFER, self._BUFFER, out_end=1, in_start=1, in_end=2)
for port in ports:
self._BUFFER[1] = set_bit(self._BUFFER[1], 8-port)
i2c.write(self._BUFFER, end=2)
def get_port_remap(self):
port12 = _bytearry_to_int(self._read_register(_REMAP_12))
port34 = _bytearry_to_int(self._read_register(_REMAP_34))
return [port12 & 0x0F, (port12 >> 4) & 0x0F, port34 & 0x0F, (port34 >> 4) & 0x0F]
def power_control_register(self, port):
if self._power_control_registers is None:
# Interconnect between Hub IC power control pins and downstream power control devices
# changed between REV 1 and REV 2. Therefore, we have to look at the hardware revision
# to know which register controls which physical port (even though the logical data
# connection to the Hub IC did not change between REV 1 and REV 2)
if self.unit_revision >= 2:
self._power_control_registers = [_POWER_SELECT_1+(port-1)*4 for port in [3,1,4,2]]
else:
self._power_control_registers = [_POWER_SELECT_1+(port-1)*4 for port in [1,2,3,4]]
return self._power_control_registers[port]
def power_state(self, ports=[1,2,3,4]):
out = []
for port in ports:
data = self._read_register(self.power_control_register(port-1))[0]
## Bits 3:0 mapping:
## 0b000 : Port power is disabled
## 0b001 : Port is on if USB2 port power is on
## 0b100 : Port is on if designated GPIO is on
## Upstream disconnect and downstream connection cause 0b100
## So, we need to check for value > 0 (not just bit 0) to determine
## if port power is on or not.
out.append((data & 0b111) > 0)
return out
def power_disable(self, ports=[]):
for port in ports:
self._write_register(_POWER_SELECT_1+(port-1)*4, [0x80])
def power_enable(self, ports=[]):
for port in ports:
self._write_register(_POWER_SELECT_1+(port-1)*4, [0x81])
def power_measure(self, ports=[1,2,3,4], total=True, raw=False, rescale=0):
return self.ucs.currents(ports=ports, total=total, raw=raw, rescale=rescale)
def power_errors(self):
return self.ucs.status()
@property
def rails(self):
vlim, vlogic = None, None
if self.vlim is not None:
voltage = float(self.vlim.value) / 65535.0 * self.vlim.reference_voltage
vlim = voltage * (1870 + 20000) / 1870
if self.vlogic is not None:
voltage = float(self.vlogic.value) / 65535.0 * self.vlogic.reference_voltage
vlogic = voltage * 2
return vlim, vlogic
| 34.346154
| 122
| 0.605914
|
4a1f96cf491ede58daa20b1d366630a1f73d6d30
| 2,568
|
py
|
Python
|
data/cirq_new/cirq_program/startCirq_noisy933.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/cirq_new/cirq_program/startCirq_noisy933.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/cirq_new/cirq_program/startCirq_noisy933.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=24
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[1])) # number=7
c.append(cirq.H.on(input_qubit[2])) # number=3
c.append(cirq.H.on(input_qubit[3])) # number=4
c.append(cirq.H.on(input_qubit[0])) # number=21
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=22
c.append(cirq.H.on(input_qubit[0])) # number=23
c.append(cirq.H.on(input_qubit[0])) # number=16
c.append(cirq.CZ.on(input_qubit[3],input_qubit[0])) # number=17
c.append(cirq.H.on(input_qubit[0])) # number=18
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=8
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=9
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=10
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=11
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=12
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=13
c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=14
c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=15
c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=19
c.append(cirq.SWAP.on(input_qubit[3],input_qubit[0])) # number=20
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
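# Illustrative example (comment only): bitstring([1, 0, 1]) -> '101'; it is
# used below as the fold function that turns each measurement record into a
# histogram key.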
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count = 2820
circuit = circuit.with_noise(cirq.depolarize(p=0.01))
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq_noisy933.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
| 35.178082
| 77
| 0.689642
|
c4a44af604cb99441259658a62348b40e0899b01
| 42,100
|
py
|
Python
|
gplearn/tests/test_genetic.py
|
DaGuT/gplearn
|
8ef45101858cd0d61ac6c27ec1921fe185ee5882
|
[
"BSD-3-Clause"
] | null | null | null |
gplearn/tests/test_genetic.py
|
DaGuT/gplearn
|
8ef45101858cd0d61ac6c27ec1921fe185ee5882
|
[
"BSD-3-Clause"
] | null | null | null |
gplearn/tests/test_genetic.py
|
DaGuT/gplearn
|
8ef45101858cd0d61ac6c27ec1921fe185ee5882
|
[
"BSD-3-Clause"
] | null | null | null |
"""Testing the Genetic Programming module's underlying datastructure
(gplearn.genetic._Program) as well as the classes that use it,
gplearn.genetic.SymbolicRegressor and gplearn.genetic.SymbolicTransformer."""
# Author: Trevor Stephens <trevorstephens.com>
#
# License: BSD 3 clause
import pickle
import sys
import numpy as np
from scipy.stats import pearsonr, spearmanr
from sklearn.externals.six.moves import StringIO
from sklearn.datasets import load_boston
from sklearn.metrics import mean_absolute_error
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.tree import DecisionTreeRegressor
from sklearn.utils.estimator_checks import check_estimator
from sklearn.utils.testing import assert_false, assert_true
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_equal, assert_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns
from sklearn.utils.validation import check_random_state
from gplearn.genetic import SymbolicRegressor, SymbolicTransformer
from gplearn.fitness import weighted_pearson, weighted_spearman
from gplearn._program import _Program
from gplearn.fitness import _fitness_map
from gplearn.functions import (add2, sub2, mul2, div2, sqrt1, log1, abs1, max2,
min2)
from gplearn.functions import _Function
# load the boston dataset and randomly permute it
rng = check_random_state(0)
boston = load_boston()
perm = rng.permutation(boston.target.size)
boston.data = boston.data[perm]
boston.target = boston.target[perm]
def test_sklearn_estimator_checks_regressor():
"""Run the sklearn estimator validation checks on SymbolicRegressor"""
check_estimator(SymbolicRegressor)
def test_sklearn_estimator_checks_transformer():
"""Run the sklearn estimator validation checks on SymbolicTransformer"""
check_estimator(SymbolicTransformer)
def test_weighted_correlations():
"""Check weighted Pearson correlation coefficient matches scipy"""
random_state = check_random_state(415)
x1 = random_state.uniform(size=500)
x2 = random_state.uniform(size=500)
w1 = np.ones(500)
w2 = random_state.uniform(size=500)
# Pearson's correlation coefficient
scipy_pearson = pearsonr(x1, x2)[0]
# Check with constant weights (should be equal)
gplearn_pearson = weighted_pearson(x1, x2, w1)
assert_almost_equal(scipy_pearson, gplearn_pearson)
# Check with irregular weights (should be different)
gplearn_pearson = weighted_pearson(x1, x2, w2)
assert_true(abs(scipy_pearson - gplearn_pearson) > 0.01)
# Spearman's correlation coefficient
scipy_spearman = spearmanr(x1, x2)[0]
# Check with constant weights (should be equal)
gplearn_spearman = weighted_spearman(x1, x2, w1)
assert_almost_equal(scipy_spearman, gplearn_spearman)
# Check with irregular weights (should be different)
    gplearn_spearman = weighted_spearman(x1, x2, w2)
assert_true(abs(scipy_spearman - gplearn_spearman) > 0.01)
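# Reference sketch (an illustration under assumed semantics, not gplearn's
# own code -- that lives in gplearn.fitness): with equal weights the
# weighted Pearson correlation below reduces to scipy's plain pearsonr,
# which is exactly what the test above checks.
def _weighted_pearson_reference(x, y, w):
    mean_x, mean_y = np.average(x, weights=w), np.average(y, weights=w)
    cov = np.average((x - mean_x) * (y - mean_y), weights=w)
    var_x = np.average((x - mean_x) ** 2, weights=w)
    var_y = np.average((y - mean_y) ** 2, weights=w)
    return cov / np.sqrt(var_x * var_y)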
def test_program_init_method():
"""Check 'full' creates longer and deeper programs than other methods"""
params = {'function_set': [add2, sub2, mul2, div2, sqrt1, log1, abs1, max2,
min2],
'arities': {1: [sqrt1, log1, abs1],
2: [add2, sub2, mul2, div2, max2, min2]},
'init_depth': (2, 6),
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
programs = []
for i in range(20):
programs.append(_Program(init_method='full',
random_state=random_state, **params))
full_length = np.mean([gp.length_ for gp in programs])
full_depth = np.mean([gp.depth_ for gp in programs])
programs = []
for i in range(20):
programs.append(_Program(init_method='half and half',
random_state=random_state, **params))
hnh_length = np.mean([gp.length_ for gp in programs])
hnh_depth = np.mean([gp.depth_ for gp in programs])
programs = []
for i in range(20):
programs.append(_Program(init_method='grow',
random_state=random_state, **params))
grow_length = np.mean([gp.length_ for gp in programs])
grow_depth = np.mean([gp.depth_ for gp in programs])
assert_greater(full_length, hnh_length)
assert_greater(hnh_length, grow_length)
assert_greater(full_depth, hnh_depth)
assert_greater(hnh_depth, grow_depth)
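# Background note (standard genetic-programming terminology, summarised
# here as an aid): 'full' places functions at every node until init_depth
# is reached, 'grow' may pick terminals at any depth, and 'half and half'
# ramps between the two -- hence the strict ordering of mean length and
# depth asserted above.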
def test_program_init_depth():
"""Check 'full' creates constant depth programs for single depth limit"""
params = {'function_set': [add2, sub2, mul2, div2, sqrt1, log1, abs1, max2,
min2],
'arities': {1: [sqrt1, log1, abs1],
2: [add2, sub2, mul2, div2, max2, min2]},
'init_depth': (6, 6),
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
programs = []
for i in range(20):
programs.append(_Program(init_method='full',
random_state=random_state, **params))
full_depth = np.bincount([gp.depth_ for gp in programs])
programs = []
for i in range(20):
programs.append(_Program(init_method='half and half',
random_state=random_state, **params))
hnh_depth = np.bincount([gp.depth_ for gp in programs])
programs = []
for i in range(20):
programs.append(_Program(init_method='grow',
random_state=random_state, **params))
grow_depth = np.bincount([gp.depth_ for gp in programs])
assert_true(full_depth[-1] == 20)
assert_false(hnh_depth[-1] == 20)
assert_false(grow_depth[-1] == 20)
def test_validate_program():
"""Check that valid programs are accepted & invalid ones raise error"""
function_set = [add2, sub2, mul2, div2, sqrt1, log1, abs1, max2, min2]
    arities = {1: [sqrt1, log1, abs1],
               2: [add2, sub2, mul2, div2, max2, min2]}
init_depth = (2, 6)
init_method = 'half and half'
n_features = 10
const_range = (-1.0, 1.0)
metric = 'mean absolute error'
p_point_replace = 0.05
parsimony_coefficient = 0.1
random_state = check_random_state(415)
test_gp = [sub2, abs1, sqrt1, log1, log1, sqrt1, 7, abs1, abs1, abs1, log1,
sqrt1, 2]
# This one should be fine
_ = _Program(function_set, arities, init_depth, init_method, n_features,
const_range, metric, p_point_replace, parsimony_coefficient,
random_state, None, test_gp)
# Now try a couple that shouldn't be
assert_raises(ValueError, _Program, function_set, arities, init_depth,
init_method, n_features, const_range, metric,
p_point_replace, parsimony_coefficient, random_state,
None, test_gp[:-1])
assert_raises(ValueError, _Program, function_set, arities, init_depth,
init_method, n_features, const_range, metric,
p_point_replace, parsimony_coefficient, random_state,
None, test_gp + [1])
def test_print_overloading():
"""Check that printing a program object results in 'pretty' output"""
params = {'function_set': [add2, sub2, mul2, div2],
'arities': {2: [add2, sub2, mul2, div2]},
'init_depth': (2, 6),
'init_method': 'half and half',
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
test_gp = [mul2, div2, 8, 1, sub2, 9, .5]
gp = _Program(random_state=random_state, program=test_gp, **params)
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
print(gp)
output = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
lisp = "mul(div(X8, X1), sub(X9, 0.500))"
assert_true(output == lisp)
# Test with feature names
params['feature_names'] = [str(n) for n in range(10)]
gp = _Program(random_state=random_state, program=test_gp, **params)
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
print(gp)
output = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
lisp = "mul(div(8, 1), sub(9, 0.500))"
assert_true(output == lisp)
def test_export_graphviz():
"""Check output of a simple program to Graphviz"""
params = {'function_set': [add2, sub2, mul2, div2],
'arities': {2: [add2, sub2, mul2, div2]},
'init_depth': (2, 6),
'init_method': 'half and half',
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
# Test for a small program
test_gp = [mul2, div2, 8, 1, sub2, 9, .5]
gp = _Program(random_state=random_state, program=test_gp, **params)
output = gp.export_graphviz()
tree = 'digraph program {\n' \
'node [style=filled]0 [label="mul", fillcolor="#136ed4"] ;\n' \
'1 [label="div", fillcolor="#136ed4"] ;\n' \
'2 [label="X8", fillcolor="#60a6f6"] ;\n' \
'3 [label="X1", fillcolor="#60a6f6"] ;\n' \
'1 -> 3 ;\n1 -> 2 ;\n' \
'4 [label="sub", fillcolor="#136ed4"] ;\n' \
'5 [label="X9", fillcolor="#60a6f6"] ;\n' \
'6 [label="0.500", fillcolor="#60a6f6"] ;\n' \
'4 -> 6 ;\n4 -> 5 ;\n0 -> 4 ;\n0 -> 1 ;\n}'
assert_true(output == tree)
# Test with feature names
params['feature_names'] = [str(n) for n in range(10)]
gp = _Program(random_state=random_state, program=test_gp, **params)
output = gp.export_graphviz()
tree = tree.replace('X', '')
assert_true(output == tree)
# Test with fade_nodes
params['feature_names'] = None
gp = _Program(random_state=random_state, program=test_gp, **params)
output = gp.export_graphviz(fade_nodes=[0, 1, 2, 3])
tree = 'digraph program {\n' \
'node [style=filled]0 [label="mul", fillcolor="#cecece"] ;\n' \
'1 [label="div", fillcolor="#cecece"] ;\n' \
'2 [label="X8", fillcolor="#cecece"] ;\n' \
'3 [label="X1", fillcolor="#cecece"] ;\n' \
'1 -> 3 ;\n1 -> 2 ;\n' \
'4 [label="sub", fillcolor="#136ed4"] ;\n' \
'5 [label="X9", fillcolor="#60a6f6"] ;\n' \
'6 [label="0.500", fillcolor="#60a6f6"] ;\n' \
'4 -> 6 ;\n4 -> 5 ;\n0 -> 4 ;\n0 -> 1 ;\n}'
assert_true(output == tree)
# Test a degenerative single-node program
test_gp = [1]
gp = _Program(random_state=random_state, program=test_gp, **params)
output = gp.export_graphviz()
tree = 'digraph program {\n' \
'node [style=filled]0 [label="X1", fillcolor="#60a6f6"] ;\n}'
assert_true(output == tree)
def test_invalid_feature_names():
"""Check invalid feature names raise errors"""
for Symbolic in (SymbolicRegressor, SymbolicTransformer):
# Check invalid length feature_names
est = Symbolic(feature_names=['foo', 'bar'])
assert_raises(ValueError, est.fit, boston.data, boston.target)
# Check invalid type feature_name
feature_names = [str(n) for n in range(12)] + [0]
est = Symbolic(feature_names=feature_names)
assert_raises(ValueError, est.fit, boston.data, boston.target)
def test_execute():
"""Check executing the program works"""
params = {'function_set': [add2, sub2, mul2, div2],
'arities': {2: [add2, sub2, mul2, div2]},
'init_depth': (2, 6),
'init_method': 'half and half',
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
# Test for a small program
test_gp = [mul2, div2, 8, 1, sub2, 9, .5]
X = np.reshape(random_state.uniform(size=50), (5, 10))
gp = _Program(random_state=random_state, program=test_gp, **params)
result = gp.execute(X)
expected = [-0.19656208, 0.78197782, -1.70123845, -0.60175969, -0.01082618]
assert_array_almost_equal(result, expected)
def test_all_metrics():
"""Check all supported metrics work"""
params = {'function_set': [add2, sub2, mul2, div2],
'arities': {2: [add2, sub2, mul2, div2]},
'init_depth': (2, 6),
'init_method': 'half and half',
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
# Test for a small program
test_gp = [mul2, div2, 8, 1, sub2, 9, .5]
gp = _Program(random_state=random_state, program=test_gp, **params)
X = np.reshape(random_state.uniform(size=50), (5, 10))
y = random_state.uniform(size=5)
sample_weight = np.ones(5)
expected = [1.48719809776, 1.82389179833, 1.76013763179, -0.2928200724,
-0.5]
result = []
for m in ['mean absolute error', 'mse', 'rmse', 'pearson', 'spearman']:
gp.metric = _fitness_map[m]
gp.raw_fitness_ = gp.raw_fitness(X, y, sample_weight)
result.append(gp.fitness())
assert_array_almost_equal(result, expected)
def test_get_subtree():
"""Check that get subtree does the same thing for self and new programs"""
params = {'function_set': [add2, sub2, mul2, div2],
'arities': {2: [add2, sub2, mul2, div2]},
'init_depth': (2, 6),
'init_method': 'half and half',
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
# Test for a small program
test_gp = [mul2, div2, 8, 1, sub2, 9, .5]
gp = _Program(random_state=random_state, program=test_gp, **params)
self_test = gp.get_subtree(check_random_state(0))
external_test = gp.get_subtree(check_random_state(0), test_gp)
assert_equal(self_test, external_test)
def test_genetic_operations():
"""Check all genetic operations are stable and don't change programs"""
params = {'function_set': [add2, sub2, mul2, div2],
'arities': {2: [add2, sub2, mul2, div2]},
'init_depth': (2, 6),
'init_method': 'half and half',
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
# Test for a small program
test_gp = [mul2, div2, 8, 1, sub2, 9, .5]
donor = [add2, 0.1, sub2, 2, 7]
gp = _Program(random_state=random_state, program=test_gp, **params)
assert_equal([f.name if isinstance(f, _Function) else f
for f in gp.reproduce()],
['mul', 'div', 8, 1, 'sub', 9, 0.5])
assert_equal(gp.program, test_gp)
assert_equal([f.name if isinstance(f, _Function) else f
for f in gp.crossover(donor, random_state)[0]],
['sub', 2, 7])
assert_equal(gp.program, test_gp)
assert_equal([f.name if isinstance(f, _Function) else f
for f in gp.subtree_mutation(random_state)[0]],
['mul', 'div', 8, 1, 'sub', 'sub', 3, 5, 'add', 6, 3])
assert_equal(gp.program, test_gp)
assert_equal([f.name if isinstance(f, _Function) else f
for f in gp.hoist_mutation(random_state)[0]],
['div', 8, 1])
assert_equal(gp.program, test_gp)
assert_equal([f.name if isinstance(f, _Function) else f
for f in gp.point_mutation(random_state)[0]],
['mul', 'div', 8, 1, 'sub', 9, 0.5])
assert_equal(gp.program, test_gp)
def test_program_input_validation():
"""Check that guarded input validation raises errors"""
for Symbolic in (SymbolicRegressor, SymbolicTransformer):
# Check too much proba
est = Symbolic(p_point_mutation=.5)
assert_raises(ValueError, est.fit, boston.data, boston.target)
# Check invalid init_method
est = Symbolic(init_method='ni')
assert_raises(ValueError, est.fit, boston.data, boston.target)
# Check invalid const_ranges
est = Symbolic(const_range=2)
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(const_range=[2, 2])
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(const_range=(2, 2, 2))
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(const_range='ni')
assert_raises(ValueError, est.fit, boston.data, boston.target)
# And check acceptable, but strange, representations of const_range
est = Symbolic(generations=2, const_range=(2, 2))
est.fit(boston.data, boston.target)
est = Symbolic(generations=2, const_range=None)
est.fit(boston.data, boston.target)
est = Symbolic(generations=2, const_range=(4, 2))
est.fit(boston.data, boston.target)
# Check invalid init_depth
est = Symbolic(init_depth=2)
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(init_depth=2)
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(init_depth=[2, 2])
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(init_depth=(2, 2, 2))
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(init_depth='ni')
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(init_depth=(4, 2))
assert_raises(ValueError, est.fit, boston.data, boston.target)
# And check acceptable, but strange, representations of init_depth
est = Symbolic(generations=2, init_depth=(2, 2))
est.fit(boston.data, boston.target)
# Check hall_of_fame and n_components for transformer
est = SymbolicTransformer(hall_of_fame=2000)
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = SymbolicTransformer(n_components=2000)
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = SymbolicTransformer(hall_of_fame=0)
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = SymbolicTransformer(n_components=0)
assert_raises(ValueError, est.fit, boston.data, boston.target)
# Check regressor metrics
for m in ['mean absolute error', 'mse', 'rmse', 'pearson', 'spearman']:
est = SymbolicRegressor(generations=2, metric=m)
est.fit(boston.data, boston.target)
# And check a fake one
est = SymbolicRegressor(generations=2, metric='the larch')
assert_raises(ValueError, est.fit, boston.data, boston.target)
# Check transformer metrics
for m in ['pearson', 'spearman']:
est = SymbolicTransformer(generations=2, metric=m)
est.fit(boston.data, boston.target)
# And check the regressor metrics as well as a fake one
for m in ['mean absolute error', 'mse', 'rmse', 'the larch']:
est = SymbolicTransformer(generations=2, metric=m)
assert_raises(ValueError, est.fit, boston.data, boston.target)
def test_none_const_range():
"""Check that const_range=None produces no constants"""
# Check with None as const_range
est = SymbolicRegressor(const_range=None, generations=2)
est.fit(boston.data, boston.target)
float_count = 0
for generation in est._programs:
for program in generation:
if program is None:
continue
for element in program.program:
if type(element) == float:
float_count += 1
assert_true(float_count == 0)
# Check with default const_range
est = SymbolicRegressor(generations=2)
est.fit(boston.data, boston.target)
float_count = 0
for generation in est._programs:
for program in generation:
if program is None:
continue
for element in program.program:
if type(element) == float:
float_count += 1
assert_true(float_count > 1)
def test_sample_weight():
"""Check sample_weight param works"""
# Check constant sample_weight has no effect
sample_weight = np.ones(boston.target.shape[0])
est1 = SymbolicRegressor(generations=2, random_state=0)
est1.fit(boston.data, boston.target)
est2 = SymbolicRegressor(generations=2, random_state=0)
est2.fit(boston.data, boston.target, sample_weight=sample_weight)
# And again with a scaled sample_weight
est3 = SymbolicRegressor(generations=2, random_state=0)
est3.fit(boston.data, boston.target, sample_weight=sample_weight * 1.1)
assert_almost_equal(est1._program.fitness_, est2._program.fitness_)
assert_almost_equal(est1._program.fitness_, est3._program.fitness_)
# And again for the transformer
sample_weight = np.ones(boston.target.shape[0])
est1 = SymbolicTransformer(generations=2, random_state=0)
est1 = est1.fit_transform(boston.data, boston.target)
est2 = SymbolicTransformer(generations=2, random_state=0)
est2 = est2.fit_transform(boston.data, boston.target,
sample_weight=sample_weight)
assert_array_almost_equal(est1, est2)
def test_trigonometric():
"""Check that using trig functions work and that results differ"""
est1 = SymbolicRegressor(random_state=0)
est1.fit(boston.data[:400, :], boston.target[:400])
est1 = mean_absolute_error(est1.predict(boston.data[400:, :]),
boston.target[400:])
est2 = SymbolicRegressor(function_set=['add', 'sub', 'mul', 'div',
'sin', 'cos', 'tan'],
random_state=0)
est2.fit(boston.data[:400, :], boston.target[:400])
est2 = mean_absolute_error(est2.predict(boston.data[400:, :]),
boston.target[400:])
assert_true(abs(est1 - est2) > 0.01)
def test_subsample():
"""Check that subsample work and that results differ"""
est1 = SymbolicRegressor(max_samples=1.0, random_state=0)
est1.fit(boston.data[:400, :], boston.target[:400])
est1 = mean_absolute_error(est1.predict(boston.data[400:, :]),
boston.target[400:])
est2 = SymbolicRegressor(max_samples=0.7, random_state=0)
est2.fit(boston.data[:400, :], boston.target[:400])
est2 = mean_absolute_error(est2.predict(boston.data[400:, :]),
boston.target[400:])
assert_true(abs(est1 - est2) > 0.01)
def test_parsimony_coefficient():
"""Check that parsimony coefficients work and that results differ"""
est1 = SymbolicRegressor(parsimony_coefficient=0.001, random_state=0)
est1.fit(boston.data[:400, :], boston.target[:400])
est1 = mean_absolute_error(est1.predict(boston.data[400:, :]),
boston.target[400:])
est2 = SymbolicRegressor(parsimony_coefficient=0.1, random_state=0)
est2.fit(boston.data[:400, :], boston.target[:400])
est2 = mean_absolute_error(est2.predict(boston.data[400:, :]),
boston.target[400:])
est3 = SymbolicRegressor(parsimony_coefficient='auto', random_state=0)
est3.fit(boston.data[:400, :], boston.target[:400])
est3 = mean_absolute_error(est3.predict(boston.data[400:, :]),
boston.target[400:])
assert_true(abs(est1 - est2) > 0.01)
assert_true(abs(est1 - est3) > 0.01)
assert_true(abs(est2 - est3) > 0.01)
def test_early_stopping():
"""Check that early stopping works"""
est1 = SymbolicRegressor(stopping_criteria=10, random_state=0)
est1.fit(boston.data[:400, :], boston.target[:400])
assert_true(len(est1._programs) == 1)
est1 = SymbolicTransformer(stopping_criteria=0.5, random_state=0)
est1.fit(boston.data[:400, :], boston.target[:400])
assert_true(len(est1._programs) == 1)
def test_verbose_output():
"""Check verbose=1 does not cause error"""
old_stdout = sys.stdout
sys.stdout = StringIO()
est = SymbolicRegressor(random_state=0, verbose=1)
est.fit(boston.data, boston.target)
verbose_output = sys.stdout
sys.stdout = old_stdout
# check output
verbose_output.seek(0)
header1 = verbose_output.readline().rstrip()
true_header = ' |{:^25}|{:^42}|'.format('Population Average',
'Best Individual')
assert_equal(true_header, header1)
header2 = verbose_output.readline().rstrip()
true_header = '-' * 4 + ' ' + '-' * 25 + ' ' + '-' * 42 + ' ' + '-' * 10
assert_equal(true_header, header2)
header3 = verbose_output.readline().rstrip()
line_format = '{:>4} {:>8} {:>16} {:>8} {:>16} {:>16} {:>10}'
true_header = line_format.format('Gen', 'Length', 'Fitness', 'Length',
'Fitness', 'OOB Fitness', 'Time Left')
assert_equal(true_header, header3)
n_lines = sum(1 for l in verbose_output.readlines())
assert_equal(20, n_lines)
def test_verbose_with_oob():
"""Check oob scoring for subsample does not cause error"""
old_stdout = sys.stdout
sys.stdout = StringIO()
est = SymbolicRegressor(max_samples=0.9, random_state=0, verbose=1)
est.fit(boston.data, boston.target)
verbose_output = sys.stdout
sys.stdout = old_stdout
# check output
verbose_output.seek(0)
header1 = verbose_output.readline().rstrip()
header2 = verbose_output.readline().rstrip()
header3 = verbose_output.readline().rstrip()
n_lines = sum(1 for l in verbose_output.readlines())
assert_equal(20, n_lines)
def test_more_verbose_output():
"""Check verbose=2 does not cause error"""
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = StringIO()
sys.stderr = StringIO()
est = SymbolicRegressor(random_state=0, verbose=2)
est.fit(boston.data, boston.target)
verbose_output = sys.stdout
joblib_output = sys.stderr
sys.stdout = old_stdout
sys.stderr = old_stderr
# check output
verbose_output.seek(0)
header1 = verbose_output.readline().rstrip()
header2 = verbose_output.readline().rstrip()
header3 = verbose_output.readline().rstrip()
n_lines = sum(1 for l in verbose_output.readlines())
assert_equal(20, n_lines)
joblib_output.seek(0)
n_lines = sum(1 for l in joblib_output.readlines())
    # Newer versions of joblib appear to write progress to sys.stderr
assert_equal(0, n_lines % 10)
def test_parallel_train():
"""Check predictions are the same for different n_jobs"""
# Check the regressor
ests = [
SymbolicRegressor(population_size=100, generations=4, n_jobs=n_jobs,
random_state=0).fit(boston.data[:100, :],
boston.target[:100])
for n_jobs in [1, 2, 3, 8, 16]
]
preds = [e.predict(boston.data[500:, :]) for e in ests]
for pred1, pred2 in zip(preds, preds[1:]):
assert_array_almost_equal(pred1, pred2)
lengths = np.array([[gp.length_ for gp in e._programs[-1]] for e in ests])
for len1, len2 in zip(lengths, lengths[1:]):
assert_array_almost_equal(len1, len2)
# Check the transformer
ests = [
SymbolicTransformer(population_size=100, hall_of_fame=50,
generations=4, n_jobs=n_jobs,
random_state=0).fit(boston.data[:100, :],
boston.target[:100])
for n_jobs in [1, 2, 3, 8, 16]
]
preds = [e.transform(boston.data[500:, :]) for e in ests]
for pred1, pred2 in zip(preds, preds[1:]):
assert_array_almost_equal(pred1, pred2)
lengths = np.array([[gp.length_ for gp in e._programs[-1]] for e in ests])
for len1, len2 in zip(lengths, lengths[1:]):
assert_array_almost_equal(len1, len2)
def test_pickle():
"""Check pickability"""
# Check the regressor
est = SymbolicRegressor(generations=2, random_state=0)
est.fit(boston.data[:100, :], boston.target[:100])
score = est.score(boston.data[500:, :], boston.target[500:])
pickle_object = pickle.dumps(est)
est2 = pickle.loads(pickle_object)
assert_equal(type(est2), est.__class__)
score2 = est2.score(boston.data[500:, :], boston.target[500:])
assert_equal(score, score2)
# Check the transformer
est = SymbolicTransformer(generations=2, random_state=0)
est.fit(boston.data[:100, :], boston.target[:100])
X_new = est.transform(boston.data[500:, :])
pickle_object = pickle.dumps(est)
est2 = pickle.loads(pickle_object)
assert_equal(type(est2), est.__class__)
X_new2 = est2.transform(boston.data[500:, :])
assert_array_almost_equal(X_new, X_new2)
def test_memory_layout():
"""Check that it works no matter the memory layout"""
for Symbolic in [SymbolicTransformer, SymbolicRegressor]:
for dtype in [np.float64, np.float32]:
est = Symbolic(generations=2, random_state=0)
# Nothing
X = np.asarray(boston.data, dtype=dtype)
y = boston.target
est.fit(X, y)
# C-order
X = np.asarray(boston.data, order="C", dtype=dtype)
y = boston.target
est.fit(X, y)
# F-order
X = np.asarray(boston.data, order="F", dtype=dtype)
y = boston.target
est.fit(X, y)
# Contiguous
X = np.ascontiguousarray(boston.data, dtype=dtype)
y = boston.target
est.fit(X, y)
# Strided
X = np.asarray(boston.data[::3], dtype=dtype)
y = boston.target[::3]
est.fit(X, y)
def test_input_shape():
"""Check changed dimensions cause failure"""
random_state = check_random_state(415)
X = np.reshape(random_state.uniform(size=50), (5, 10))
y = random_state.uniform(size=5)
X2 = np.reshape(random_state.uniform(size=45), (5, 9))
# Check the regressor
est = SymbolicRegressor(generations=2, random_state=0)
est.fit(X, y)
assert_raises(ValueError, est.predict, X2)
# Check the transformer
est = SymbolicTransformer(generations=2, random_state=0)
est.fit(X, y)
assert_raises(ValueError, est.transform, X2)
def test_output_shape():
"""Check output shape is as expected"""
random_state = check_random_state(415)
X = np.reshape(random_state.uniform(size=50), (5, 10))
y = random_state.uniform(size=5)
# Check the transformer
est = SymbolicTransformer(n_components=5, generations=2, random_state=0)
est.fit(X, y)
assert_true(est.transform(X).shape == (5, 5))
def test_gridsearch():
"""Check that SymbolicRegressor can be grid-searched"""
# Grid search parsimony_coefficient
parameters = {'parsimony_coefficient': [0.001, 0.1, 'auto']}
clf = SymbolicRegressor(population_size=50, generations=5,
tournament_size=5, random_state=0)
grid = GridSearchCV(clf, parameters, cv=3,
scoring='neg_mean_absolute_error')
grid.fit(boston.data, boston.target)
expected = {'parsimony_coefficient': 0.001}
assert_equal(grid.best_params_, expected)
def test_pipeline():
"""Check that SymbolicRegressor/Transformer can work in a pipeline"""
# Check the regressor
est = make_pipeline(StandardScaler(),
SymbolicRegressor(population_size=50,
generations=5,
tournament_size=5,
random_state=0))
est.fit(boston.data, boston.target)
assert_almost_equal(est.score(boston.data, boston.target), -4.00270923)
# Check the transformer
est = make_pipeline(SymbolicTransformer(population_size=50,
hall_of_fame=20,
generations=5,
tournament_size=5,
random_state=0),
DecisionTreeRegressor())
est.fit(boston.data, boston.target)
assert_almost_equal(est.score(boston.data, boston.target), 1.0)
def test_transformer_iterable():
"""Check that the transformer is iterable"""
random_state = check_random_state(415)
X = np.reshape(random_state.uniform(size=50), (5, 10))
y = random_state.uniform(size=5)
function_set = ['add', 'sub', 'mul', 'div', 'sqrt', 'log', 'abs', 'neg',
'inv', 'max', 'min']
est = SymbolicTransformer(population_size=500, generations=2,
function_set=function_set, random_state=0)
# Check unfitted
unfitted_len = len(est)
unfitted_iter = [gp.length_ for gp in est]
expected_iter = []
assert_true(unfitted_len == 0)
assert_true(unfitted_iter == expected_iter)
# Check fitted
est.fit(X, y)
fitted_len = len(est)
fitted_iter = [gp.length_ for gp in est]
expected_iter = [8, 12, 2, 29, 9, 33, 9, 8, 4, 22]
assert_true(fitted_len == 10)
assert_true(fitted_iter == expected_iter)
# Check IndexError
assert_raises(IndexError, est.__getitem__, 10)
def test_print_overloading_estimator():
"""Check that printing a fitted estimator results in 'pretty' output"""
random_state = check_random_state(415)
X = np.reshape(random_state.uniform(size=50), (5, 10))
y = random_state.uniform(size=5)
# Check the regressor
est = SymbolicRegressor(generations=2, random_state=0)
# Unfitted
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
print(est)
output_unfitted = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
# Fitted
est.fit(X, y)
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
print(est)
output_fitted = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
print(est._program)
output_program = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
assert_true(output_unfitted != output_fitted)
assert_true(output_unfitted == est.__repr__())
assert_true(output_fitted == output_program)
# Check the transformer
est = SymbolicTransformer(generations=2, random_state=0)
# Unfitted
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
print(est)
output_unfitted = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
# Fitted
est.fit(X, y)
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
print(est)
output_fitted = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
orig_stdout = sys.stdout
try:
out = StringIO()
sys.stdout = out
output = str([gp.__str__() for gp in est])
print(output.replace("',", ",\n").replace("'", ""))
output_program = out.getvalue().strip()
finally:
sys.stdout = orig_stdout
assert_true(output_unfitted != output_fitted)
assert_true(output_unfitted == est.__repr__())
assert_true(output_fitted == output_program)
def test_validate_functions():
"""Check that valid functions are accepted & invalid ones raise error"""
random_state = check_random_state(415)
X = np.reshape(random_state.uniform(size=50), (5, 10))
y = random_state.uniform(size=5)
for Symbolic in (SymbolicRegressor, SymbolicTransformer):
# These should be fine
est = Symbolic(generations=2, random_state=0,
function_set=(add2, sub2, mul2, div2))
est.fit(boston.data, boston.target)
est = Symbolic(generations=2, random_state=0,
function_set=('add', 'sub', 'mul', div2))
est.fit(boston.data, boston.target)
# These should fail
est = Symbolic(generations=2, random_state=0,
function_set=('ni', 'sub', 'mul', div2))
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(generations=2, random_state=0,
function_set=(7, 'sub', 'mul', div2))
assert_raises(ValueError, est.fit, boston.data, boston.target)
est = Symbolic(generations=2, random_state=0, function_set=())
assert_raises(ValueError, est.fit, boston.data, boston.target)
def test_indices():
"""Check that indices are stable when generated on the fly."""
params = {'function_set': [add2, sub2, mul2, div2],
'arities': {2: [add2, sub2, mul2, div2]},
'init_depth': (2, 6),
'init_method': 'half and half',
'n_features': 10,
'const_range': (-1.0, 1.0),
'metric': 'mean absolute error',
'p_point_replace': 0.05,
'parsimony_coefficient': 0.1}
random_state = check_random_state(415)
test_gp = [mul2, div2, 8, 1, sub2, 9, .5]
gp = _Program(random_state=random_state, program=test_gp, **params)
assert_raises(ValueError, gp.get_all_indices)
assert_raises(ValueError, gp._indices)
def get_indices_property():
return gp.indices_
assert_raises(ValueError, get_indices_property)
indices, _ = gp.get_all_indices(10, 7, random_state)
assert_array_equal(indices, gp.get_all_indices()[0])
assert_array_equal(indices, gp._indices())
assert_array_equal(indices, gp.indices_)
def test_run_details():
"""Check the run_details_ attribute works as expected."""
est = SymbolicRegressor(generations=5, random_state=415)
est.fit(boston.data, boston.target)
# Check generations are indexed as expected without warm_start
assert_equal(est.run_details_['generation'], list(range(5)))
est.set_params(generations=10, warm_start=True)
est.fit(boston.data, boston.target)
# Check generations are indexed as expected with warm_start
assert_equal(est.run_details_['generation'], list(range(10)))
# Check all details have expected number of elements
for detail in est.run_details_:
assert_equal(len(est.run_details_[detail]), 10)
def test_warm_start():
"""Check the warm_start functionality works as expected."""
est = SymbolicRegressor(generations=20, random_state=415)
est.fit(boston.data, boston.target)
cold_fitness = est._program.fitness_
cold_program = est._program.__str__()
# Check fitting fewer generations raises error
est.set_params(generations=5, warm_start=True)
assert_raises(ValueError, est.fit, boston.data, boston.target)
# Check fitting the same number of generations warns
est.set_params(generations=20, warm_start=True)
assert_warns(UserWarning, est.fit, boston.data, boston.target)
# Check warm starts get the same result
est = SymbolicRegressor(generations=10, random_state=415)
est.fit(boston.data, boston.target)
est.set_params(generations=20, warm_start=True)
est.fit(boston.data, boston.target)
warm_fitness = est._program.fitness_
warm_program = est._program.__str__()
assert_almost_equal(cold_fitness, warm_fitness)
assert_equal(cold_program, warm_program)
def test_low_memory():
"""Check the low_memory functionality works as expected."""
est = SymbolicRegressor(generations=10,
random_state=56,
low_memory=True)
# Check there are no parents
est.fit(boston.data, boston.target)
assert_true(est._programs[-2] is None)
def test_low_memory_warm_start():
"""Check the warm_start functionality works as expected with low_memory."""
est = SymbolicRegressor(generations=20,
random_state=415,
low_memory=True)
est.fit(boston.data, boston.target)
cold_fitness = est._program.fitness_
cold_program = est._program.__str__()
# Check warm start with low memory gets the same result
est = SymbolicRegressor(generations=10,
random_state=415,
low_memory=True)
est.fit(boston.data, boston.target)
est.set_params(generations=20, warm_start=True)
est.fit(boston.data, boston.target)
warm_fitness = est._program.fitness_
warm_program = est._program.__str__()
assert_almost_equal(cold_fitness, warm_fitness)
assert_equal(cold_program, warm_program)
if __name__ == "__main__":
import nose
nose.runmodule()
| 37.12522
| 79
| 0.629786
|
da48ff50d54716c6b4f26cc3743865e27af5dd03
| 1,450
|
py
|
Python
|
hio-yocto-bsp/sources/poky/scripts/lib/mic/3rdparty/pykickstart/commands/__init__.py
|
qiangzai00001/hio-prj
|
060ff97fe21093b1369db78109d5b730b2b181c8
|
[
"MIT"
] | null | null | null |
hio-yocto-bsp/sources/poky/scripts/lib/mic/3rdparty/pykickstart/commands/__init__.py
|
qiangzai00001/hio-prj
|
060ff97fe21093b1369db78109d5b730b2b181c8
|
[
"MIT"
] | null | null | null |
hio-yocto-bsp/sources/poky/scripts/lib/mic/3rdparty/pykickstart/commands/__init__.py
|
qiangzai00001/hio-prj
|
060ff97fe21093b1369db78109d5b730b2b181c8
|
[
"MIT"
] | null | null | null |
#
# Chris Lumens <clumens@redhat.com>
#
# Copyright 2009 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat
# trademarks that are incorporated in the source code or documentation are not
# subject to the GNU General Public License and may only be used or replicated
# with the express permission of Red Hat, Inc.
#
import authconfig, autopart, autostep, bootloader, clearpart, device
import deviceprobe, displaymode, dmraid, driverdisk, fcoe, firewall, firstboot
import group, ignoredisk, interactive, iscsi, iscsiname, key, keyboard, lang
import langsupport, lilocheck, logging, logvol, mediacheck, method, monitor
import mouse, multipath, network, partition, raid, reboot, repo, rescue, rootpw
import selinux, services, skipx, sshpw, timezone, updates, upgrade, user, vnc
import volgroup, xconfig, zerombr, zfcp
| 53.703704
| 79
| 0.781379
|
cb831a65d4e4f8f337fbdcf3d43fc1ebae8a0809
| 15,923
|
py
|
Python
|
packages/w3af/w3af/plugins/tests/crawl/test_open_api.py
|
ZooAtmosphereGroup/HelloPackages
|
0ccffd33bf927b13d28c8f715ed35004c33465d9
|
[
"Apache-2.0"
] | null | null | null |
packages/w3af/w3af/plugins/tests/crawl/test_open_api.py
|
ZooAtmosphereGroup/HelloPackages
|
0ccffd33bf927b13d28c8f715ed35004c33465d9
|
[
"Apache-2.0"
] | null | null | null |
packages/w3af/w3af/plugins/tests/crawl/test_open_api.py
|
ZooAtmosphereGroup/HelloPackages
|
0ccffd33bf927b13d28c8f715ed35004c33465d9
|
[
"Apache-2.0"
] | null | null | null |
"""
test_open_api.py
Copyright 2018 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import json
import re
from mock import patch
from w3af.plugins.audit.sqli import sqli
from w3af.plugins.tests.helper import PluginTest, PluginConfig, MockResponse
from w3af.core.data.dc.headers import Headers
from w3af.core.data.parsers.doc.open_api import OpenAPI
from w3af.core.data.parsers.doc.open_api.tests.example_specifications import (IntParamQueryString,
NestedModel,
PetstoreSimpleModel)
API_KEY = '0x12345'
class TestOpenAPIFindAllEndpointsWithAuth(PluginTest):
target_url = 'http://w3af.org/'
_run_configs = {
'cfg': {
'target': target_url,
'plugins': {'crawl': (PluginConfig('open_api',
('query_string_auth',
'api_key=%s' % API_KEY,
PluginConfig.QUERY_STRING),
),)}
}
}
MOCK_RESPONSES = [MockResponse('http://w3af.org/swagger.json?api_key=%s' % API_KEY,
IntParamQueryString().get_specification(),
content_type='application/json')]
def test_find_all_endpoints_with_auth(self):
cfg = self._run_configs['cfg']
self._scan(cfg['target'], cfg['plugins'])
#
        # Since we configured authentication we should only get one Info
#
infos = self.kb.get('open_api', 'open_api')
self.assertEqual(len(infos), 1, infos)
info_i = infos[0]
self.assertEqual(info_i.get_name(), 'Open API specification found')
#
# Now check that we found all the fuzzable requests
#
fuzzable_requests = self.kb.get_all_known_fuzzable_requests()
self.assertEqual(len(fuzzable_requests), 4)
# Remove the /swagger.json and /
fuzzable_requests = [f for f in fuzzable_requests if f.get_url().get_path() not in ('/swagger.json', '/')]
# Order them to be able to easily assert things
def by_path(fra, frb):
return cmp(fra.get_url().url_string, frb.get_url().url_string)
fuzzable_requests.sort(by_path)
#
# Assertions on call #1
#
fuzzable_request = fuzzable_requests[0]
e_url = 'http://w3af.org/api/pets?api_key=0x12345'
e_headers = Headers([('Content-Type', 'application/json')])
self.assertEqual(fuzzable_request.get_method(), 'GET')
self.assertEqual(fuzzable_request.get_uri().url_string, e_url)
self.assertEqual(fuzzable_request.get_headers(), e_headers)
self.assertEqual(fuzzable_request.get_data(), '')
#
# Assertions on call #2
#
fuzzable_request = fuzzable_requests[1]
e_url = 'http://w3af.org/api/pets?limit=42&api_key=0x12345'
e_headers = Headers([('Content-Type', 'application/json')])
self.assertEqual(fuzzable_request.get_method(), 'GET')
self.assertEqual(fuzzable_request.get_uri().url_string, e_url)
self.assertEqual(fuzzable_request.get_headers(), e_headers)
self.assertEqual(fuzzable_request.get_data(), '')
class HeaderAuthenticatedMockResponse(MockResponse):
def get_response(self, http_request, uri, response_headers):
"""
Authenticated using request headers and API key
:return: A response containing:
* HTTP status code
* Headers dict
* Response body string
"""
bearer = http_request.headers.get('Basic', '')
if bearer != TestOpenAPINestedModelSpec.BEARER:
response_headers.update({'status': 401})
return 401, response_headers, 'Missing authentication'
return super(HeaderAuthenticatedMockResponse, self).get_response(http_request,
uri,
response_headers)
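# The mock above simulates header-based API authentication: any request
# missing the expected 'Basic' header is answered with a 401, which is
# what forces the crawler in the test below to send the configured
# credential with every request.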
class TestOpenAPINestedModelSpec(PluginTest):
BEARER = 'bearer 0x12345'
target_url = 'http://w3af.org/'
_run_configs = {
'cfg': {
'target': target_url,
'plugins': {'crawl': (PluginConfig('open_api',
('header_auth',
'Basic: %s' % BEARER,
PluginConfig.HEADER),
),),
'audit': (PluginConfig('sqli'),)}
}
}
class SQLIMockResponse(MockResponse):
def get_response(self, http_request, uri, response_headers):
basic = http_request.headers.get('Basic', '')
if basic != TestOpenAPINestedModelSpec.BEARER:
return 401, response_headers, ''
# The body is in json format, need to escape my double quotes
request_body = json.dumps(http_request.parsed_body)
payloads = [p.replace('"', '\\"') for p in sqli.SQLI_STRINGS]
response_body = 'Sunny outside'
for payload in payloads:
if payload in request_body:
response_body = 'PostgreSQL query failed:'
break
return self.status, response_headers, response_body
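    # The nested mock flags any request whose JSON body contains one of
    # the sqli plugin's probe strings by answering with a PostgreSQL
    # error message, which the audit plugin then reports as a finding.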
MOCK_RESPONSES = [HeaderAuthenticatedMockResponse('http://w3af.org/openapi.json',
NestedModel().get_specification(),
content_type='application/json'),
SQLIMockResponse(re.compile('http://w3af.org/api/pets.*'),
body=None,
method='GET',
status=200)]
def test_find_all_endpoints_with_auth(self):
cfg = self._run_configs['cfg']
self._scan(cfg['target'], cfg['plugins'])
#
# Since we configured authentication we should only get one of the Info
#
infos = self.kb.get('open_api', 'open_api')
self.assertEqual(len(infos), 1, infos)
info_i = infos[0]
self.assertEqual(info_i.get_name(), 'Open API specification found')
#
# Now check that we found all the fuzzable requests
#
fuzzable_requests = self.kb.get_all_known_fuzzable_requests()
self.assertEqual(len(fuzzable_requests), 3)
# Remove the /openapi.json and /
fuzzable_requests = [f for f in fuzzable_requests if f.get_url().get_path() not in ('/openapi.json', '/')]
# Order them to be able to easily assert things
def by_path(fra, frb):
return cmp(fra.get_url().url_string, frb.get_url().url_string)
fuzzable_requests.sort(by_path)
self.assertEqual(len(fuzzable_requests), 1)
#
# Assertions on call #1
#
fuzzable_request = fuzzable_requests[0]
e_url = 'http://w3af.org/api/pets'
e_data = '{"pet": {"tag": "7", "name": "John", "id": 42}}'
e_headers = Headers([('Content-Type', 'application/json'),
('Basic', 'bearer 0x12345')])
self.assertEqual(fuzzable_request.get_method(), 'GET')
self.assertEqual(fuzzable_request.get_uri().url_string, e_url)
self.assertEqual(fuzzable_request.get_headers(), e_headers)
self.assertEqual(fuzzable_request.get_data(), e_data)
vulns = self.kb.get('sqli', 'sqli')
self.assertEqual(len(vulns), 2)
class TestOpenAPIRaisesWarningIfNoAuth(PluginTest):
target_url = 'http://w3af.org/'
_run_configs = {
'cfg': {
'target': target_url,
'plugins': {'crawl': (PluginConfig('open_api'),)}
}
}
MOCK_RESPONSES = [MockResponse('http://w3af.org/openapi.json',
NestedModel().get_specification(),
content_type='application/json')]
def test_auth_warning_raised(self):
cfg = self._run_configs['cfg']
self._scan(cfg['target'], cfg['plugins'])
#
        # No authentication was configured, so we expect two Infos: the
        # specification found plus a missing-credentials warning
#
infos = self.kb.get('open_api', 'open_api')
self.assertEqual(len(infos), 2, infos)
info_i = infos[0]
self.assertEqual(info_i.get_name(), 'Open API specification found')
info_i = infos[1]
self.assertEqual(info_i.get_name(), 'Open API missing credentials')
class TestOpenAPIRaisesWarningIfParsingError(PluginTest):
target_url = 'http://w3af.org/'
_run_configs = {
'cfg': {
'target': target_url,
'plugins': {'crawl': (PluginConfig('open_api'),)}
}
}
MOCK_RESPONSES = [MockResponse('http://w3af.org/openapi.json',
NestedModel().get_specification()[:-1],
content_type='application/json')]
def test_parsing_error_raised(self):
cfg = self._run_configs['cfg']
with patch.object(OpenAPI, 'can_parse', return_value=True):
self._scan(cfg['target'], cfg['plugins'])
#
        # The unparseable specification should be reported as a single Info
#
infos = self.kb.get('open_api', 'open_api')
self.assertEqual(len(infos), 1, infos)
info = infos[0]
expected_desc = (
'An Open API specification was found at: "http://w3af.org/openapi.json",'
' but the scanner was unable to extract any API endpoints. In most'
' cases this is because of a syntax error in the Open API specification.\n'
'\n'
'Use https://editor.swagger.io/ to inspect the Open API specification,'
' identify and fix any issues and try again.\n\nThe errors found by'
' the parser were:\n'
'\n'
' - The OpenAPI specification at http://w3af.org/openapi.json is not in'
' JSON or YAML format'
)
self.assertEqual(info.get_name(), 'Failed to parse Open API specification')
self.assertEqual(info.get_desc(with_id=False), expected_desc)
class TestOpenAPIFindsSpecInOtherDirectory(PluginTest):
target_url = 'http://w3af.org/'
_run_configs = {
'cfg': {
'target': target_url,
'plugins': {'crawl': (PluginConfig('open_api'),)}
}
}
MOCK_RESPONSES = [MockResponse('http://w3af.org/api/v2/openapi.json',
NestedModel().get_specification(),
content_type='application/json')]
def test_auth_warning_raised(self):
cfg = self._run_configs['cfg']
self._scan(cfg['target'], cfg['plugins'])
#
        # No authentication was configured, so we expect two Infos
#
infos = self.kb.get('open_api', 'open_api')
self.assertEqual(len(infos), 2, infos)
info_i = infos[0]
self.assertEqual(info_i.get_name(), 'Open API specification found')
class TestOpenAPIFindsSpecInOtherDirectory2(PluginTest):
target_url = 'http://w3af.org/a/b/c/'
_run_configs = {
'cfg': {
'target': target_url,
'plugins': {'crawl': (PluginConfig('open_api'),)}
}
}
MOCK_RESPONSES = [MockResponse('http://w3af.org/a/openapi.json',
NestedModel().get_specification(),
content_type='application/json')]
def test_auth_warning_raised(self):
cfg = self._run_configs['cfg']
self._scan(cfg['target'], cfg['plugins'])
#
        # No authentication was configured, so we expect two Infos
#
infos = self.kb.get('open_api', 'open_api')
self.assertEqual(len(infos), 2, infos)
info_i = infos[0]
self.assertEqual(info_i.get_name(), 'Open API specification found')
class TestOpenAPIFuzzURLParts(PluginTest):
api_key = 'xxx-yyy-zzz'
target_url = 'http://petstore.swagger.io/'
vulnerable_url = 'http://petstore.swagger.io/api/pets/1%272%223'
_run_configs = {
'cfg': {
'target': target_url,
'plugins': {'crawl': (PluginConfig('open_api',
('header_auth',
'X-API-Key: %s' % api_key,
PluginConfig.HEADER),
),),
'audit': (PluginConfig('sqli'),)}
}
}
class SQLIMockResponse(MockResponse):
def get_response(self, http_request, uri, response_headers):
header = http_request.headers.get('X-API-Key', '')
if header != TestOpenAPIFuzzURLParts.api_key:
return 401, response_headers, ''
response_body = 'Sunny outside'
status = 200
if uri == TestOpenAPIFuzzURLParts.vulnerable_url:
response_body = 'PostgreSQL query failed:'
status = 500
return status, response_headers, response_body
MOCK_RESPONSES = [MockResponse('http://petstore.swagger.io/openapi.json',
PetstoreSimpleModel().get_specification(),
content_type='application/json'),
SQLIMockResponse(re.compile('http://petstore.swagger.io/api/pets.*'),
body='{}',
method='GET',
status=200),
SQLIMockResponse(re.compile('http://petstore.swagger.io/api/pets.*'),
body='{}',
method='POST',
status=200)
]
def test_fuzzing_parameters_in_path(self):
#
# TODO: This unittest is failing because of basePath being ignored
# or incorrectly handled by the parser. Note that the request
# being sent by the fuzzer goes to http://petstore.swagger.io/pets/...
# instead of http://petstore.swagger.io/api/pets/...
#
cfg = self._run_configs['cfg']
self._scan(cfg['target'], cfg['plugins'])
#
        # Since we configured authentication we should only get one Info
#
infos = self.kb.get('open_api', 'open_api')
self.assertEqual(len(infos), 1, infos)
info_i = infos[0]
self.assertEqual(info_i.get_name(), 'Open API specification found')
vulns = self.kb.get('sqli', 'sqli')
self.assertEqual(len(vulns), 1)
vuln = vulns[0]
self.assertEquals(vuln.get_method(), 'GET')
self.assertEquals(vuln.get_url().url_string, TestOpenAPIFuzzURLParts.vulnerable_url)
| 36.106576
| 114
| 0.560384
|
a5522282d2b609530afeafa2023d013cc5495f97
| 3,570
|
py
|
Python
|
list_routes.py
|
Yusef28/todoskeleton
|
f6965c4eec2e732cd0f6f72f7d35866fdbb6c961
|
[
"MIT"
] | null | null | null |
list_routes.py
|
Yusef28/todoskeleton
|
f6965c4eec2e732cd0f6f72f7d35866fdbb6c961
|
[
"MIT"
] | null | null | null |
list_routes.py
|
Yusef28/todoskeleton
|
f6965c4eec2e732cd0f6f72f7d35866fdbb6c961
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
list_routes.py
"""
#Built-in/Generic
import datetime
#Libs
from flask import Flask, g, redirect, render_template, request, url_for, session, flash
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy import (
Table, Column, Integer, String, MetaData, ForeignKey, Boolean
)
#Modules
from flask_app import db, app
from models import User, List, Task
@app.route("/list_create", methods=('GET', 'POST'))
def list_create():
if request.method == 'POST':
title = request.form['new_list']
if check_list_exists(title):
flash('A list with this name already exists')
return redirect(url_for('dashboard'))
lists = db.session.query(List).filter_by(parent_user=session["user_id"]).all()
old_current_list = find_current_list(lists)
new_current_list = List(title=request.form['new_list'], parent_user=session['user_id'])
#This needs to be here before the change/choose function calls
#because they query for this list and then update current
db.session.add(new_current_list)
db.session.commit()
#when you create a new list, switch to immediately
if old_current_list:
            change_current_list(old_current_list.id, new_current_list.id)
else:
choose_current_list(new_current_list.id)
print('List *'+new_current_list.title+'* for user with id: *'+str(session['user_id'])+'* created!')
flash('List *'+new_current_list.title+'* for user with id: *'+str(session['user_id'])+'* created!')
return redirect(request.referrer)
@app.route("/list_update/<int:id>", methods=('GET', 'POST'))
def list_update(id):
if request.method == 'POST':
new_title = request.form['list_title_change_input']
if check_list_exists(new_title):
flash('A list with this name already exists')
return redirect(url_for('dashboard'))
list = db.session.query(List).get(id)
old_title = list.title
list.title = new_title
db.session.commit()
print('List title from !'+old_title+'! to *'+list.title+'* changed!')
return redirect(url_for('dashboard'))
@app.route("/list_delete/<int:id>")
def list_delete(id):
list = db.session.query(List).get(id)
db.session.delete(list)
db.session.commit()
print('List *'+list.title+'* Deleted!')
return redirect(url_for('dashboard'))
#find the list with current=True
#(plain helper rather than a routed endpoint: it takes a Python list
# argument, which Flask cannot supply from a URL)
def find_current_list(lists):
for list in lists:
if list.current == True:
return list
return None
@app.route("/change_current_list/<int:old_list_id>/<int:new_list_id>/")
def change_current_list(old_list_id, new_list_id):
new_list = db.session.query(List).get(new_list_id)
if not new_list:
print("List ID not available")
elif new_list.id == old_list_id:
print("You are already viewing this list")
else:
old_list = db.session.query(List).get(old_list_id)
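        # tuple swap: hand current=True from the old list to the new one
        # in a single step, so exactly one list per user stays current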
old_list.current, new_list.current = new_list.current, old_list.current
db.session.commit()
#user = user_read(session['user_id'])
return redirect(url_for('dashboard'))
@app.route("/choose_current_list/<int:new_list_id>/")
def choose_current_list(new_list_id):
new_list = db.session.query(List).get(new_list_id)
if not new_list:
print("List ID not available")
else:
new_list.current = True
db.session.commit()
return redirect(url_for('dashboard'))
@app.route("/check_list_exists/<string:title>/")
def check_list_exists(title):
return db.session.query(List).filter_by(title=title, parent_user=session["user_id"]).first()
| 28.333333
| 102
| 0.7
|
88988d2bc89b911223c66b3ec58944ac0cff2dfc
| 1,714
|
py
|
Python
|
stanCode_Projects/my_photoshop/mirror_lake.py
|
fangchingsu/stanCode_Turing
|
d114ac8bc86855871e1da94bc58a8f9e6d78ef6e
|
[
"MIT"
] | null | null | null |
stanCode_Projects/my_photoshop/mirror_lake.py
|
fangchingsu/stanCode_Turing
|
d114ac8bc86855871e1da94bc58a8f9e6d78ef6e
|
[
"MIT"
] | null | null | null |
stanCode_Projects/my_photoshop/mirror_lake.py
|
fangchingsu/stanCode_Turing
|
d114ac8bc86855871e1da94bc58a8f9e6d78ef6e
|
[
"MIT"
] | null | null | null |
"""
File: mirror_lake.py
----------------------------------
This file reads in mt-rainier.jpg and
makes a new image that creates a mirror
lake vibe by placing the inverse image of
mt-rainier.jpg below the original one
"""
from simpleimage import SimpleImage
def reflect(filename):
"""
    :param filename: SimpleImage, the original image (an image object,
                     despite the parameter name, not a file path)
    :return new_img: SimpleImage, an image twice the height of the original
                     with its mirrored copy placed below it
"""
# setup new image size
new_img = SimpleImage.blank(filename.width, filename.height * 2)
    # iterate over every pixel of the original photo
for y in range(filename.height):
for x in range(filename.width):
# get original photo pixel
img_pixel = filename.get_pixel(x, y)
# get up and down pixel
new_pixel1 = new_img.get_pixel(x, y)
new_pixel2 = new_img.get_pixel(x, new_img.height-1-y)
# fill the up empty pixel in new image
new_pixel1.red = img_pixel.red
new_pixel1.green = img_pixel.green
new_pixel1.blue = img_pixel.blue
# fill the down empty pixel in new image
new_pixel2.red = img_pixel.red
new_pixel2.green = img_pixel.green
new_pixel2.blue = img_pixel.blue
# return updated new image
return new_img
def main():
"""
read the original photo and get its mirror photo
"""
# read the original photo
original_mt = SimpleImage('images/mt-rainier.jpg')
# show photo
original_mt.show()
# update photo
reflected = reflect(original_mt)
# show updated photo
reflected.show()
if __name__ == '__main__':
main()
| 31.163636
| 109
| 0.640023
|
543d2e987e89158f9950bbd20c4f4b00d9f78318
| 2,953
|
py
|
Python
|
spotify_ripper/eventloop.py
|
s0ng/spotify-ripper
|
d0464193dead7bd3ac7580e98bde86a0f323acae
|
[
"MIT"
] | 536
|
2016-10-05T15:31:29.000Z
|
2022-03-27T22:24:34.000Z
|
spotify_ripper/eventloop.py
|
jilgue/spotify-ripper
|
425b01b70d2f5b681cba750ba0b7ff892a7efe4b
|
[
"MIT"
] | 62
|
2016-10-11T01:53:38.000Z
|
2022-02-07T08:35:39.000Z
|
spotify_ripper/eventloop.py
|
jilgue/spotify-ripper
|
425b01b70d2f5b681cba750ba0b7ff892a7efe4b
|
[
"MIT"
] | 148
|
2016-10-04T11:54:01.000Z
|
2022-03-27T22:24:55.000Z
|
# -*- coding: utf-8 -*-
# From PySpotify's EventLoop
# https://github.com/mopidy/pyspotify/blob/v2.x/master/spotify/eventloop.py
from __future__ import unicode_literals
from colorama import Fore
import threading
try:
# Python 3
import queue
except ImportError:
# Python 2
import Queue as queue
import spotify
__all__ = [
'EventLoop',
]
class EventLoop(threading.Thread):
"""Event loop for automatically processing events from libspotify.
The event loop is a :class:`~threading.Thread` that listens to
:attr:`~spotify.SessionEvent.NOTIFY_MAIN_THREAD` events and calls
:meth:`~spotify.Session.process_events` when needed.
To use it, pass it your :class:`~spotify.Session` instance and call
:meth:`start`::
>>> session = spotify.Session()
>>> event_loop = EventLoop(session)
>>> event_loop.start()
.. warning::
If you use :class:`EventLoop` to process the libspotify events, any
event listeners you've registered will be called from the event loop
thread. pyspotify itself is thread safe, but you'll need to ensure that
you have proper synchronization in your own application code, as always
when working with threads.
"""
name = 'SpotifyEventLoop'
def __init__(self, session, timeout, ripper):
threading.Thread.__init__(self)
self._session = session
self._runnable = True
self._queue_timeout = timeout * 1000
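        # timeouts are tracked in milliseconds to match what
        # session.process_events() returns; run() converts back to
        # seconds for queue.get()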
self._queue = queue.Queue()
self._ripper = ripper
def start(self):
"""Start the event loop."""
self._session.on(
spotify.SessionEvent.NOTIFY_MAIN_THREAD,
self._on_notify_main_thread)
threading.Thread.start(self)
def stop(self):
"""Stop the event loop."""
self._runnable = False
self._session.off(
spotify.SessionEvent.NOTIFY_MAIN_THREAD,
self._on_notify_main_thread)
def run(self):
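        """Process libspotify events until stopped or the ripper exits."""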
timeout_countdown = self._session.process_events()
        while self._runnable and self._ripper.is_alive():
timeout = min(timeout_countdown, self._queue_timeout)
try:
self._queue.get(timeout=(timeout / 1000.0))
except queue.Empty:
# queue timeout
timeout_countdown -= timeout
else:
# notification
timeout_countdown = 0
finally:
if timeout_countdown <= 0:
timeout_countdown = self._session.process_events()
def _on_notify_main_thread(self, session):
# WARNING: This event listener is called from an internal libspotify
# thread. It must not block.
try:
self._queue.put_nowait(1)
except queue.Full:
print(Fore.RED +
"event loop queue full. dropped notification event" +
Fore.RESET)
| 28.95098
| 79
| 0.62445
|
28ad4eff66b27f5a0ac679edc3b4c26f48f6f286
| 22,529
|
py
|
Python
|
2015/2015_16a.py
|
davidxiao93/Advent-of-Code
|
29503100ae4eb46b048fc3ab68ff0181c6f00ee5
|
[
"MIT"
] | null | null | null |
2015/2015_16a.py
|
davidxiao93/Advent-of-Code
|
29503100ae4eb46b048fc3ab68ff0181c6f00ee5
|
[
"MIT"
] | null | null | null |
2015/2015_16a.py
|
davidxiao93/Advent-of-Code
|
29503100ae4eb46b048fc3ab68ff0181c6f00ee5
|
[
"MIT"
] | null | null | null |
input = """Sue 1: goldfish: 6, trees: 9, akitas: 0
Sue 2: goldfish: 7, trees: 1, akitas: 0
Sue 3: cars: 10, akitas: 6, perfumes: 7
Sue 4: perfumes: 2, vizslas: 0, cars: 6
Sue 5: goldfish: 1, trees: 3, perfumes: 10
Sue 6: children: 9, vizslas: 7, cars: 9
Sue 7: cars: 6, vizslas: 5, cats: 3
Sue 8: akitas: 10, vizslas: 9, children: 3
Sue 9: vizslas: 8, cats: 2, trees: 1
Sue 10: perfumes: 10, trees: 6, cars: 4
Sue 11: cars: 9, children: 1, cats: 1
Sue 12: pomeranians: 4, akitas: 6, goldfish: 8
Sue 13: cats: 10, children: 5, trees: 9
Sue 14: perfumes: 8, vizslas: 3, samoyeds: 1
Sue 15: vizslas: 2, perfumes: 8, trees: 3
Sue 16: pomeranians: 10, trees: 9, samoyeds: 4
Sue 17: akitas: 7, vizslas: 0, goldfish: 6
Sue 18: trees: 5, vizslas: 9, cars: 0
Sue 19: akitas: 3, goldfish: 9, trees: 10
Sue 20: perfumes: 7, samoyeds: 3, vizslas: 10
Sue 21: perfumes: 7, pomeranians: 10, akitas: 8
Sue 22: vizslas: 6, trees: 8, akitas: 10
Sue 23: goldfish: 0, trees: 4, children: 9
Sue 24: goldfish: 7, pomeranians: 9, akitas: 4
Sue 25: cars: 7, trees: 4, pomeranians: 4
Sue 26: trees: 9, akitas: 9, pomeranians: 7
Sue 27: samoyeds: 0, perfumes: 9, goldfish: 10
Sue 28: cars: 5, trees: 7, vizslas: 1
Sue 29: perfumes: 9, trees: 1, children: 6
Sue 30: goldfish: 10, trees: 0, cars: 4
Sue 31: akitas: 2, perfumes: 5, goldfish: 5
Sue 32: goldfish: 0, akitas: 5, trees: 0
Sue 33: vizslas: 2, akitas: 2, samoyeds: 3
Sue 34: goldfish: 8, perfumes: 5, cars: 3
Sue 35: akitas: 1, cats: 4, trees: 9
Sue 36: cars: 4, vizslas: 4, goldfish: 7
Sue 37: akitas: 5, perfumes: 7, trees: 3
Sue 38: goldfish: 10, trees: 2, vizslas: 9
Sue 39: goldfish: 4, pomeranians: 5, vizslas: 5
Sue 40: perfumes: 5, samoyeds: 4, akitas: 6
Sue 41: goldfish: 9, cars: 4, perfumes: 5
Sue 42: trees: 6, pomeranians: 9, goldfish: 8
Sue 43: perfumes: 7, pomeranians: 1, akitas: 2
Sue 44: vizslas: 9, cars: 5, cats: 0
Sue 45: akitas: 1, goldfish: 6, trees: 0
Sue 46: akitas: 5, vizslas: 8, trees: 2
Sue 47: trees: 9, akitas: 2, vizslas: 9
Sue 48: goldfish: 10, trees: 5, akitas: 2
Sue 49: cars: 7, vizslas: 2, perfumes: 6
Sue 50: akitas: 5, goldfish: 6, perfumes: 0
Sue 51: cars: 9, cats: 7, trees: 5
Sue 52: akitas: 7, goldfish: 10, cars: 0
Sue 53: cars: 10, cats: 4, perfumes: 2
Sue 54: goldfish: 2, pomeranians: 5, perfumes: 10
Sue 55: vizslas: 5, akitas: 4, cars: 8
Sue 56: goldfish: 9, vizslas: 4, akitas: 5
Sue 57: perfumes: 8, samoyeds: 7, cars: 9
Sue 58: cars: 5, akitas: 7, perfumes: 8
Sue 59: samoyeds: 8, cars: 10, vizslas: 10
Sue 60: akitas: 6, samoyeds: 0, goldfish: 3
Sue 61: trees: 8, pomeranians: 0, akitas: 2
Sue 62: trees: 1, perfumes: 3, vizslas: 4
Sue 63: vizslas: 6, samoyeds: 9, goldfish: 8
Sue 64: goldfish: 7, trees: 6, vizslas: 3
Sue 65: cars: 1, vizslas: 0, akitas: 6
Sue 66: cats: 6, pomeranians: 4, cars: 9
Sue 67: trees: 10, pomeranians: 7, samoyeds: 3
Sue 68: pomeranians: 5, goldfish: 9, akitas: 1
Sue 69: akitas: 1, vizslas: 0, trees: 9
Sue 70: cats: 4, goldfish: 4, vizslas: 10
Sue 71: vizslas: 7, perfumes: 7, trees: 8
Sue 72: children: 2, vizslas: 9, cats: 3
Sue 73: cars: 8, pomeranians: 0, perfumes: 6
Sue 74: akitas: 1, pomeranians: 8, vizslas: 10
Sue 75: vizslas: 5, perfumes: 5, cars: 7
Sue 76: cars: 3, vizslas: 3, goldfish: 0
Sue 77: akitas: 9, samoyeds: 1, pomeranians: 3
Sue 78: trees: 0, vizslas: 0, akitas: 6
Sue 79: pomeranians: 9, cars: 1, perfumes: 0
Sue 80: perfumes: 10, trees: 1, cats: 0
Sue 81: goldfish: 5, akitas: 9, trees: 0
Sue 82: vizslas: 1, akitas: 6, children: 4
Sue 83: samoyeds: 7, perfumes: 8, pomeranians: 4
Sue 84: perfumes: 3, children: 3, cats: 7
Sue 85: goldfish: 9, trees: 3, cars: 9
Sue 86: cars: 0, perfumes: 9, vizslas: 0
Sue 87: children: 3, trees: 4, akitas: 3
Sue 88: trees: 1, samoyeds: 1, goldfish: 0
Sue 89: akitas: 8, cars: 3, vizslas: 9
Sue 90: pomeranians: 9, trees: 9, goldfish: 8
Sue 91: goldfish: 7, trees: 10, children: 0
Sue 92: cats: 9, cars: 7, perfumes: 7
Sue 93: vizslas: 2, goldfish: 7, cats: 9
Sue 94: akitas: 5, cars: 8, vizslas: 4
Sue 95: goldfish: 7, vizslas: 1, perfumes: 2
Sue 96: goldfish: 5, trees: 6, perfumes: 10
Sue 97: trees: 0, perfumes: 7, cars: 0
Sue 98: cars: 2, perfumes: 6, trees: 8
Sue 99: trees: 10, children: 7, cats: 9
Sue 100: samoyeds: 5, goldfish: 6, vizslas: 6
Sue 101: cars: 10, perfumes: 9, vizslas: 3
Sue 102: pomeranians: 6, trees: 1, samoyeds: 4
Sue 103: cars: 2, perfumes: 1, goldfish: 5
Sue 104: goldfish: 2, cars: 8, pomeranians: 2
Sue 105: goldfish: 6, vizslas: 0, trees: 10
Sue 106: trees: 10, akitas: 10, pomeranians: 0
Sue 107: vizslas: 2, pomeranians: 10, trees: 3
Sue 108: children: 3, vizslas: 8, akitas: 7
Sue 109: perfumes: 2, akitas: 2, samoyeds: 3
Sue 110: goldfish: 7, trees: 1, perfumes: 1
Sue 111: akitas: 2, cars: 9, perfumes: 2
Sue 112: children: 10, cars: 0, akitas: 3
Sue 113: akitas: 9, vizslas: 4, children: 3
Sue 114: pomeranians: 3, trees: 2, goldfish: 5
Sue 115: perfumes: 8, cars: 6, trees: 0
Sue 116: samoyeds: 6, children: 3, pomeranians: 1
Sue 117: goldfish: 1, trees: 2, akitas: 1
Sue 118: goldfish: 10, akitas: 10, samoyeds: 0
Sue 119: vizslas: 10, perfumes: 6, cars: 0
Sue 120: cars: 2, perfumes: 9, goldfish: 5
Sue 121: vizslas: 2, trees: 2, cars: 6
Sue 122: vizslas: 3, trees: 0, akitas: 2
Sue 123: akitas: 5, samoyeds: 7, goldfish: 1
Sue 124: goldfish: 8, samoyeds: 7, trees: 8
Sue 125: trees: 3, goldfish: 8, perfumes: 5
Sue 126: cats: 3, vizslas: 9, goldfish: 0
Sue 127: pomeranians: 9, goldfish: 3, perfumes: 6
Sue 128: vizslas: 4, cars: 8, goldfish: 5
Sue 129: vizslas: 8, children: 5, perfumes: 8
Sue 130: cars: 7, children: 7, cats: 3
Sue 131: perfumes: 1, akitas: 8, vizslas: 9
Sue 132: perfumes: 7, samoyeds: 10, pomeranians: 6
Sue 133: cars: 5, perfumes: 3, goldfish: 7
Sue 134: perfumes: 9, akitas: 2, cats: 3
Sue 135: perfumes: 1, trees: 9, vizslas: 9
Sue 136: akitas: 7, cars: 3, perfumes: 7
Sue 137: vizslas: 9, goldfish: 8, cars: 5
Sue 138: trees: 0, samoyeds: 1, cars: 3
Sue 139: cars: 0, perfumes: 6, trees: 0
Sue 140: pomeranians: 4, cars: 1, perfumes: 7
Sue 141: vizslas: 10, akitas: 8, cats: 3
Sue 142: trees: 1, cats: 6, vizslas: 5
Sue 143: pomeranians: 9, cars: 7, perfumes: 9
Sue 144: cars: 0, perfumes: 2, pomeranians: 1
Sue 145: trees: 1, goldfish: 9, perfumes: 8
Sue 146: cars: 8, children: 5, vizslas: 2
Sue 147: perfumes: 2, goldfish: 5, cars: 0
Sue 148: akitas: 2, perfumes: 7, pomeranians: 6
Sue 149: goldfish: 8, cars: 0, trees: 1
Sue 150: akitas: 6, perfumes: 5, trees: 0
Sue 151: vizslas: 6, samoyeds: 8, akitas: 10
Sue 152: trees: 7, akitas: 7, perfumes: 6
Sue 153: goldfish: 9, cats: 9, cars: 3
Sue 154: vizslas: 10, trees: 0, cars: 9
Sue 155: perfumes: 3, children: 2, goldfish: 1
Sue 156: goldfish: 7, perfumes: 5, akitas: 6
Sue 157: cats: 10, trees: 1, goldfish: 0
Sue 158: cats: 7, children: 7, vizslas: 6
Sue 159: perfumes: 9, akitas: 0, cars: 0
Sue 160: akitas: 3, goldfish: 10, pomeranians: 2
Sue 161: goldfish: 10, cars: 6, perfumes: 3
Sue 162: trees: 0, cars: 9, goldfish: 1
Sue 163: cars: 8, perfumes: 9, vizslas: 5
Sue 164: goldfish: 1, trees: 10, children: 6
Sue 165: goldfish: 0, vizslas: 6, cars: 0
Sue 166: akitas: 5, vizslas: 1, cars: 5
Sue 167: vizslas: 1, samoyeds: 1, children: 4
Sue 168: samoyeds: 7, vizslas: 7, akitas: 3
Sue 169: goldfish: 3, cats: 9, trees: 2
Sue 170: cars: 5, perfumes: 9, vizslas: 5
Sue 171: goldfish: 7, cars: 6, perfumes: 10
Sue 172: cats: 6, akitas: 1, children: 6
Sue 173: cats: 4, goldfish: 1, children: 3
Sue 174: cars: 2, pomeranians: 2, vizslas: 7
Sue 175: trees: 0, children: 4, goldfish: 7
Sue 176: children: 8, cars: 5, cats: 9
Sue 177: pomeranians: 4, vizslas: 7, trees: 3
Sue 178: vizslas: 6, perfumes: 10, akitas: 6
Sue 179: cars: 4, akitas: 4, trees: 4
Sue 180: akitas: 8, goldfish: 6, trees: 9
Sue 181: perfumes: 3, vizslas: 10, cars: 3
Sue 182: vizslas: 3, samoyeds: 3, goldfish: 7
Sue 183: goldfish: 10, perfumes: 2, cats: 1
Sue 184: goldfish: 5, trees: 1, perfumes: 1
Sue 185: vizslas: 10, trees: 9, perfumes: 2
Sue 186: goldfish: 6, perfumes: 9, trees: 1
Sue 187: cars: 0, trees: 9, goldfish: 6
Sue 188: cars: 0, trees: 1, vizslas: 9
Sue 189: akitas: 7, vizslas: 2, trees: 0
Sue 190: pomeranians: 5, perfumes: 8, akitas: 10
Sue 191: vizslas: 5, akitas: 3, cats: 0
Sue 192: children: 1, trees: 1, cars: 2
Sue 193: cars: 3, goldfish: 9, trees: 2
Sue 194: samoyeds: 3, akitas: 4, perfumes: 8
Sue 195: trees: 1, vizslas: 8, akitas: 10
Sue 196: akitas: 6, cars: 5, pomeranians: 0
Sue 197: akitas: 5, vizslas: 5, cats: 1
Sue 198: trees: 4, cars: 6, goldfish: 6
Sue 199: cats: 7, cars: 5, goldfish: 6
Sue 200: vizslas: 4, cats: 0, akitas: 9
Sue 201: pomeranians: 1, perfumes: 4, children: 2
Sue 202: cats: 1, perfumes: 4, vizslas: 3
Sue 203: vizslas: 1, akitas: 9, children: 5
Sue 204: perfumes: 8, cars: 7, trees: 4
Sue 205: perfumes: 7, pomeranians: 5, cats: 9
Sue 206: vizslas: 8, trees: 2, akitas: 2
Sue 207: akitas: 6, vizslas: 2, perfumes: 10
Sue 208: vizslas: 1, children: 7, akitas: 4
Sue 209: perfumes: 4, trees: 2, children: 1
Sue 210: goldfish: 0, vizslas: 2, samoyeds: 10
Sue 211: cars: 8, perfumes: 3, trees: 1
Sue 212: cars: 8, samoyeds: 5, pomeranians: 8
Sue 213: akitas: 2, goldfish: 8, pomeranians: 2
Sue 214: akitas: 6, pomeranians: 2, cars: 0
Sue 215: trees: 10, pomeranians: 4, vizslas: 0
Sue 216: perfumes: 0, cars: 8, trees: 0
Sue 217: samoyeds: 8, akitas: 7, children: 10
Sue 218: perfumes: 1, vizslas: 6, children: 0
Sue 219: children: 1, goldfish: 4, trees: 1
Sue 220: akitas: 10, goldfish: 10, trees: 5
Sue 221: cars: 7, pomeranians: 6, perfumes: 3
Sue 222: vizslas: 6, children: 0, akitas: 5
Sue 223: perfumes: 9, cars: 1, trees: 6
Sue 224: pomeranians: 1, trees: 0, vizslas: 0
Sue 225: goldfish: 8, akitas: 4, perfumes: 10
Sue 226: pomeranians: 7, cats: 7, children: 4
Sue 227: trees: 0, akitas: 2, perfumes: 1
Sue 228: vizslas: 6, cars: 10, perfumes: 9
Sue 229: cars: 0, perfumes: 6, trees: 4
Sue 230: pomeranians: 7, perfumes: 5, trees: 2
Sue 231: goldfish: 9, cars: 6, trees: 7
Sue 232: akitas: 1, vizslas: 5, cars: 3
Sue 233: akitas: 7, samoyeds: 2, vizslas: 5
Sue 234: akitas: 6, cats: 8, pomeranians: 0
Sue 235: pomeranians: 5, akitas: 5, vizslas: 3
Sue 236: goldfish: 5, trees: 6, akitas: 5
Sue 237: goldfish: 9, perfumes: 5, cats: 5
Sue 238: cats: 8, goldfish: 4, perfumes: 0
Sue 239: samoyeds: 8, children: 6, pomeranians: 6
Sue 240: akitas: 4, samoyeds: 10, trees: 8
Sue 241: trees: 2, goldfish: 8, cars: 1
Sue 242: perfumes: 2, cars: 0, akitas: 10
Sue 243: pomeranians: 1, cars: 7, trees: 2
Sue 244: trees: 9, vizslas: 2, akitas: 10
Sue 245: cars: 9, pomeranians: 4, trees: 0
Sue 246: cars: 9, pomeranians: 7, perfumes: 1
Sue 247: trees: 0, goldfish: 1, akitas: 8
Sue 248: vizslas: 1, cats: 4, akitas: 4
Sue 249: cats: 6, children: 4, goldfish: 9
Sue 250: vizslas: 1, cars: 10, samoyeds: 5
Sue 251: cars: 0, goldfish: 1, vizslas: 7
Sue 252: cars: 7, akitas: 9, vizslas: 10
Sue 253: akitas: 7, vizslas: 2, perfumes: 5
Sue 254: vizslas: 10, akitas: 5, samoyeds: 0
Sue 255: pomeranians: 8, goldfish: 0, cats: 6
Sue 256: cars: 10, goldfish: 8, vizslas: 9
Sue 257: goldfish: 3, perfumes: 9, cats: 3
Sue 258: trees: 6, goldfish: 6, cars: 6
Sue 259: trees: 0, goldfish: 2, perfumes: 8
Sue 260: trees: 5, akitas: 0, cars: 0
Sue 261: pomeranians: 9, goldfish: 7, perfumes: 8
Sue 262: perfumes: 8, vizslas: 6, goldfish: 2
Sue 263: vizslas: 6, trees: 5, goldfish: 9
Sue 264: vizslas: 4, perfumes: 7, cars: 9
Sue 265: goldfish: 10, trees: 3, perfumes: 1
Sue 266: trees: 10, akitas: 8, goldfish: 8
Sue 267: goldfish: 4, trees: 0, samoyeds: 9
Sue 268: vizslas: 1, trees: 0, goldfish: 8
Sue 269: cars: 2, perfumes: 10, goldfish: 5
Sue 270: perfumes: 7, cars: 2, vizslas: 1
Sue 271: cars: 6, perfumes: 10, goldfish: 6
Sue 272: samoyeds: 4, goldfish: 2, vizslas: 9
Sue 273: perfumes: 4, goldfish: 4, vizslas: 1
Sue 274: children: 4, cars: 4, perfumes: 3
Sue 275: children: 8, vizslas: 3, trees: 2
Sue 276: vizslas: 5, children: 7, perfumes: 3
Sue 277: perfumes: 3, cats: 4, vizslas: 5
Sue 278: cars: 1, samoyeds: 10, akitas: 2
Sue 279: trees: 9, perfumes: 9, cars: 10
Sue 280: vizslas: 5, trees: 0, perfumes: 6
Sue 281: vizslas: 3, akitas: 10, pomeranians: 7
Sue 282: trees: 1, children: 2, akitas: 8
Sue 283: akitas: 9, goldfish: 6, cats: 5
Sue 284: cars: 9, children: 10, pomeranians: 2
Sue 285: pomeranians: 0, perfumes: 4, cars: 7
Sue 286: perfumes: 0, vizslas: 10, akitas: 10
Sue 287: cats: 2, perfumes: 3, trees: 5
Sue 288: akitas: 9, vizslas: 8, samoyeds: 9
Sue 289: perfumes: 6, children: 2, cars: 7
Sue 290: akitas: 0, children: 5, cars: 5
Sue 291: cars: 4, perfumes: 0, trees: 1
Sue 292: cats: 0, cars: 8, perfumes: 6
Sue 293: akitas: 9, cats: 5, children: 5
Sue 294: akitas: 4, cars: 9, goldfish: 3
Sue 295: cars: 2, akitas: 3, perfumes: 7
Sue 296: perfumes: 4, cars: 7, goldfish: 10
Sue 297: trees: 5, akitas: 8, vizslas: 1
Sue 298: perfumes: 0, goldfish: 6, trees: 9
Sue 299: perfumes: 6, samoyeds: 8, cars: 1
Sue 300: goldfish: 10, perfumes: 4, akitas: 2
Sue 301: cars: 3, trees: 0, goldfish: 8
Sue 302: perfumes: 7, samoyeds: 2, vizslas: 7
Sue 303: children: 10, goldfish: 7, perfumes: 2
Sue 304: samoyeds: 8, vizslas: 2, cars: 1
Sue 305: trees: 1, cats: 0, goldfish: 10
Sue 306: trees: 4, perfumes: 2, cars: 7
Sue 307: cars: 6, vizslas: 2, children: 6
Sue 308: vizslas: 2, cars: 0, akitas: 7
Sue 309: cars: 3, vizslas: 8, perfumes: 6
Sue 310: goldfish: 7, perfumes: 7, vizslas: 3
Sue 311: pomeranians: 10, trees: 2, cars: 0
Sue 312: samoyeds: 2, vizslas: 9, akitas: 1
Sue 313: cars: 4, pomeranians: 7, goldfish: 7
Sue 314: akitas: 2, pomeranians: 9, samoyeds: 10
Sue 315: akitas: 3, vizslas: 2, trees: 0
Sue 316: cars: 0, perfumes: 4, pomeranians: 6
Sue 317: akitas: 10, goldfish: 3, pomeranians: 7
Sue 318: cars: 9, trees: 0, pomeranians: 9
Sue 319: akitas: 3, vizslas: 7, children: 10
Sue 320: vizslas: 0, akitas: 8, pomeranians: 4
Sue 321: cars: 10, akitas: 9, vizslas: 3
Sue 322: perfumes: 0, akitas: 8, vizslas: 6
Sue 323: vizslas: 10, perfumes: 5, cars: 3
Sue 324: akitas: 0, goldfish: 6, vizslas: 7
Sue 325: perfumes: 9, vizslas: 5, pomeranians: 2
Sue 326: vizslas: 6, goldfish: 10, pomeranians: 8
Sue 327: vizslas: 10, cars: 1, akitas: 7
Sue 328: trees: 1, perfumes: 10, cars: 10
Sue 329: pomeranians: 5, samoyeds: 3, cars: 10
Sue 330: akitas: 6, cars: 1, pomeranians: 4
Sue 331: cars: 5, children: 2, trees: 0
Sue 332: vizslas: 6, pomeranians: 1, perfumes: 0
Sue 333: akitas: 7, trees: 1, cats: 9
Sue 334: vizslas: 6, goldfish: 9, akitas: 7
Sue 335: akitas: 3, samoyeds: 3, cars: 3
Sue 336: samoyeds: 10, perfumes: 9, trees: 6
Sue 337: vizslas: 2, cars: 9, akitas: 0
Sue 338: akitas: 6, perfumes: 9, vizslas: 3
Sue 339: cars: 3, samoyeds: 8, trees: 2
Sue 340: cats: 7, perfumes: 8, cars: 9
Sue 341: goldfish: 9, perfumes: 5, cars: 10
Sue 342: trees: 0, akitas: 3, perfumes: 5
Sue 343: perfumes: 2, children: 0, cars: 6
Sue 344: goldfish: 8, trees: 8, perfumes: 0
Sue 345: perfumes: 6, cars: 6, goldfish: 5
Sue 346: vizslas: 8, trees: 1, cars: 6
Sue 347: cars: 0, cats: 3, perfumes: 7
Sue 348: children: 7, perfumes: 10, cars: 7
Sue 349: pomeranians: 8, akitas: 5, children: 2
Sue 350: perfumes: 9, pomeranians: 4, goldfish: 3
Sue 351: perfumes: 8, pomeranians: 7, trees: 4
Sue 352: samoyeds: 1, goldfish: 9, akitas: 8
Sue 353: akitas: 6, goldfish: 10, vizslas: 8
Sue 354: akitas: 7, cars: 2, goldfish: 6
Sue 355: cars: 3, goldfish: 6, akitas: 5
Sue 356: akitas: 2, goldfish: 9, pomeranians: 1
Sue 357: goldfish: 10, cars: 6, pomeranians: 9
Sue 358: trees: 0, children: 2, goldfish: 6
Sue 359: samoyeds: 3, cars: 2, akitas: 4
Sue 360: trees: 1, goldfish: 8, cars: 5
Sue 361: akitas: 5, vizslas: 7, perfumes: 1
Sue 362: cats: 5, vizslas: 9, children: 4
Sue 363: goldfish: 9, perfumes: 3, vizslas: 9
Sue 364: children: 7, samoyeds: 2, pomeranians: 10
Sue 365: perfumes: 9, akitas: 10, pomeranians: 4
Sue 366: cars: 10, trees: 3, cats: 4
Sue 367: vizslas: 6, akitas: 10, perfumes: 5
Sue 368: akitas: 9, vizslas: 9, children: 4
Sue 369: goldfish: 8, trees: 2, perfumes: 5
Sue 370: trees: 0, children: 4, cars: 8
Sue 371: cats: 6, perfumes: 0, vizslas: 2
Sue 372: akitas: 7, cars: 5, perfumes: 3
Sue 373: cars: 0, perfumes: 4, pomeranians: 10
Sue 374: akitas: 5, perfumes: 5, vizslas: 2
Sue 375: goldfish: 7, trees: 10, pomeranians: 7
Sue 376: cars: 8, trees: 1, pomeranians: 8
Sue 377: cars: 0, akitas: 9, vizslas: 1
Sue 378: akitas: 5, perfumes: 3, vizslas: 7
Sue 379: trees: 2, goldfish: 8, pomeranians: 8
Sue 380: akitas: 5, cars: 9, perfumes: 9
Sue 381: cars: 2, perfumes: 6, trees: 3
Sue 382: perfumes: 6, vizslas: 2, goldfish: 9
Sue 383: akitas: 8, vizslas: 7, cats: 1
Sue 384: akitas: 9, trees: 10, vizslas: 7
Sue 385: cars: 0, perfumes: 7, vizslas: 2
Sue 386: vizslas: 10, akitas: 4, perfumes: 9
Sue 387: perfumes: 6, pomeranians: 5, samoyeds: 8
Sue 388: vizslas: 10, trees: 9, goldfish: 9
Sue 389: goldfish: 8, akitas: 4, perfumes: 10
Sue 390: goldfish: 6, trees: 8, akitas: 1
Sue 391: vizslas: 4, akitas: 10, goldfish: 7
Sue 392: akitas: 1, vizslas: 6, samoyeds: 5
Sue 393: trees: 6, cars: 3, akitas: 5
Sue 394: goldfish: 9, trees: 3, cars: 5
Sue 395: akitas: 6, samoyeds: 4, goldfish: 4
Sue 396: akitas: 2, trees: 1, cats: 5
Sue 397: cars: 0, children: 9, trees: 10
Sue 398: pomeranians: 3, samoyeds: 9, goldfish: 10
Sue 399: cars: 7, akitas: 4, goldfish: 8
Sue 400: cars: 4, akitas: 5, vizslas: 4
Sue 401: pomeranians: 5, akitas: 8, vizslas: 5
Sue 402: cats: 7, cars: 6, goldfish: 6
Sue 403: samoyeds: 8, perfumes: 4, cars: 5
Sue 404: akitas: 10, goldfish: 4, trees: 2
Sue 405: trees: 8, perfumes: 1, cars: 2
Sue 406: trees: 0, perfumes: 9, pomeranians: 10
Sue 407: perfumes: 4, trees: 7, goldfish: 3
Sue 408: akitas: 1, perfumes: 3, cars: 5
Sue 409: trees: 6, samoyeds: 3, cars: 9
Sue 410: vizslas: 3, goldfish: 5, akitas: 7
Sue 411: goldfish: 10, trees: 1, vizslas: 9
Sue 412: cars: 0, akitas: 6, trees: 6
Sue 413: goldfish: 7, trees: 0, cars: 3
Sue 414: pomeranians: 10, samoyeds: 3, cars: 10
Sue 415: perfumes: 6, trees: 9, cars: 4
Sue 416: trees: 2, cars: 4, goldfish: 8
Sue 417: goldfish: 2, cars: 9, cats: 5
Sue 418: vizslas: 1, cars: 9, akitas: 0
Sue 419: perfumes: 6, cats: 3, children: 9
Sue 420: cats: 5, goldfish: 7, akitas: 9
Sue 421: trees: 1, samoyeds: 6, pomeranians: 1
Sue 422: trees: 10, goldfish: 6, children: 7
Sue 423: cars: 8, goldfish: 7, vizslas: 3
Sue 424: samoyeds: 9, akitas: 7, trees: 5
Sue 425: akitas: 5, children: 4, perfumes: 9
Sue 426: goldfish: 1, children: 9, cats: 2
Sue 427: vizslas: 9, akitas: 7, goldfish: 9
Sue 428: pomeranians: 7, akitas: 5, vizslas: 1
Sue 429: vizslas: 7, goldfish: 7, cars: 9
Sue 430: trees: 7, perfumes: 0, pomeranians: 5
Sue 431: children: 9, perfumes: 5, vizslas: 7
Sue 432: trees: 6, samoyeds: 7, cats: 1
Sue 433: goldfish: 5, trees: 5, children: 6
Sue 434: goldfish: 9, akitas: 7, cars: 3
Sue 435: samoyeds: 10, perfumes: 2, cars: 0
Sue 436: akitas: 5, pomeranians: 4, perfumes: 7
Sue 437: vizslas: 5, cats: 6, perfumes: 5
Sue 438: trees: 2, goldfish: 6, vizslas: 7
Sue 439: samoyeds: 8, pomeranians: 10, goldfish: 1
Sue 440: akitas: 6, children: 9, perfumes: 4
Sue 441: cars: 2, goldfish: 9, children: 0
Sue 442: goldfish: 7, cars: 2, vizslas: 8
Sue 443: goldfish: 6, samoyeds: 3, perfumes: 2
Sue 444: trees: 2, goldfish: 7, cars: 8
Sue 445: trees: 2, pomeranians: 0, children: 0
Sue 446: perfumes: 4, akitas: 4, goldfish: 6
Sue 447: vizslas: 7, akitas: 9, cars: 3
Sue 448: goldfish: 6, trees: 9, cars: 0
Sue 449: samoyeds: 7, perfumes: 4, vizslas: 10
Sue 450: akitas: 7, cars: 10, goldfish: 7
Sue 451: goldfish: 4, children: 7, pomeranians: 4
Sue 452: cats: 4, vizslas: 6, trees: 7
Sue 453: cars: 1, trees: 10, goldfish: 9
Sue 454: trees: 2, goldfish: 3, vizslas: 10
Sue 455: pomeranians: 9, vizslas: 3, akitas: 2
Sue 456: vizslas: 10, akitas: 2, goldfish: 1
Sue 457: trees: 5, cats: 5, children: 8
Sue 458: cars: 6, goldfish: 3, akitas: 9
Sue 459: goldfish: 7, akitas: 2, cats: 7
Sue 460: akitas: 1, cars: 5, children: 8
Sue 461: cars: 8, perfumes: 0, goldfish: 6
Sue 462: pomeranians: 6, cats: 2, perfumes: 6
Sue 463: vizslas: 7, perfumes: 3, goldfish: 3
Sue 464: akitas: 10, goldfish: 10, trees: 1
Sue 465: vizslas: 0, akitas: 2, trees: 2
Sue 466: perfumes: 6, akitas: 8, cars: 2
Sue 467: goldfish: 1, cars: 10, perfumes: 3
Sue 468: goldfish: 4, trees: 2, cars: 9
Sue 469: perfumes: 6, pomeranians: 0, vizslas: 10
Sue 470: samoyeds: 8, children: 0, akitas: 7
Sue 471: children: 3, goldfish: 9, cats: 9
Sue 472: samoyeds: 0, goldfish: 0, trees: 0
Sue 473: trees: 3, goldfish: 4, vizslas: 1
Sue 474: perfumes: 10, cars: 3, trees: 7
Sue 475: akitas: 5, vizslas: 4, goldfish: 5
Sue 476: children: 2, akitas: 7, vizslas: 3
Sue 477: vizslas: 6, pomeranians: 9, trees: 6
Sue 478: vizslas: 7, pomeranians: 6, akitas: 7
Sue 479: trees: 2, perfumes: 2, children: 2
Sue 480: cars: 8, cats: 5, vizslas: 0
Sue 481: trees: 5, goldfish: 0, akitas: 3
Sue 482: cars: 8, perfumes: 6, goldfish: 10
Sue 483: goldfish: 0, cars: 3, perfumes: 10
Sue 484: pomeranians: 1, samoyeds: 1, perfumes: 3
Sue 485: trees: 0, akitas: 2, vizslas: 4
Sue 486: cars: 3, vizslas: 8, goldfish: 1
Sue 487: pomeranians: 9, vizslas: 2, children: 10
Sue 488: akitas: 6, vizslas: 10, perfumes: 9
Sue 489: goldfish: 6, vizslas: 4, cars: 2
Sue 490: vizslas: 10, cats: 8, samoyeds: 1
Sue 491: cats: 9, cars: 1, perfumes: 10
Sue 492: goldfish: 6, cars: 9, pomeranians: 9
Sue 493: children: 10, goldfish: 10, vizslas: 0
Sue 494: pomeranians: 5, cars: 0, vizslas: 0
Sue 495: vizslas: 7, perfumes: 6, samoyeds: 3
Sue 496: trees: 1, cats: 4, cars: 10
Sue 497: cats: 1, perfumes: 0, cars: 7
Sue 498: perfumes: 7, vizslas: 6, cats: 9
Sue 499: vizslas: 8, perfumes: 1, akitas: 3
Sue 500: perfumes: 4, cars: 9, trees: 4"""
known_details = {
"children": 3,
"cats": 7,
"samoyeds": 2,
"pomeranians": 3,
"akitas": 0,
"vizslas": 0,
"goldfish": 5,
"trees": 3,
"cars": 2,
"perfumes": 1
}
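# Part a: the real Aunt Sue must agree exactly with every compound in the
# MFCSAM readout above; compounds missing from a Sue's entry are unknown
# and therefore cannot rule her out.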
class Sue:
def __init__(self, id, details):
self.id = id
self.details = details
    def matches(self):
        for key, value in known_details.items():
            if key in self.details and self.details[key] != value:
                return False
        return True
sues = []
for line in puzzle_input.splitlines():
    # each line looks like "Sue 12: pomeranians: 4, akitas: 6, goldfish: 8"
    sue_id, all_values = line.split(":", 1)
    number = sue_id.split()[-1]
    details = {}
    for value in all_values.split(","):
        key = value.split(":")[0].strip()
        count = int(value.split(":")[-1].strip())
        details[key] = count
    sues.append(Sue(number, details))
potential_sues = [sue for sue in sues if sue.matches()]
print(potential_sues[0].id)
| 41.566421
| 59
| 0.669359
|
96196c943aae8d453f4a0051da7d82d47859f827
| 40,110
|
py
|
Python
|
tests/test_config.py
|
RavensburgOP/core
|
0ea76e848b182ca0ebb0fdb54558f7f733898ad7
|
[
"Apache-2.0"
] | 1
|
2019-08-28T00:54:28.000Z
|
2019-08-28T00:54:28.000Z
|
tests/test_config.py
|
RavensburgOP/core
|
0ea76e848b182ca0ebb0fdb54558f7f733898ad7
|
[
"Apache-2.0"
] | 72
|
2020-08-03T07:31:01.000Z
|
2022-03-31T06:11:49.000Z
|
tests/test_config.py
|
Vaarlion/core
|
f3de8b9f28de01abf72c0f5bb0b457eb1841f201
|
[
"Apache-2.0"
] | null | null | null |
"""Test config utils."""
# pylint: disable=protected-access
from collections import OrderedDict
import copy
import os
from unittest import mock
from unittest.mock import AsyncMock, Mock, patch
import pytest
import voluptuous as vol
from voluptuous import Invalid, MultipleInvalid
import yaml
import homeassistant.config as config_util
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_FRIENDLY_NAME,
CONF_AUTH_MFA_MODULES,
CONF_AUTH_PROVIDERS,
CONF_CUSTOMIZE,
CONF_LATITUDE,
CONF_LONGITUDE,
CONF_NAME,
CONF_TEMPERATURE_UNIT,
CONF_UNIT_SYSTEM,
CONF_UNIT_SYSTEM_IMPERIAL,
CONF_UNIT_SYSTEM_METRIC,
__version__,
)
from homeassistant.core import SOURCE_STORAGE, HomeAssistantError
from homeassistant.helpers import config_validation as cv
import homeassistant.helpers.check_config as check_config
from homeassistant.helpers.entity import Entity
from homeassistant.loader import async_get_integration
from homeassistant.util import dt as dt_util
from homeassistant.util.yaml import SECRET_YAML
from tests.common import get_test_config_dir, patch_yaml_files
CONFIG_DIR = get_test_config_dir()
YAML_PATH = os.path.join(CONFIG_DIR, config_util.YAML_CONFIG_FILE)
SECRET_PATH = os.path.join(CONFIG_DIR, SECRET_YAML)
VERSION_PATH = os.path.join(CONFIG_DIR, config_util.VERSION_FILE)
GROUP_PATH = os.path.join(CONFIG_DIR, config_util.GROUP_CONFIG_PATH)
AUTOMATIONS_PATH = os.path.join(CONFIG_DIR, config_util.AUTOMATION_CONFIG_PATH)
SCRIPTS_PATH = os.path.join(CONFIG_DIR, config_util.SCRIPT_CONFIG_PATH)
SCENES_PATH = os.path.join(CONFIG_DIR, config_util.SCENE_CONFIG_PATH)
ORIG_TIMEZONE = dt_util.DEFAULT_TIME_ZONE
def create_file(path):
"""Create an empty file."""
with open(path, "w"):
pass
def teardown():
    """Clean up."""
    dt_util.DEFAULT_TIME_ZONE = ORIG_TIMEZONE
    for path in (YAML_PATH, SECRET_PATH, VERSION_PATH, GROUP_PATH,
                 AUTOMATIONS_PATH, SCRIPTS_PATH, SCENES_PATH):
        if os.path.isfile(path):
            os.remove(path)
async def test_create_default_config(hass):
"""Test creation of default config."""
await config_util.async_create_default_config(hass)
assert os.path.isfile(YAML_PATH)
assert os.path.isfile(SECRET_PATH)
assert os.path.isfile(VERSION_PATH)
assert os.path.isfile(GROUP_PATH)
assert os.path.isfile(AUTOMATIONS_PATH)
async def test_ensure_config_exists_creates_config(hass):
"""Test that calling ensure_config_exists.
If not creates a new config file.
"""
with patch("builtins.print") as mock_print:
await config_util.async_ensure_config_exists(hass)
assert os.path.isfile(YAML_PATH)
assert mock_print.called
async def test_ensure_config_exists_uses_existing_config(hass):
"""Test that calling ensure_config_exists uses existing config."""
create_file(YAML_PATH)
await config_util.async_ensure_config_exists(hass)
with open(YAML_PATH) as fp:
content = fp.read()
    # Files created with create_file are empty
assert content == ""
async def test_ensure_existing_files_is_not_overwritten(hass):
"""Test that calling async_create_default_config does not overwrite existing files."""
create_file(SECRET_PATH)
await config_util.async_create_default_config(hass)
with open(SECRET_PATH) as fp:
content = fp.read()
    # Files created with create_file are empty
assert content == ""
def test_load_yaml_config_converts_empty_files_to_dict():
"""Test that loading an empty file returns an empty dict."""
create_file(YAML_PATH)
assert isinstance(config_util.load_yaml_config_file(YAML_PATH), dict)
def test_load_yaml_config_raises_error_if_not_dict():
"""Test error raised when YAML file is not a dict."""
with open(YAML_PATH, "w") as fp:
fp.write("5")
with pytest.raises(HomeAssistantError):
config_util.load_yaml_config_file(YAML_PATH)
def test_load_yaml_config_raises_error_if_malformed_yaml():
"""Test error raised if invalid YAML."""
with open(YAML_PATH, "w") as fp:
fp.write(":")
with pytest.raises(HomeAssistantError):
config_util.load_yaml_config_file(YAML_PATH)
def test_load_yaml_config_raises_error_if_unsafe_yaml():
"""Test error raised if unsafe YAML."""
with open(YAML_PATH, "w") as fp:
fp.write("hello: !!python/object/apply:os.system")
with pytest.raises(HomeAssistantError):
config_util.load_yaml_config_file(YAML_PATH)
def test_load_yaml_config_preserves_key_order():
"""Test removal of library."""
with open(YAML_PATH, "w") as fp:
fp.write("hello: 2\n")
fp.write("world: 1\n")
assert [("hello", 2), ("world", 1)] == list(
config_util.load_yaml_config_file(YAML_PATH).items()
)
async def test_create_default_config_returns_none_if_write_error(hass):
"""Test the writing of a default configuration.
Non existing folder returns None.
"""
hass.config.config_dir = os.path.join(CONFIG_DIR, "non_existing_dir/")
with patch("builtins.print") as mock_print:
assert await config_util.async_create_default_config(hass) is False
assert mock_print.called
def test_core_config_schema():
"""Test core config schema."""
for value in (
{CONF_UNIT_SYSTEM: "K"},
{"time_zone": "non-exist"},
{"latitude": "91"},
{"longitude": -181},
{"external_url": "not an url"},
{"internal_url": "not an url"},
{"currency", 100},
{"customize": "bla"},
{"customize": {"light.sensor": 100}},
{"customize": {"entity_id": []}},
):
with pytest.raises(MultipleInvalid):
config_util.CORE_CONFIG_SCHEMA(value)
config_util.CORE_CONFIG_SCHEMA(
{
"name": "Test name",
"latitude": "-23.45",
"longitude": "123.45",
"external_url": "https://www.example.com",
"internal_url": "http://example.local",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_METRIC,
"currency": "USD",
"customize": {"sensor.temperature": {"hidden": True}},
}
)
def test_customize_dict_schema():
"""Test basic customize config validation."""
values = ({ATTR_FRIENDLY_NAME: None}, {ATTR_ASSUMED_STATE: "2"})
for val in values:
with pytest.raises(MultipleInvalid):
config_util.CUSTOMIZE_DICT_SCHEMA(val)
assert config_util.CUSTOMIZE_DICT_SCHEMA(
{ATTR_FRIENDLY_NAME: 2, ATTR_ASSUMED_STATE: "0"}
) == {ATTR_FRIENDLY_NAME: "2", ATTR_ASSUMED_STATE: False}
def test_customize_glob_is_ordered():
"""Test that customize_glob preserves order."""
conf = config_util.CORE_CONFIG_SCHEMA({"customize_glob": OrderedDict()})
assert isinstance(conf["customize_glob"], OrderedDict)
async def _compute_state(hass, config):
await config_util.async_process_ha_core_config(hass, config)
entity = Entity()
entity.entity_id = "test.test"
entity.hass = hass
entity.schedule_update_ha_state()
await hass.async_block_till_done()
return hass.states.get("test.test")
async def test_entity_customization(hass):
"""Test entity customization through configuration."""
config = {
CONF_LATITUDE: 50,
CONF_LONGITUDE: 50,
CONF_NAME: "Test",
CONF_CUSTOMIZE: {"test.test": {"hidden": True}},
}
state = await _compute_state(hass, config)
assert state.attributes["hidden"]
@patch("homeassistant.config.shutil")
@patch("homeassistant.config.os")
@patch("homeassistant.config.is_docker_env", return_value=False)
def test_remove_lib_on_upgrade(mock_docker, mock_os, mock_shutil, hass):
"""Test removal of library on upgrade from before 0.50."""
ha_version = "0.49.0"
mock_os.path.isdir = mock.Mock(return_value=True)
mock_open = mock.mock_open()
with patch("homeassistant.config.open", mock_open, create=True):
opened_file = mock_open.return_value
# pylint: disable=no-member
opened_file.readline.return_value = ha_version
hass.config.path = mock.Mock()
config_util.process_ha_config_upgrade(hass)
hass_path = hass.config.path.return_value
assert mock_os.path.isdir.call_count == 1
assert mock_os.path.isdir.call_args == mock.call(hass_path)
assert mock_shutil.rmtree.call_count == 1
assert mock_shutil.rmtree.call_args == mock.call(hass_path)
@patch("homeassistant.config.shutil")
@patch("homeassistant.config.os")
@patch("homeassistant.config.is_docker_env", return_value=True)
def test_remove_lib_on_upgrade_94(mock_docker, mock_os, mock_shutil, hass):
"""Test removal of library on upgrade from before 0.94 and in Docker."""
ha_version = "0.93.0.dev0"
mock_os.path.isdir = mock.Mock(return_value=True)
mock_open = mock.mock_open()
with patch("homeassistant.config.open", mock_open, create=True):
opened_file = mock_open.return_value
# pylint: disable=no-member
opened_file.readline.return_value = ha_version
hass.config.path = mock.Mock()
config_util.process_ha_config_upgrade(hass)
hass_path = hass.config.path.return_value
assert mock_os.path.isdir.call_count == 1
assert mock_os.path.isdir.call_args == mock.call(hass_path)
assert mock_shutil.rmtree.call_count == 1
assert mock_shutil.rmtree.call_args == mock.call(hass_path)
def test_process_config_upgrade(hass):
"""Test update of version on upgrade."""
ha_version = "0.92.0"
mock_open = mock.mock_open()
with patch("homeassistant.config.open", mock_open, create=True), patch.object(
config_util, "__version__", "0.91.0"
):
opened_file = mock_open.return_value
# pylint: disable=no-member
opened_file.readline.return_value = ha_version
config_util.process_ha_config_upgrade(hass)
assert opened_file.write.call_count == 1
assert opened_file.write.call_args == mock.call("0.91.0")
def test_config_upgrade_same_version(hass):
"""Test no update of version on no upgrade."""
ha_version = __version__
mock_open = mock.mock_open()
with patch("homeassistant.config.open", mock_open, create=True):
opened_file = mock_open.return_value
# pylint: disable=no-member
opened_file.readline.return_value = ha_version
config_util.process_ha_config_upgrade(hass)
assert opened_file.write.call_count == 0
def test_config_upgrade_no_file(hass):
"""Test update of version on upgrade, with no version file."""
mock_open = mock.mock_open()
mock_open.side_effect = [FileNotFoundError(), mock.DEFAULT, mock.DEFAULT]
with patch("homeassistant.config.open", mock_open, create=True):
opened_file = mock_open.return_value
# pylint: disable=no-member
config_util.process_ha_config_upgrade(hass)
assert opened_file.write.call_count == 1
assert opened_file.write.call_args == mock.call(__version__)
async def test_loading_configuration_from_storage(hass, hass_storage):
"""Test loading core config onto hass object."""
hass_storage["core.config"] = {
"data": {
"elevation": 10,
"latitude": 55,
"location_name": "Home",
"longitude": 13,
"time_zone": "Europe/Copenhagen",
"unit_system": "metric",
"external_url": "https://www.example.com",
"internal_url": "http://example.local",
"currency": "EUR",
},
"key": "core.config",
"version": 1,
}
await config_util.async_process_ha_core_config(
hass, {"allowlist_external_dirs": "/etc"}
)
assert hass.config.latitude == 55
assert hass.config.longitude == 13
assert hass.config.elevation == 10
assert hass.config.location_name == "Home"
assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
assert hass.config.time_zone == "Europe/Copenhagen"
assert hass.config.external_url == "https://www.example.com"
assert hass.config.internal_url == "http://example.local"
assert hass.config.currency == "EUR"
assert len(hass.config.allowlist_external_dirs) == 3
assert "/etc" in hass.config.allowlist_external_dirs
assert hass.config.config_source == SOURCE_STORAGE
async def test_loading_configuration_from_storage_with_yaml_only(hass, hass_storage):
"""Test loading core and YAML config onto hass object."""
hass_storage["core.config"] = {
"data": {
"elevation": 10,
"latitude": 55,
"location_name": "Home",
"longitude": 13,
"time_zone": "Europe/Copenhagen",
"unit_system": "metric",
},
"key": "core.config",
"version": 1,
}
await config_util.async_process_ha_core_config(
hass, {"media_dirs": {"mymedia": "/usr"}, "allowlist_external_dirs": "/etc"}
)
assert hass.config.latitude == 55
assert hass.config.longitude == 13
assert hass.config.elevation == 10
assert hass.config.location_name == "Home"
assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
assert hass.config.time_zone == "Europe/Copenhagen"
assert len(hass.config.allowlist_external_dirs) == 3
assert "/etc" in hass.config.allowlist_external_dirs
assert hass.config.media_dirs == {"mymedia": "/usr"}
assert hass.config.config_source == SOURCE_STORAGE
async def test_updating_configuration(hass, hass_storage):
"""Test updating configuration stores the new configuration."""
core_data = {
"data": {
"elevation": 10,
"latitude": 55,
"location_name": "Home",
"longitude": 13,
"time_zone": "Europe/Copenhagen",
"unit_system": "metric",
"external_url": "https://www.example.com",
"internal_url": "http://example.local",
"currency": "BTC",
},
"key": "core.config",
"version": 1,
}
hass_storage["core.config"] = dict(core_data)
await config_util.async_process_ha_core_config(
hass, {"allowlist_external_dirs": "/etc"}
)
await hass.config.async_update(latitude=50, currency="USD")
new_core_data = copy.deepcopy(core_data)
new_core_data["data"]["latitude"] = 50
new_core_data["data"]["currency"] = "USD"
assert hass_storage["core.config"] == new_core_data
assert hass.config.latitude == 50
assert hass.config.currency == "USD"
async def test_override_stored_configuration(hass, hass_storage):
"""Test loading core and YAML config onto hass object."""
hass_storage["core.config"] = {
"data": {
"elevation": 10,
"latitude": 55,
"location_name": "Home",
"longitude": 13,
"time_zone": "Europe/Copenhagen",
"unit_system": "metric",
},
"key": "core.config",
"version": 1,
}
await config_util.async_process_ha_core_config(
hass, {"latitude": 60, "allowlist_external_dirs": "/etc"}
)
assert hass.config.latitude == 60
assert hass.config.longitude == 13
assert hass.config.elevation == 10
assert hass.config.location_name == "Home"
assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
assert hass.config.time_zone == "Europe/Copenhagen"
assert len(hass.config.allowlist_external_dirs) == 3
assert "/etc" in hass.config.allowlist_external_dirs
assert hass.config.config_source == config_util.SOURCE_YAML
async def test_loading_configuration(hass):
"""Test loading core config onto hass object."""
await config_util.async_process_ha_core_config(
hass,
{
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "America/New_York",
"allowlist_external_dirs": "/etc",
"external_url": "https://www.example.com",
"internal_url": "http://example.local",
"media_dirs": {"mymedia": "/usr"},
"legacy_templates": True,
"currency": "EUR",
},
)
assert hass.config.latitude == 60
assert hass.config.longitude == 50
assert hass.config.elevation == 25
assert hass.config.location_name == "Huis"
assert hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL
assert hass.config.time_zone == "America/New_York"
assert hass.config.external_url == "https://www.example.com"
assert hass.config.internal_url == "http://example.local"
assert len(hass.config.allowlist_external_dirs) == 3
assert "/etc" in hass.config.allowlist_external_dirs
assert "/usr" in hass.config.allowlist_external_dirs
assert hass.config.media_dirs == {"mymedia": "/usr"}
assert hass.config.config_source == config_util.SOURCE_YAML
assert hass.config.legacy_templates is True
assert hass.config.currency == "EUR"
async def test_loading_configuration_temperature_unit(hass):
"""Test backward compatibility when loading core config."""
await config_util.async_process_ha_core_config(
hass,
{
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_TEMPERATURE_UNIT: "C",
"time_zone": "America/New_York",
"external_url": "https://www.example.com",
"internal_url": "http://example.local",
},
)
assert hass.config.latitude == 60
assert hass.config.longitude == 50
assert hass.config.elevation == 25
assert hass.config.location_name == "Huis"
assert hass.config.units.name == CONF_UNIT_SYSTEM_METRIC
assert hass.config.time_zone == "America/New_York"
assert hass.config.external_url == "https://www.example.com"
assert hass.config.internal_url == "http://example.local"
assert hass.config.config_source == config_util.SOURCE_YAML
assert hass.config.currency == "EUR"
async def test_loading_configuration_default_media_dirs_docker(hass):
"""Test loading core config onto hass object."""
with patch("homeassistant.config.is_docker_env", return_value=True):
await config_util.async_process_ha_core_config(
hass,
{
"name": "Huis",
},
)
assert hass.config.location_name == "Huis"
assert len(hass.config.allowlist_external_dirs) == 2
assert "/media" in hass.config.allowlist_external_dirs
assert hass.config.media_dirs == {"local": "/media"}
async def test_loading_configuration_from_packages(hass):
"""Test loading packages config onto hass object config."""
await config_util.async_process_ha_core_config(
hass,
{
"latitude": 39,
"longitude": -1,
"elevation": 500,
"name": "Huis",
CONF_TEMPERATURE_UNIT: "C",
"time_zone": "Europe/Madrid",
"external_url": "https://www.example.com",
"internal_url": "http://example.local",
"packages": {
"package_1": {"wake_on_lan": None},
"package_2": {
"light": {"platform": "hue"},
"media_extractor": None,
"sun": None,
},
},
},
)
# Empty packages not allowed
with pytest.raises(MultipleInvalid):
await config_util.async_process_ha_core_config(
hass,
{
"latitude": 39,
"longitude": -1,
"elevation": 500,
"name": "Huis",
CONF_TEMPERATURE_UNIT: "C",
"time_zone": "Europe/Madrid",
"packages": {"empty_package": None},
},
)
@patch("homeassistant.helpers.check_config.async_check_ha_config_file")
async def test_check_ha_config_file_correct(mock_check, hass):
"""Check that restart propagates to stop."""
mock_check.return_value = check_config.HomeAssistantConfig()
assert await config_util.async_check_ha_config_file(hass) is None
@patch("homeassistant.helpers.check_config.async_check_ha_config_file")
async def test_check_ha_config_file_wrong(mock_check, hass):
"""Check that restart with a bad config doesn't propagate to stop."""
mock_check.return_value = check_config.HomeAssistantConfig()
mock_check.return_value.add_error("bad")
assert await config_util.async_check_ha_config_file(hass) == "bad"
@patch("homeassistant.config.os.path.isfile", mock.Mock(return_value=True))
async def test_async_hass_config_yaml_merge(merge_log_err, hass):
"""Test merge during async config reload."""
config = {
config_util.CONF_CORE: {
config_util.CONF_PACKAGES: {"pack_dict": {"input_boolean": {"ib1": None}}}
},
"input_boolean": {"ib2": None},
"light": {"platform": "test"},
}
files = {config_util.YAML_CONFIG_FILE: yaml.dump(config)}
with patch_yaml_files(files, True):
conf = await config_util.async_hass_config_yaml(hass)
assert merge_log_err.call_count == 0
assert conf[config_util.CONF_CORE].get(config_util.CONF_PACKAGES) is not None
assert len(conf) == 3
assert len(conf["input_boolean"]) == 2
assert len(conf["light"]) == 1
# pylint: disable=redefined-outer-name
@pytest.fixture
def merge_log_err(hass):
"""Patch _merge_log_error from packages."""
with patch("homeassistant.config._LOGGER.error") as logerr:
yield logerr
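# Note: pytest resolves fixtures by name at collection time, so the tests
# defined above this fixture can still request merge_log_err.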
async def test_merge(merge_log_err, hass):
"""Test if we can merge packages."""
packages = {
"pack_dict": {"input_boolean": {"ib1": None}},
"pack_11": {"input_select": {"is1": None}},
"pack_list": {"light": {"platform": "test"}},
"pack_list2": {"light": [{"platform": "test"}]},
"pack_none": {"wake_on_lan": None},
"pack_special": {
"automation": [{"some": "yay"}],
"script": {"a_script": "yay"},
"template": [{"some": "yay"}],
},
}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"input_boolean": {"ib2": None},
"light": {"platform": "test"},
"automation": [],
"script": {},
"template": [],
}
await config_util.merge_packages_config(hass, config, packages)
assert merge_log_err.call_count == 0
assert len(config) == 8
assert len(config["input_boolean"]) == 2
assert len(config["input_select"]) == 1
assert len(config["light"]) == 3
assert len(config["automation"]) == 1
assert len(config["script"]) == 1
assert len(config["template"]) == 1
assert isinstance(config["wake_on_lan"], OrderedDict)
async def test_merge_try_falsy(merge_log_err, hass):
"""Ensure we don't add falsy items like empty OrderedDict() to list."""
packages = {
"pack_falsy_to_lst": {"automation": OrderedDict()},
"pack_list2": {"light": OrderedDict()},
}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"automation": {"do": "something"},
"light": {"some": "light"},
}
await config_util.merge_packages_config(hass, config, packages)
assert merge_log_err.call_count == 0
assert len(config) == 3
assert len(config["automation"]) == 1
assert len(config["light"]) == 1
async def test_merge_new(merge_log_err, hass):
"""Test adding new components to outer scope."""
packages = {
"pack_1": {"light": [{"platform": "one"}]},
"pack_11": {"input_select": {"ib1": None}},
"pack_2": {
"light": {"platform": "one"},
"panel_custom": {"pan1": None},
"api": {},
},
}
config = {config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages}}
await config_util.merge_packages_config(hass, config, packages)
assert merge_log_err.call_count == 0
assert "api" in config
assert len(config) == 5
assert len(config["light"]) == 2
assert len(config["panel_custom"]) == 1
async def test_merge_type_mismatch(merge_log_err, hass):
"""Test if we have a type mismatch for packages."""
packages = {
"pack_1": {"input_boolean": [{"ib1": None}]},
"pack_11": {"input_select": {"ib1": None}},
"pack_2": {"light": {"ib1": None}}, # light gets merged - ensure_list
}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"input_boolean": {"ib2": None},
"input_select": [{"ib2": None}],
"light": [{"platform": "two"}],
}
await config_util.merge_packages_config(hass, config, packages)
assert merge_log_err.call_count == 2
assert len(config) == 4
assert len(config["input_boolean"]) == 1
assert len(config["light"]) == 2
async def test_merge_once_only_keys(merge_log_err, hass):
"""Test if we have a merge for a comp that may occur only once. Keys."""
packages = {"pack_2": {"api": None}}
config = {config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages}, "api": None}
await config_util.merge_packages_config(hass, config, packages)
assert config["api"] == OrderedDict()
packages = {"pack_2": {"api": {"key_3": 3}}}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"api": {"key_1": 1, "key_2": 2},
}
await config_util.merge_packages_config(hass, config, packages)
assert config["api"] == {"key_1": 1, "key_2": 2, "key_3": 3}
# Duplicate keys error
packages = {"pack_2": {"api": {"key": 2}}}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"api": {"key": 1},
}
await config_util.merge_packages_config(hass, config, packages)
assert merge_log_err.call_count == 1
async def test_merge_once_only_lists(hass):
"""Test if we have a merge for a comp that may occur only once. Lists."""
packages = {
"pack_2": {
"api": {"list_1": ["item_2", "item_3"], "list_2": ["item_4"], "list_3": []}
}
}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"api": {"list_1": ["item_1"]},
}
await config_util.merge_packages_config(hass, config, packages)
assert config["api"] == {
"list_1": ["item_1", "item_2", "item_3"],
"list_2": ["item_4"],
"list_3": [],
}
async def test_merge_once_only_dictionaries(hass):
"""Test if we have a merge for a comp that may occur only once. Dicts."""
packages = {
"pack_2": {
"api": {
"dict_1": {"key_2": 2, "dict_1.1": {"key_1.2": 1.2}},
"dict_2": {"key_1": 1},
"dict_3": {},
}
}
}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"api": {"dict_1": {"key_1": 1, "dict_1.1": {"key_1.1": 1.1}}},
}
await config_util.merge_packages_config(hass, config, packages)
assert config["api"] == {
"dict_1": {
"key_1": 1,
"key_2": 2,
"dict_1.1": {"key_1.1": 1.1, "key_1.2": 1.2},
},
"dict_2": {"key_1": 1},
}
async def test_merge_id_schema(hass):
"""Test if we identify the config schemas correctly."""
types = {
"panel_custom": "list",
"group": "dict",
"input_boolean": "dict",
"shell_command": "dict",
"qwikswitch": "dict",
}
for domain, expected_type in types.items():
integration = await async_get_integration(hass, domain)
module = integration.get_component()
typ = config_util._identify_config_schema(module)
assert typ == expected_type, f"{domain} expected {expected_type}, got {typ}"
async def test_merge_duplicate_keys(merge_log_err, hass):
"""Test if keys in dicts are duplicates."""
packages = {"pack_1": {"input_select": {"ib1": None}}}
config = {
config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages},
"input_select": {"ib1": 1},
}
await config_util.merge_packages_config(hass, config, packages)
assert merge_log_err.call_count == 1
assert len(config) == 2
assert len(config["input_select"]) == 1
async def test_merge_customize(hass):
"""Test loading core config onto hass object."""
core_config = {
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "GMT",
"customize": {"a.a": {"friendly_name": "A"}},
"packages": {
"pkg1": {"homeassistant": {"customize": {"b.b": {"friendly_name": "BB"}}}}
},
}
await config_util.async_process_ha_core_config(hass, core_config)
assert hass.data[config_util.DATA_CUSTOMIZE].get("b.b") == {"friendly_name": "BB"}
async def test_auth_provider_config(hass):
"""Test loading auth provider config onto hass object."""
core_config = {
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "GMT",
CONF_AUTH_PROVIDERS: [
{"type": "homeassistant"},
{"type": "legacy_api_password", "api_password": "some-pass"},
],
CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp", "id": "second"}],
}
if hasattr(hass, "auth"):
del hass.auth
await config_util.async_process_ha_core_config(hass, core_config)
assert len(hass.auth.auth_providers) == 2
assert hass.auth.auth_providers[0].type == "homeassistant"
assert hass.auth.auth_providers[1].type == "legacy_api_password"
assert len(hass.auth.auth_mfa_modules) == 2
assert hass.auth.auth_mfa_modules[0].id == "totp"
assert hass.auth.auth_mfa_modules[1].id == "second"
async def test_auth_provider_config_default(hass):
"""Test loading default auth provider config."""
core_config = {
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "GMT",
}
if hasattr(hass, "auth"):
del hass.auth
await config_util.async_process_ha_core_config(hass, core_config)
assert len(hass.auth.auth_providers) == 1
assert hass.auth.auth_providers[0].type == "homeassistant"
assert len(hass.auth.auth_mfa_modules) == 1
assert hass.auth.auth_mfa_modules[0].id == "totp"
async def test_disallowed_auth_provider_config(hass):
"""Test loading insecure example auth provider is disallowed."""
core_config = {
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "GMT",
CONF_AUTH_PROVIDERS: [
{
"type": "insecure_example",
"users": [
{
"username": "test-user",
"password": "test-pass",
"name": "Test Name",
}
],
}
],
}
with pytest.raises(Invalid):
await config_util.async_process_ha_core_config(hass, core_config)
async def test_disallowed_duplicated_auth_provider_config(hass):
"""Test loading insecure example auth provider is disallowed."""
core_config = {
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "GMT",
CONF_AUTH_PROVIDERS: [{"type": "homeassistant"}, {"type": "homeassistant"}],
}
with pytest.raises(Invalid):
await config_util.async_process_ha_core_config(hass, core_config)
async def test_disallowed_auth_mfa_module_config(hass):
"""Test loading insecure example auth mfa module is disallowed."""
core_config = {
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "GMT",
CONF_AUTH_MFA_MODULES: [
{
"type": "insecure_example",
"data": [{"user_id": "mock-user", "pin": "test-pin"}],
}
],
}
with pytest.raises(Invalid):
await config_util.async_process_ha_core_config(hass, core_config)
async def test_disallowed_duplicated_auth_mfa_module_config(hass):
"""Test loading insecure example auth mfa module is disallowed."""
core_config = {
"latitude": 60,
"longitude": 50,
"elevation": 25,
"name": "Huis",
CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
"time_zone": "GMT",
CONF_AUTH_MFA_MODULES: [{"type": "totp"}, {"type": "totp"}],
}
with pytest.raises(Invalid):
await config_util.async_process_ha_core_config(hass, core_config)
async def test_merge_split_component_definition(hass):
"""Test components with trailing description in packages are merged."""
packages = {
"pack_1": {"light one": {"l1": None}},
"pack_2": {"light two": {"l2": None}, "light three": {"l3": None}},
}
config = {config_util.CONF_CORE: {config_util.CONF_PACKAGES: packages}}
await config_util.merge_packages_config(hass, config, packages)
assert len(config) == 4
assert len(config["light one"]) == 1
assert len(config["light two"]) == 1
assert len(config["light three"]) == 1
async def test_component_config_exceptions(hass, caplog):
"""Test unexpected exceptions validating component config."""
# Config validator
assert (
await config_util.async_process_component_config(
hass,
{},
integration=Mock(
domain="test_domain",
get_platform=Mock(
return_value=Mock(
async_validate_config=AsyncMock(
side_effect=ValueError("broken")
)
)
),
),
)
is None
)
assert "ValueError: broken" in caplog.text
assert "Unknown error calling test_domain config validator" in caplog.text
# component.CONFIG_SCHEMA
caplog.clear()
assert (
await config_util.async_process_component_config(
hass,
{},
integration=Mock(
domain="test_domain",
get_platform=Mock(return_value=None),
get_component=Mock(
return_value=Mock(
CONFIG_SCHEMA=Mock(side_effect=ValueError("broken"))
)
),
),
)
is None
)
assert "ValueError: broken" in caplog.text
assert "Unknown error calling test_domain CONFIG_SCHEMA" in caplog.text
# component.PLATFORM_SCHEMA
caplog.clear()
assert (
await config_util.async_process_component_config(
hass,
{"test_domain": {"platform": "test_platform"}},
integration=Mock(
domain="test_domain",
get_platform=Mock(return_value=None),
get_component=Mock(
return_value=Mock(
spec=["PLATFORM_SCHEMA_BASE"],
PLATFORM_SCHEMA_BASE=Mock(side_effect=ValueError("broken")),
)
),
),
)
== {"test_domain": []}
)
assert "ValueError: broken" in caplog.text
assert (
"Unknown error validating test_platform platform config with test_domain component platform schema"
in caplog.text
)
# platform.PLATFORM_SCHEMA
caplog.clear()
with patch(
"homeassistant.config.async_get_integration_with_requirements",
return_value=Mock( # integration that owns platform
get_platform=Mock(
return_value=Mock( # platform
PLATFORM_SCHEMA=Mock(side_effect=ValueError("broken"))
)
)
),
):
assert (
await config_util.async_process_component_config(
hass,
{"test_domain": {"platform": "test_platform"}},
integration=Mock(
domain="test_domain",
get_platform=Mock(return_value=None),
get_component=Mock(
return_value=Mock(spec=["PLATFORM_SCHEMA_BASE"])
),
),
)
== {"test_domain": []}
)
assert "ValueError: broken" in caplog.text
assert (
"Unknown error validating config for test_platform platform for test_domain component with PLATFORM_SCHEMA"
in caplog.text
)
# get_platform("config") raising
caplog.clear()
assert (
await config_util.async_process_component_config(
hass,
{"test_domain": {}},
integration=Mock(
pkg_path="homeassistant.components.test_domain",
domain="test_domain",
get_platform=Mock(
side_effect=ImportError(
"ModuleNotFoundError: No module named 'not_installed_something'",
name="not_installed_something",
)
),
),
)
is None
)
assert (
"Error importing config platform test_domain: ModuleNotFoundError: No module named 'not_installed_something'"
in caplog.text
)
# get_component raising
caplog.clear()
assert (
await config_util.async_process_component_config(
hass,
{"test_domain": {}},
integration=Mock(
pkg_path="homeassistant.components.test_domain",
domain="test_domain",
get_component=Mock(
side_effect=FileNotFoundError(
"No such file or directory: b'liblibc.a'"
)
),
),
)
is None
)
assert "Unable to import test_domain: No such file or directory" in caplog.text
@pytest.mark.parametrize(
"domain, schema, expected",
[
("zone", vol.Schema({vol.Optional("zone", default=list): [int]}), "list"),
("zone", vol.Schema({vol.Optional("zone", default=[]): [int]}), "list"),
(
"zone",
vol.Schema({vol.Optional("zone", default={}): {vol.Optional("hello"): 1}}),
"dict",
),
(
"zone",
vol.Schema(
{vol.Optional("zone", default=dict): {vol.Optional("hello"): 1}}
),
"dict",
),
("zone", vol.Schema({vol.Optional("zone"): int}), None),
("zone", vol.Schema({"zone": int}), None),
(
"not_existing",
vol.Schema({vol.Optional("zone", default=dict): dict}),
None,
),
("non_existing", vol.Schema({"zone": int}), None),
("zone", vol.Schema({}), None),
("plex", vol.Schema(vol.All({"plex": {"host": str}})), "dict"),
("openuv", cv.deprecated("openuv"), None),
],
)
def test_identify_config_schema(domain, schema, expected):
"""Test identify config schema."""
assert (
config_util._identify_config_schema(Mock(DOMAIN=domain, CONFIG_SCHEMA=schema))
== expected
)
| 33.991525
| 119
| 0.622862
|
ce78348ea29888cf46c9fcf38e4784e877ace145
| 10,812
|
py
|
Python
|
sdk/python/pulumi_azure_native/dbforpostgresql/v20200101privatepreview/server_key.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/dbforpostgresql/v20200101privatepreview/server_key.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/dbforpostgresql/v20200101privatepreview/server_key.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = ['ServerKeyArgs', 'ServerKey']
@pulumi.input_type
class ServerKeyArgs:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
server_key_type: pulumi.Input[Union[str, 'ServerKeyType']],
server_name: pulumi.Input[str],
key_name: Optional[pulumi.Input[str]] = None,
uri: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ServerKey resource.
:param pulumi.Input[str] resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param pulumi.Input[Union[str, 'ServerKeyType']] server_key_type: The key type like 'AzureKeyVault'.
:param pulumi.Input[str] server_name: The name of the server.
:param pulumi.Input[str] key_name: The name of the PostgreSQL Server key to be operated on (updated or created).
:param pulumi.Input[str] uri: The URI of the key.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "server_key_type", server_key_type)
pulumi.set(__self__, "server_name", server_name)
if key_name is not None:
pulumi.set(__self__, "key_name", key_name)
if uri is not None:
pulumi.set(__self__, "uri", uri)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="serverKeyType")
def server_key_type(self) -> pulumi.Input[Union[str, 'ServerKeyType']]:
"""
The key type like 'AzureKeyVault'.
"""
return pulumi.get(self, "server_key_type")
@server_key_type.setter
def server_key_type(self, value: pulumi.Input[Union[str, 'ServerKeyType']]):
pulumi.set(self, "server_key_type", value)
@property
@pulumi.getter(name="serverName")
def server_name(self) -> pulumi.Input[str]:
"""
The name of the server.
"""
return pulumi.get(self, "server_name")
@server_name.setter
def server_name(self, value: pulumi.Input[str]):
pulumi.set(self, "server_name", value)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the PostgreSQL Server key to be operated on (updated or created).
"""
return pulumi.get(self, "key_name")
@key_name.setter
def key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "key_name", value)
@property
@pulumi.getter
def uri(self) -> Optional[pulumi.Input[str]]:
"""
The URI of the key.
"""
return pulumi.get(self, "uri")
@uri.setter
def uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "uri", value)
class ServerKey(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
key_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_key_type: Optional[pulumi.Input[Union[str, 'ServerKeyType']]] = None,
server_name: Optional[pulumi.Input[str]] = None,
uri: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
A PostgreSQL Server key.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] key_name: The name of the PostgreSQL Server key to be operated on (updated or created).
:param pulumi.Input[str] resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param pulumi.Input[Union[str, 'ServerKeyType']] server_key_type: The key type like 'AzureKeyVault'.
:param pulumi.Input[str] server_name: The name of the server.
:param pulumi.Input[str] uri: The URI of the key.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ServerKeyArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
A PostgreSQL Server key.
:param str resource_name: The name of the resource.
:param ServerKeyArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ServerKeyArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
key_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server_key_type: Optional[pulumi.Input[Union[str, 'ServerKeyType']]] = None,
server_name: Optional[pulumi.Input[str]] = None,
uri: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ServerKeyArgs.__new__(ServerKeyArgs)
__props__.__dict__["key_name"] = key_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if server_key_type is None and not opts.urn:
raise TypeError("Missing required property 'server_key_type'")
__props__.__dict__["server_key_type"] = server_key_type
if server_name is None and not opts.urn:
raise TypeError("Missing required property 'server_name'")
__props__.__dict__["server_name"] = server_name
__props__.__dict__["uri"] = uri
__props__.__dict__["creation_date"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:dbforpostgresql/v20200101privatepreview:ServerKey"), pulumi.Alias(type_="azure-native:dbforpostgresql:ServerKey"), pulumi.Alias(type_="azure-nextgen:dbforpostgresql:ServerKey"), pulumi.Alias(type_="azure-native:dbforpostgresql/v20200101:ServerKey"), pulumi.Alias(type_="azure-nextgen:dbforpostgresql/v20200101:ServerKey")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(ServerKey, __self__).__init__(
'azure-native:dbforpostgresql/v20200101privatepreview:ServerKey',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'ServerKey':
"""
Get an existing ServerKey resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = ServerKeyArgs.__new__(ServerKeyArgs)
__props__.__dict__["creation_date"] = None
__props__.__dict__["kind"] = None
__props__.__dict__["name"] = None
__props__.__dict__["server_key_type"] = None
__props__.__dict__["type"] = None
__props__.__dict__["uri"] = None
return ServerKey(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="creationDate")
def creation_date(self) -> pulumi.Output[str]:
"""
The key creation date.
"""
return pulumi.get(self, "creation_date")
@property
@pulumi.getter
def kind(self) -> pulumi.Output[str]:
"""
Kind of encryption protector. This is metadata used for the Azure portal experience.
"""
return pulumi.get(self, "kind")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="serverKeyType")
def server_key_type(self) -> pulumi.Output[str]:
"""
The key type like 'AzureKeyVault'.
"""
return pulumi.get(self, "server_key_type")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def uri(self) -> pulumi.Output[Optional[str]]:
"""
The URI of the key.
"""
return pulumi.get(self, "uri")
| 42.070039
| 410
| 0.63892
|
25981c7bf28d76978a75e471521c2d656c74de08
| 2,237
|
py
|
Python
|
bstree.py
|
gidj/pyBSTree
|
9eb5a11a073b6c2023984d9cebcb28c590725bca
|
[
"BSD-2-Clause"
] | 1
|
2017-11-17T06:18:47.000Z
|
2017-11-17T06:18:47.000Z
|
bstree.py
|
gidj/pyBSTree
|
9eb5a11a073b6c2023984d9cebcb28c590725bca
|
[
"BSD-2-Clause"
] | null | null | null |
bstree.py
|
gidj/pyBSTree
|
9eb5a11a073b6c2023984d9cebcb28c590725bca
|
[
"BSD-2-Clause"
] | null | null | null |
import collections
class BSTree:
def __init__(self, *args, **kwargs):
self.head = None
if args:
            print(args)
for item in args:
self.insert(item)
if kwargs:
            print(kwargs)
for item in kwargs.items():
self.insert(*item)
def insert(self, key, value=None):
if not self.head:
self.head = Node(key, value)
return self.head.payload
cursor = self.head
while cursor:
if cursor.key > key:
if cursor.left:
cursor = cursor.left
else:
cursor.left = Node(key, value)
return cursor.left.payload
else:
if cursor.right:
cursor = cursor.right
else:
cursor.right = Node(key, value)
return cursor.right.payload
def search(self, key):
cursor = self.head
while cursor:
if cursor.key == key:
return cursor.payload
elif cursor.key > key:
cursor = cursor.left
else:
cursor = cursor.right
return None
def delete(self, key):
""" Takes a key, finds the node associated with that key and returns the
value of the deleted key after deleting the node and maintaining the
search structure of the tree"""
pass
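        # (Added) A minimal sketch of standard BST deletion, supplementing the
        # ``pass`` placeholder above; it assumes the Node class defined below.
        parent, cursor = None, self.head
        while cursor and cursor.key != key:
            parent = cursor
            cursor = cursor.left if cursor.key > key else cursor.right
        if cursor is None:
            return None
        payload = cursor.payload
        if cursor.left and cursor.right:
            # Two children: copy the in-order successor in, then splice it out
            parent, successor = cursor, cursor.right
            while successor.left:
                parent, successor = successor, successor.left
            cursor.key, cursor.payload = successor.key, successor.payload
            cursor = successor
        # cursor now has at most one child
        child = cursor.left or cursor.right
        if parent is None:
            self.head = child
        elif parent.left is cursor:
            parent.left = child
        else:
            parent.right = child
        return payload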
def print_in_order(self):
if self.head is None:
return []
else:
return self.head.print_in_order()
class Node:
def __init__(self, key, value, left=None, right=None):
self.key = key
# Sometimes the key is the value; if so, payload is just the key
if value is None:
self.payload = self.key
else:
self.payload = value
self.left = left
self.right = right
def print_in_order(self):
sublist = []
if self.left:
sublist.extend(self.left.print_in_order())
sublist.append(self.payload)
if self.right:
sublist.extend(self.right.print_in_order())
return sublist
| 28.316456
| 80
| 0.513634
|
5b7ff10826404314bedffb5789983b43df346a4d
| 3,230
|
py
|
Python
|
thinc/layers/__init__.py
|
TheVinhLuong102/thinc
|
7b54f728ddec7765a1d8a5e553d4b4b90b9edaec
|
[
"MIT"
] | 1
|
2022-03-08T07:25:21.000Z
|
2022-03-08T07:25:21.000Z
|
thinc/layers/__init__.py
|
TheVinhLuong102/thinc
|
7b54f728ddec7765a1d8a5e553d4b4b90b9edaec
|
[
"MIT"
] | null | null | null |
thinc/layers/__init__.py
|
TheVinhLuong102/thinc
|
7b54f728ddec7765a1d8a5e553d4b4b90b9edaec
|
[
"MIT"
] | 1
|
2021-07-18T16:28:32.000Z
|
2021-07-18T16:28:32.000Z
|
# Weights layers
from .cauchysimilarity import CauchySimilarity
from .dropout import Dropout
from .embed import Embed
from .expand_window import expand_window
from .hashembed import HashEmbed
from .layernorm import LayerNorm
from .linear import Linear
from .lstm import LSTM, PyTorchLSTM
from .logistic import Logistic
from .maxout import Maxout
from .mish import Mish
from .multisoftmax import MultiSoftmax
from .parametricattention import ParametricAttention
from .pytorchwrapper import PyTorchWrapper, PyTorchWrapper_v2
from .pytorchwrapper import PyTorchRNNWrapper
from .relu import Relu
from .resizable import resizable
from .sigmoid_activation import sigmoid_activation
from .sigmoid import Sigmoid
from .softmax_activation import softmax_activation
from .softmax import Softmax
from .sparselinear import SparseLinear
from .tensorflowwrapper import TensorFlowWrapper, keras_subclass
from .mxnetwrapper import MXNetWrapper
# Combinators
from .add import add
from .bidirectional import bidirectional
from .chain import chain
from .clone import clone
from .concatenate import concatenate
from .map_list import map_list
from .noop import noop
from .residual import residual
from .uniqued import uniqued
from .siamese import siamese
from .tuplify import tuplify
# Pooling
from .reduce_first import reduce_first
from .reduce_last import reduce_last
from .reduce_max import reduce_max
from .reduce_mean import reduce_mean
from .reduce_sum import reduce_sum
# Array manipulation
from .array_getitem import array_getitem
# Data-type transfers
from .list2array import list2array
from .list2ragged import list2ragged
from .list2padded import list2padded
from .ragged2list import ragged2list
from .padded2list import padded2list
from .remap_ids import remap_ids
from .strings2arrays import strings2arrays
from .with_array import with_array
from .with_array2d import with_array2d
from .with_cpu import with_cpu
from .with_flatten import with_flatten
from .with_padded import with_padded
from .with_list import with_list
from .with_ragged import with_ragged
from .with_reshape import with_reshape
from .with_getitem import with_getitem
from .with_debug import with_debug
from .with_nvtx_range import with_nvtx_range
__all__ = [
"CauchySimilarity",
"Linear",
"Dropout",
"Embed",
"expand_window",
"HashEmbed",
"LayerNorm",
"LSTM",
"Maxout",
"Mish",
"MultiSoftmax",
"ParametricAttention",
"PyTorchLSTM",
"PyTorchWrapper",
"PyTorchWrapper_v2",
"PyTorchRNNWrapper",
"Relu",
"sigmoid_activation",
"Sigmoid"
"softmax_activation",
"Softmax",
"SparseLinear",
"TensorFlowWrapper",
"add",
"bidirectional",
"chain",
"clone",
"concatenate",
"noop",
"residual",
"uniqued",
"siamese",
"reduce_first",
"reduce_last",
"reduce_max",
"reduce_mean",
"reduce_sum",
"resizable",
"list2array",
"list2ragged",
"list2padded",
"ragged2list",
"padded2list",
"with_reshape",
"with_getitem",
"with_array",
"with_array2d",
"with_cpu",
"with_list",
"with_ragged",
"with_padded",
"with_flatten",
"with_debug",
"with_nvtx_range",
"remap_ids",
]
| 25.234375
| 64
| 0.760991
|
12e7df6d832adc256529e84feb7c8382bd08e6bd
| 1,147
|
py
|
Python
|
system_disk/home/pi/tests/testAutoStop.py
|
IronMines/PST2017-2018
|
ea97c521380edcee7d2f991c402d1856c77e0739
|
[
"MIT"
] | null | null | null |
system_disk/home/pi/tests/testAutoStop.py
|
IronMines/PST2017-2018
|
ea97c521380edcee7d2f991c402d1856c77e0739
|
[
"MIT"
] | null | null | null |
system_disk/home/pi/tests/testAutoStop.py
|
IronMines/PST2017-2018
|
ea97c521380edcee7d2f991c402d1856c77e0739
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
import RPi.GPIO as GPIO
import time
import serial
GPIO.setmode(GPIO.BCM)
inputs_IO = [5,6,13,19,26,12]
for i in inputs_IO:
GPIO.setup(i, GPIO.IN)
ser = serial.Serial(
port="/dev/ttyS0",
baudrate=115200,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE,
bytesize=serial.EIGHTBITS
)
ser.isOpen()
ser.write("z")
time.sleep(0.5)
ser.write("mc")
time.sleep(0.5)
ser.write("mw30")
time.sleep(0.05)
ser.write("z")
time.sleep(0.05)
print("Motor on")
run = True
while run:
l = "0" if GPIO.input(5)==1 else "1"
fl = "0" if GPIO.input(19)==1 else "1"
f = "0" if GPIO.input(6)==1 else "1"
fr = "0" if GPIO.input(26)==1 else "1"
r = "0" if GPIO.input(13)==1 else "1"
b = "0" if GPIO.input(12)==1 else "1"
test = l + fl + f + fr + r + b
if( test != "000000" ):
ser.write("mc")
time.sleep(0.05)
ser.write("z")
run = False
print("Wall detected")
print "Left\t| Front Left\t| Front\t| Front Right\t| Right\t| Behind"
print " "+l+"\t| "+fl+"\t\t| "+f+"\t| "+fr+"\t\t| "+r+"\t|"+b
print("Bye")
| 20.482143
| 71
| 0.628596
|
47142723f444895fe045d87967c8adfeaacb8ea3
| 3,268
|
py
|
Python
|
avalanche/benchmarks/datasets/core50/core50_data.py
|
aishikhar/avalanche
|
39c361aba1663795ed33f093ab2e15cc5792026e
|
[
"MIT"
] | 1
|
2021-08-11T19:43:38.000Z
|
2021-08-11T19:43:38.000Z
|
avalanche/benchmarks/datasets/core50/core50_data.py
|
aishikhar/avalanche
|
39c361aba1663795ed33f093ab2e15cc5792026e
|
[
"MIT"
] | null | null | null |
avalanche/benchmarks/datasets/core50/core50_data.py
|
aishikhar/avalanche
|
39c361aba1663795ed33f093ab2e15cc5792026e
|
[
"MIT"
] | 1
|
2021-04-09T08:10:27.000Z
|
2021-04-09T08:10:27.000Z
|
################################################################################
# Copyright (c) 2021 ContinualAI. #
# Copyrights licensed under the MIT License. #
# See the accompanying LICENSE file for terms. #
# #
# Date: 11-05-2020 #
# Author: ContinualAI #
# E-mail: contact@continualai.org #
# Website: continualai.org #
################################################################################
import os
import sys
import logging
from zipfile import ZipFile
if sys.version_info[0] >= 3:
from urllib.request import urlretrieve
else:
    # Not Python 3 - today, it is most likely to be Python 2.
    # Note that this might need an update if Python 4
    # comes around one day.
from urllib import urlretrieve
filename = [
('core50_128x128.zip',
'http://bias.csr.unibo.it/maltoni/download/core50/core50_128x128.zip'),
('paths.pkl', 'https://vlomonaco.github.io/core50/data/paths.pkl'),
('LUP.pkl', 'https://vlomonaco.github.io/core50/data/LUP.pkl'),
('labels.pkl', 'https://vlomonaco.github.io/core50/data/labels.pkl'),
('core50_imgs.npz',
'http://bias.csr.unibo.it/maltoni/download/core50/core50_imgs.npz'),
('batches_filelists.zip',
'https://vlomonaco.github.io/core50/data/batches_filelists.zip'),
('batches_filelists_NICv2.zip',
'https://vlomonaco.github.io/core50/data/batches_filelists_NICv2.zip')
]
class CORE50_DATA(object):
"""
CORE50 downloader.
"""
def __init__(self, data_folder='data/'):
"""
Args:
data_folder (string): folder in which to download core50 dataset.
"""
self.log = logging.getLogger("avalanche")
if os.path.isabs(data_folder):
self.data_folder = data_folder
else:
self.data_folder = os.path.join(os.path.dirname(__file__),
data_folder)
try:
# Create target Directory for CORE50 data
os.makedirs(self.data_folder)
self.log.info("Directory %s created", self.data_folder)
self.download = True
self.download_core50()
except OSError:
self.download = False
self.log.error("Directory %s already exists", self.data_folder)
def download_core50(self):
for name in filename:
self.log.info("Downloading " + name[1] + "...")
urlretrieve(name[1], os.path.join(self.data_folder, name[0]))
if name[1].endswith('.zip'):
with ZipFile(
os.path.join(self.data_folder, name[0]), 'r') as zipf:
self.log.info('Extracting CORe50 images...')
zipf.extractall(self.data_folder)
self.log.info('Done!')
self.log.info("Download complete.")
__all__ = [
'CORE50_DATA'
]
| 35.912088
| 80
| 0.504284
|
a1cc76889d94f049359fb7874352e7d58055eba0
| 823
|
py
|
Python
|
google/ads/google_ads/v4/services/ad_group_ad_asset_view_service_client_config.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | 1
|
2021-04-09T04:28:47.000Z
|
2021-04-09T04:28:47.000Z
|
google/ads/google_ads/v4/services/ad_group_ad_asset_view_service_client_config.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | null | null | null |
google/ads/google_ads/v4/services/ad_group_ad_asset_view_service_client_config.py
|
arammaliachi/google-ads-python
|
a4fe89567bd43eb784410523a6306b5d1dd9ee67
|
[
"Apache-2.0"
] | null | null | null |
config = {
"interfaces": {
"google.ads.googleads.v4.services.AdGroupAdAssetViewService": {
"retry_codes": {
"idempotent": [
"DEADLINE_EXCEEDED",
"UNAVAILABLE"
],
"non_idempotent": []
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 5000,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 3600000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 3600000,
"total_timeout_millis": 3600000
}
},
"methods": {
"GetAdGroupAdAssetView": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default"
}
}
}
}
}
| 25.71875
| 67
| 0.53706
|
0738ae74ea8317102423a614311d162935cd5d23
| 3,280
|
py
|
Python
|
keras/utils/layer_utils.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 5
|
2020-11-30T22:26:03.000Z
|
2020-12-01T22:34:25.000Z
|
keras/utils/layer_utils.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 10
|
2020-12-01T22:55:29.000Z
|
2020-12-11T18:31:46.000Z
|
keras/utils/layer_utils.py
|
ikingye/keras
|
1a3ee8441933fc007be6b2beb47af67998d50737
|
[
"MIT"
] | 15
|
2020-11-30T22:12:22.000Z
|
2020-12-09T01:32:48.000Z
|
"""Utilities related to layer/model functionality.
"""
from .conv_utils import convert_kernel
from .. import backend as K
import numpy as np
from tensorflow.keras.utils import get_source_inputs
from tensorflow.python.keras.utils.layer_utils import print_summary
def count_params(weights):
"""Count the total number of scalars composing the weights.
# Arguments
weights: An iterable containing the weights on which to compute params
# Returns
The total number of scalars composing the weights
"""
weight_ids = set()
total = 0
for w in weights:
if id(w) not in weight_ids:
weight_ids.add(id(w))
total += int(K.count_params(w))
return total
def convert_all_kernels_in_model(model):
"""Converts all convolution kernels in a model from Theano to TensorFlow.
Also works from TensorFlow to Theano.
# Arguments
model: target model for the conversion.
"""
# Note: SeparableConvolution not included
# since only supported by TF.
conv_classes = {
'Conv1D',
'Conv2D',
'Conv3D',
'Conv2DTranspose',
}
to_assign = []
for layer in model.layers:
if layer.__class__.__name__ in conv_classes:
original_kernel = K.get_value(layer.kernel)
converted_kernel = convert_kernel(original_kernel)
to_assign.append((layer.kernel, converted_kernel))
K.batch_set_value(to_assign)
def convert_dense_weights_data_format(dense,
previous_feature_map_shape,
target_data_format='channels_first'):
"""Utility useful when changing a convnet's `data_format`.
When porting the weights of a convnet from one data format to the other,
if the convnet includes a `Flatten` layer
(applied to the last convolutional feature map)
followed by a `Dense` layer, the weights of that `Dense` layer
should be updated to reflect the new dimension ordering.
# Arguments
dense: The target `Dense` layer.
previous_feature_map_shape: A shape tuple of 3 integers,
e.g. `(512, 7, 7)`. The shape of the convolutional
feature map right before the `Flatten` layer that
came before the target `Dense` layer.
target_data_format: One of "channels_last", "channels_first".
Set it "channels_last"
if converting a "channels_first" model to "channels_last",
or reciprocally.
"""
assert target_data_format in {'channels_last', 'channels_first'}
kernel, bias = dense.get_weights()
for i in range(kernel.shape[1]):
if target_data_format == 'channels_first':
c, h, w = previous_feature_map_shape
original_fm_shape = (h, w, c)
ki = kernel[:, i].reshape(original_fm_shape)
ki = np.transpose(ki, (2, 0, 1)) # last -> first
else:
h, w, c = previous_feature_map_shape
original_fm_shape = (c, h, w)
ki = kernel[:, i].reshape(original_fm_shape)
ki = np.transpose(ki, (1, 2, 0)) # first -> last
kernel[:, i] = np.reshape(ki, (np.prod(previous_feature_map_shape),))
dense.set_weights([kernel, bias])
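# Example usage (added sketch; the layer name 'fc1' and the feature map shape
# are hypothetical): after porting a 'channels_first' convnet whose Flatten
# consumed a (512, 7, 7) feature map to 'channels_last', fix the Dense layer
# that followed the Flatten with:
#
# convert_dense_weights_data_format(
#     model.get_layer('fc1'),
#     previous_feature_map_shape=(512, 7, 7),
#     target_data_format='channels_last')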
| 35.652174
| 78
| 0.641159
|
dca65a02dc9a91bbe106614c061a406e60408881
| 117
|
py
|
Python
|
Python 3.8/1096 - Sequencia IJ 2.py
|
JhonatanGuilherme/BeeCrowd
|
e039f8128399697ad9eb75f48047b83eb7b0201e
|
[
"MIT"
] | null | null | null |
Python 3.8/1096 - Sequencia IJ 2.py
|
JhonatanGuilherme/BeeCrowd
|
e039f8128399697ad9eb75f48047b83eb7b0201e
|
[
"MIT"
] | null | null | null |
Python 3.8/1096 - Sequencia IJ 2.py
|
JhonatanGuilherme/BeeCrowd
|
e039f8128399697ad9eb75f48047b83eb7b0201e
|
[
"MIT"
] | null | null | null |
I = 1
J = 7
while I < 10:
print('I={} J={}'.format(I, J))
J -= 1
if J < 5:
J += 3
I += 2
| 13
| 35
| 0.316239
|
03104f50061698832f29505894abebbb2e01de77
| 1,149
|
py
|
Python
|
ansible/venv/lib/python2.7/site-packages/ansible/plugins/action/slxos_config.py
|
gvashchenkolineate/gvashchenkolineate_infra_trytravis
|
0fb18850afe0d8609693ba4b23f29c7cda17d97f
|
[
"MIT"
] | 17
|
2017-06-07T23:15:01.000Z
|
2021-08-30T14:32:36.000Z
|
ansible/venv/lib/python2.7/site-packages/ansible/plugins/action/slxos_config.py
|
gvashchenkolineate/gvashchenkolineate_infra_trytravis
|
0fb18850afe0d8609693ba4b23f29c7cda17d97f
|
[
"MIT"
] | 9
|
2017-06-25T03:31:52.000Z
|
2021-05-17T23:43:12.000Z
|
ansible/venv/lib/python2.7/site-packages/ansible/plugins/action/slxos_config.py
|
gvashchenkolineate/gvashchenkolineate_infra_trytravis
|
0fb18850afe0d8609693ba4b23f29c7cda17d97f
|
[
"MIT"
] | 3
|
2018-05-26T21:31:22.000Z
|
2019-09-28T17:00:45.000Z
|
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from ansible.plugins.action.network import ActionModule as ActionNetworkModule
PRIVATE_KEYS_RE = re.compile('__.+__')
class ActionModule(ActionNetworkModule):
def run(self, tmp=None, task_vars=None):
del tmp # tmp no longer has any effect
self._config_module = True
return super(ActionModule, self).run(task_vars=task_vars)
| 31.916667
| 78
| 0.752829
|
43adaf374bff78bef0ece0166067491416f76b68
| 1,696
|
py
|
Python
|
geoposition/fields.py
|
avallbona/django-geoposition
|
ce8c40c60bf7dfdcec4b47b2f3e013fd75b7fcc3
|
[
"MIT"
] | 13
|
2018-10-28T21:39:45.000Z
|
2022-02-05T02:55:43.000Z
|
geoposition/fields.py
|
avallbona/django-geoposition
|
ce8c40c60bf7dfdcec4b47b2f3e013fd75b7fcc3
|
[
"MIT"
] | 5
|
2019-04-05T13:48:51.000Z
|
2020-03-25T10:40:18.000Z
|
geoposition/fields.py
|
avallbona/django-geoposition
|
ce8c40c60bf7dfdcec4b47b2f3e013fd75b7fcc3
|
[
"MIT"
] | 13
|
2018-11-13T12:11:55.000Z
|
2021-12-28T09:40:41.000Z
|
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_text
from . import Geoposition
from .forms import GeopositionField as GeopositionFormField
class GeopositionField(models.Field):
description = _("A geoposition (latitude and longitude)")
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 42
super(GeopositionField, self).__init__(*args, **kwargs)
def get_internal_type(self):
return 'CharField'
def to_python(self, value):
if not value or value == 'None':
return None
if isinstance(value, Geoposition):
return value
if isinstance(value, list):
return Geoposition(value[0], value[1])
# default case is string
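        # e.g. to_python('52.5,13.4') -> Geoposition('52.5', '13.4');
        # a bare '52.5' falls back to longitude '0.0' (added example)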
value_parts = value.rsplit(',')
try:
latitude = value_parts[0]
except IndexError:
latitude = '0.0'
try:
longitude = value_parts[1]
except IndexError:
longitude = '0.0'
return Geoposition(latitude, longitude)
# TODO: drop context parameter when dropping 1.11 support
def from_db_value(self, value, expression, connection, context=None):
return self.to_python(value)
def get_prep_value(self, value):
return str(value)
def value_to_string(self, obj):
value = self.value_from_object(obj)
return smart_text(value)
def formfield(self, **kwargs):
defaults = {'form_class': GeopositionFormField}
defaults.update(kwargs)
return super(GeopositionField, self).formfield(**defaults)
| 29.754386
| 73
| 0.652712
|
6ace5e17a0872fffca819597d78b7f62acaa2704
| 85
|
py
|
Python
|
Course/apps.py
|
Ryize/CourseMC
|
1f2823870462b4f6957621975c9b657430649bd4
|
[
"Apache-2.0"
] | 2
|
2021-12-20T16:57:02.000Z
|
2021-12-20T17:00:24.000Z
|
Course/apps.py
|
Ryize/CourseMC
|
1f2823870462b4f6957621975c9b657430649bd4
|
[
"Apache-2.0"
] | null | null | null |
Course/apps.py
|
Ryize/CourseMC
|
1f2823870462b4f6957621975c9b657430649bd4
|
[
"Apache-2.0"
] | null | null | null |
from django.apps import AppConfig
class AuthConfig(AppConfig):
name = 'Course'
| 14.166667
| 33
| 0.741176
|
b740a509209e5ad0cd98eeebebcfcb8763cd78cc
| 903
|
py
|
Python
|
filter.py
|
lsh0357/pytorch-CycleGAN-and-pix2pix
|
4bef8e8895e0233aba5f385512719a96b5f05410
|
[
"BSD-3-Clause"
] | 2
|
2020-05-14T09:09:02.000Z
|
2020-11-24T04:36:23.000Z
|
filter.py
|
lsh0357/pytorch-CycleGAN-and-pix2pix
|
4bef8e8895e0233aba5f385512719a96b5f05410
|
[
"BSD-3-Clause"
] | null | null | null |
filter.py
|
lsh0357/pytorch-CycleGAN-and-pix2pix
|
4bef8e8895e0233aba5f385512719a96b5f05410
|
[
"BSD-3-Clause"
] | 1
|
2020-06-07T08:36:20.000Z
|
2020-06-07T08:36:20.000Z
|
import glob, os, shutil
def main():
testA, testB = 200, 400
for filename in glob.iglob('/Users/shihaoli/Downloads/face-pics/**', recursive=True):
if os.path.isfile(filename): # filter dirs
fn = os.path.basename(filename)
age = int(fn.split('_')[0])
if age <= 5:
if testA > 0:
shutil.move(filename, "/Users/shihaoli/Downloads/old2young/testA")
testA -= 1
else:
shutil.move(filename, "/Users/shihaoli/Downloads/old2young/trainA")
elif 25 <= age <= 35:
if testB > 0:
shutil.move(filename, "/Users/shihaoli/Downloads/old2young/testB")
testB -= 1
else:
shutil.move(filename, "/Users/shihaoli/Downloads/old2young/trainB")
if __name__ == "__main__":
main()
| 39.26087
| 89
| 0.520487
|
53df09a1edbd8b758b3c0cbeea23acb6b7b7e781
| 2,490
|
py
|
Python
|
chainer_ssim/structural_similarity3d_loss.py
|
zEttOn86/chainer-ssim
|
e0e87600b8f9616415d3ad4eeb95357c3efb49ce
|
[
"MIT"
] | null | null | null |
chainer_ssim/structural_similarity3d_loss.py
|
zEttOn86/chainer-ssim
|
e0e87600b8f9616415d3ad4eeb95357c3efb49ce
|
[
"MIT"
] | null | null | null |
chainer_ssim/structural_similarity3d_loss.py
|
zEttOn86/chainer-ssim
|
e0e87600b8f9616415d3ad4eeb95357c3efb49ce
|
[
"MIT"
] | 1
|
2022-01-17T10:46:46.000Z
|
2022-01-17T10:46:46.000Z
|
#coding:utf-8
import chainer
import chainer.functions as F
import numpy as np
from math import exp
def gaussian(window_size, sigma, xp):
"""
https://daily.belltail.jp/?p=2457
"""
x = xp.arange(0, window_size, dtype=xp.float32)
gauss = xp.exp(-(x-window_size//2)**2/(2*sigma**2))
# gauss = chainer.Variable(xp.array([exp(-(x - window_size//2)**2/float(2*sigma**2)) for x in range(window_size)], dtype=xp.float32))
return gauss/xp.sum(gauss)
def create_window(window_size, channel, xp):
weight = gaussian(window_size, 1.5, xp)
x_window = weight.reshape(1, 1, 1, 1, -1) # (out_ch, in_ch, z, y, x)
x_window = xp.repeat(x_window, channel, axis=0)
y_window = weight.reshape(1, 1, 1, -1, 1) # (out_ch, in_ch, z, y, x)
y_window = xp.repeat(y_window, channel, axis=0)
z_window = weight.reshape(1, 1, -1, 1, 1) # (out_ch, in_ch, z, y, x)
z_window = xp.repeat(z_window, channel, axis=0)
return x_window, y_window, z_window
def gaussian_filter(img, window, pad, channel):
x_window, y_window, z_window = window
h = F.convolution_3d(img, x_window, pad=(0, 0, pad), groups=channel)
h = F.convolution_3d(h, y_window, pad=(0, pad, 0), groups=channel)
return F.convolution_3d(h, z_window, pad=(pad, 0, 0), groups=channel)
def _calc_ssim(img1, img2, window, window_size, channel, data_range, size_average=True):
mu1 = gaussian_filter(img1, window, pad=window_size//2, channel=channel)
mu2 = gaussian_filter(img2, window, pad=window_size//2, channel=channel)
mu1_sq = F.square(mu1)
mu2_sq = F.square(mu2)
mu1_mu2 = mu1 * mu2
sigma1_sq = gaussian_filter(img1*img1, window, pad=window_size//2, channel=channel) - mu1_sq
sigma2_sq = gaussian_filter(img2*img2, window, pad=window_size//2, channel=channel) - mu2_sq
sigma12 = gaussian_filter(img1*img2, window, pad=window_size//2, channel=channel) - mu1_mu2
C1 = (0.01*data_range)**2
C2 = (0.03*data_range)**2
ssim_map = ((2*mu1_mu2 + C1)*(2*sigma12 + C2))/((mu1_sq + mu2_sq + C1)*(sigma1_sq + sigma2_sq + C2))
if size_average:
return F.mean(ssim_map)
    raise NotImplementedError()
def structural_similarity3d_loss(img1, img2, window_size = 11, data_range=1, size_average = True):
(_, channel, _, _, _) = img1.shape
xp = chainer.backends.cuda.get_array_module(img1)
window = create_window(window_size, channel, xp)
return _calc_ssim(img1, img2, window, window_size, channel, data_range, size_average)
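# Example usage (added sketch; random NCDHW volumes with values in [0, 1]):
#
# import numpy as np
# x = np.random.rand(1, 1, 16, 16, 16).astype(np.float32)
# y = np.random.rand(1, 1, 16, 16, 16).astype(np.float32)
# loss = structural_similarity3d_loss(x, y, window_size=11, data_range=1)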
| 40.819672
| 137
| 0.681526
|
c75748c88d36f80b7b832a104000a7fd266bb135
| 19,820
|
py
|
Python
|
opacus/sv_privacy_engine.py
|
twosixlabs/opacus
|
c52af6f1e4cd32d27e60e255ec52a51ff135f996
|
[
"Apache-2.0"
] | null | null | null |
opacus/sv_privacy_engine.py
|
twosixlabs/opacus
|
c52af6f1e4cd32d27e60e255ec52a51ff135f996
|
[
"Apache-2.0"
] | null | null | null |
opacus/sv_privacy_engine.py
|
twosixlabs/opacus
|
c52af6f1e4cd32d27e60e255ec52a51ff135f996
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import math
import os
import types
import warnings
from functools import partial
from typing import List, Optional, Tuple, Union
import torch
from scipy.stats import planck
from torch import Tensor, nn
import numpy as np
from opacus.grad_sample import GradSampleModule
from .grad_sample.utils import accum_grads_across_passes
from opacus.utils.tensor_utils import calc_sample_norms_one_layer
from . import privacy_analysis
from .dp_model_inspector import DPModelInspector
from .layers.dp_ddp import (
DifferentiallyPrivateDistributedDataParallel,
average_gradients,
)
from .per_sample_gradient_clip import PerSampleGradientClipper
from .utils import clipping
class SVPrivacyEngine:
r"""
    ``SVPrivacyEngine`` is a variant of the Opacus ``PrivacyEngine`` that
    aggregates per-sample gradients by a majority vote on their signs
    (see ``vote_on_grads``).
    To train a model with differential privacy, all you need to do
    is to define an ``SVPrivacyEngine`` and later attach it to your
    optimizer before running.
    Example:
        This example shows how to define an ``SVPrivacyEngine`` and to attach
        it to your optimizer.
        >>> import torch
        >>> model = torch.nn.Linear(16, 32) # An example model
        >>> optimizer = torch.optim.SGD(model.parameters(), lr=0.05)
        >>> privacy_engine = SVPrivacyEngine(model, batch_size=64, sample_size=50000)
        >>> privacy_engine.attach(optimizer) # That's it! Now it's business as usual.
"""
# flake8: noqa: C901
def __init__(
self,
module: nn.Module,
*, # As per PEP 3102, this forces clients to specify kwargs explicitly, not positionally
batch_size: Optional[int] = None,
sample_size: Optional[int] = None,
secure_rng: bool = False,
batch_first: bool = True,
target_delta: float = 1e-6,
target_epsilon: Optional[float] = None,
epochs: Optional[float] = None,
rho_per_epoch: float = 1000,
smooth_sens_t: float = 0.01,
**misc_settings,
):
r"""
        Args:
            module: The Pytorch module to which we are attaching the privacy engine
            batch_size: Training batch size. Used in the privacy accountant.
            sample_size: The size of the sample (dataset). Used in the privacy accountant.
            secure_rng: If on, it will use ``torchcsprng`` for secure random number generation.
                Comes with a significant performance cost, therefore it's recommended that you
                turn it off when just experimenting.
            batch_first: Flag to indicate if the input tensor to the corresponding module
                has the first dimension representing the batch. If set to True, dimensions on
                input tensor will be ``[batch_size, ..., ...]``.
            target_delta: The target delta. If unset, we will set it for you.
            target_epsilon: The target epsilon. Accepted but not currently used by this engine.
            epochs: Number of training epochs. Accepted but not currently used by this engine.
            rho_per_epoch: The zCDP budget (rho) spent per epoch, divided evenly across
                all trainable weights.
            smooth_sens_t: The smooth-sensitivity parameter ``t`` used when calibrating noise.
            **misc_settings: Other arguments to the init
"""
self.steps = 0
self.batch_size = batch_size
self.sample_size = sample_size
if isinstance(
module, DifferentiallyPrivateDistributedDataParallel
) or isinstance(module, torch.nn.parallel.DistributedDataParallel):
rank = torch.distributed.get_rank()
n_replicas = torch.distributed.get_world_size()
else:
rank = 0
n_replicas = 1
self.module = GradSampleModule(module, accum_passes=True)
self.target_delta = target_delta
self.secure_rng = secure_rng
self.batch_first = batch_first
self.misc_settings = misc_settings
self.n_replicas = n_replicas
self.rank = rank
self.smooth_sens_t = smooth_sens_t
self.rho_per_epoch = rho_per_epoch
self.rho_per_weight = rho_per_epoch / sum(p.numel() for p in self.module.parameters() if p.requires_grad)
self._optimize_sigma()
self.voted_this_batch = False
self.device = next(module.parameters()).device
self.steps = 0
if not self.target_delta:
if self.sample_size:
warnings.warn(
"target_delta unset. Setting it to an order of magnitude less than 1/sample_size."
)
self.target_delta = 0.1 * (1 / self.sample_size)
else:
raise ValueError("Please provide a target_delta.")
if self.secure_rng:
self.seed = None
try:
import torchcsprng as csprng
except ImportError as e:
msg = (
"To use secure RNG, you must install the torchcsprng package! "
"Check out the instructions here: https://github.com/pytorch/csprng#installation"
)
raise ImportError(msg) from e
self.seed = None
self.random_number_generator = csprng.create_random_device_generator(
"/dev/urandom"
)
else:
warnings.warn(
"Secure RNG turned off. This is perfectly fine for experimentation as it allows "
"for much faster training performance, but remember to turn it on and retrain "
"one last time before production with ``secure_rng`` turned on."
)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
self.seed = int.from_bytes(os.urandom(8), byteorder="big", signed=True)
self.random_number_generator = self._set_seed(self.seed)
self.validator = DPModelInspector()
self.clipper = None # lazy initialization in attach
def state_dict(self):
return {
"steps": self.steps,
}
def load_state_dict(self, state_dict):
self.steps = state_dict["steps"]
def detach(self):
r"""
Detaches the privacy engine from optimizer.
To detach the ``PrivacyEngine`` from optimizer, this method returns
the model and the optimizer to their original states (i.e. all
added attributes/methods will be removed).
"""
        # 1. Fix optimizer
        optim = self.optimizer
        optim.step = optim.original_step
        optim.zero_grad = optim.original_zero_grad # restore before the backup is deleted
        delattr(optim, "privacy_engine")
        delattr(optim, "original_step")
        delattr(optim, "original_zero_grad")
        delattr(optim, "virtual_step")
# 2. Fix module
self.module._close()
def attach(self, optimizer: torch.optim.Optimizer):
r"""
Attaches the privacy engine to the optimizer.
Attaches to the ``PrivacyEngine`` an optimizer object,and injects
itself into the optimizer's step. To do that it,
1. Validates that the model does not have unsupported layers.
2. Adds a pointer to this object (the ``PrivacyEngine``) inside the optimizer.
3. Moves optimizer's original ``step()`` function to ``original_step()``.
4. Monkeypatches the optimizer's ``step()`` function to call ``step()`` on
the query engine automatically whenever it would call ``step()`` for itself.
Args:
optimizer: The optimizer to which the privacy engine will attach
"""
if hasattr(optimizer, "privacy_engine"):
if optimizer.privacy_engine != self:
raise ValueError(
f"Trying to attach to optimizer: {optimizer}, but that optimizer is "
f"already attached to a different Privacy Engine: {optimizer.privacy_engine}."
)
else:
warnings.warn(
"Trying to attach twice to the same optimizer. Nothing to do."
)
return
self.validator.validate(self.module)
def dp_zero_grad(self):
self.privacy_engine.zero_grad()
self.original_zero_grad()
def dp_step(self, closure=None, is_empty=False):
# When the DDP hooks are activated, there is no need for ``PrivacyEngine.step()``
# because the clipping and noising are performed by the hooks at the end of the backward pass
if hasattr(self.privacy_engine.module, "ddp_hooks"):
# We just update the accountant
self.privacy_engine.steps += 1
else:
self.privacy_engine.step(is_empty)
if isinstance(
self.privacy_engine.module._module,
DifferentiallyPrivateDistributedDataParallel,
):
average_gradients(self.privacy_engine.module)
self.original_step(closure)
optimizer.privacy_engine = self
optimizer.dp_step = types.MethodType(dp_step, optimizer)
optimizer.original_step = optimizer.step
optimizer.step = types.MethodType(dp_step, optimizer)
optimizer.original_zero_grad = optimizer.zero_grad
optimizer.zero_grad = types.MethodType(dp_zero_grad, optimizer)
def virtual_step(self):
if hasattr(self.privacy_engine.module, "ddp_hooks"):
raise NotImplementedError("DDP hook does not support virtual steps.")
self.privacy_engine.virtual_step()
optimizer.virtual_step = types.MethodType(virtual_step, optimizer)
# create a cross reference for detaching
self.optimizer = optimizer
def get_privacy_spent(
self, target_delta: Optional[float] = None
) -> Tuple[float, float]:
        if target_delta is None:
            target_delta = self.target_delta
        n_epochs = int((self.steps-1) * (self.batch_size // self.sample_size))+1 # round up
rho = self.rho_per_epoch * n_epochs
return rho + 2*np.sqrt(rho * np.log(1/target_delta)), target_delta
def zero_grad(self):
"""
        Resets the clipper's status.
        The clipper keeps internal per-sample gradients for the batch in each
        ``forward`` call of the module; they need to be cleaned before the
        next round.
        If these variables are not cleaned, the per-sample gradients keep
        being concatenated across batches. If accumulating gradients is the
        intended behaviour, e.g. simulating a large batch, prefer using the
        ``virtual_step()`` function.
"""
if self.clipper is not None:
self.clipper.zero_grad()
def disable_hooks(self) -> None:
self.module.forward_hooks_enabled = False
self.module.backward_hooks_enabled = False
def enable_hooks(self) -> None:
self.module.forward_hooks_enabled = True
self.module.backward_hooks_enabled = True
def disable_forward_hooks(self):
self.module.forward_hooks_enabled = False
def disable_backward_hooks(self):
self.module.backward_hooks_enabled = False
def enable_forward_hooks(self):
self.module.forward_hooks_enabled = True
def enable_backward_hooks(self):
self.module.backward_hooks_enabled = True
def set_accum_passes(self, accum_passes: bool):
self.module.set_accum_passes(accum_passes)
def vote_on_grads(self):
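        # (Added note) Sign-based majority voting: every sample votes with the
        # sign of its per-sample gradient, and each weight's aggregated
        # gradient is the elementwise majority vote, mapped back to {-1, +1}.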
with torch.no_grad():
batch_size = next(iter(self.module.parameters())).grad_sample.size(1)
for p in (pa for pa in self.module.parameters() if pa.requires_grad):
# reshape p.grad_sample to shape (num_params_in_layer, batch_size)
grad_shape = p.grad_sample.shape[2:]
#print(p.grad_sample.reshape(batch_size, -1).transpose(0, 1).mean(dim=1))
p.grad_sample = (p.grad_sample > 0).reshape(batch_size, -1).transpose(0, 1)
sensitivities = 1
p.grad = (p.grad_sample.sum(dim=1) > p.grad_sample.size(1) // 2).reshape(grad_shape).float() * 2 - 1
del sensitivities
del p.grad_sample
self.voted_this_batch = True
def step(self, is_empty: bool = False):
self.steps += 1
if not self.voted_this_batch:
self.vote_on_grads()
self.voted_this_batch = False
def to(self, device: Union[str, torch.device]):
"""
Moves the privacy engine to the target device.
Args:
device : The device on which Pytorch Tensors are allocated.
See: https://pytorch.org/docs/stable/tensor_attributes.html#torch.torch.device
Example:
This example shows the usage of this method, on how to move the model
after instantiating the ``PrivacyEngine``.
>>> model = torch.nn.Linear(16, 32) # An example model. Default device is CPU
            >>> privacy_engine = SVPrivacyEngine(model, batch_size=64, sample_size=50000)
>>> device = "cuda:3" # GPU
>>> model.to(device) # If we move the model to GPU, we should call the to() method of the privacy engine (next line)
>>> privacy_engine.to(device)
Returns:
The current ``PrivacyEngine``
"""
self.device = device
return self
def virtual_step(self):
r"""
Takes a virtual step.
Virtual batches enable training with arbitrary large batch sizes, while
keeping the memory consumption constant. This is beneficial, when training
models with larger batch sizes than standard models.
Example:
Imagine you want to train a model with batch size of 2048, but you can only
fit batch size of 128 in your GPU. Then, you can do the following:
>>> for i, (X, y) in enumerate(dataloader):
>>> logits = model(X)
>>> loss = criterion(logits, y)
>>> loss.backward()
>>> if i % 16 == 15:
>>> optimizer.step() # this will call privacy engine's step()
>>> optimizer.zero_grad()
>>> else:
>>> optimizer.virtual_step() # this will call privacy engine's virtual_step()
The rough idea of virtual step is as follows:
1. Calling ``loss.backward()`` repeatedly stores the per-sample gradients
for all mini-batches. If we call ``loss.backward()`` ``N`` times on
mini-batches of size ``B``, then each weight's ``.grad_sample`` field will
contain ``NxB`` gradients. Then, when calling ``step()``, the privacy engine
clips all ``NxB`` gradients and computes the average gradient for an effective
batch of size ``NxB``. A call to ``optimizer.zero_grad()`` erases the
per-sample gradients.
        2. By calling ``virtual_step()`` after ``loss.backward()``, the ``B``
per-sample gradients for this mini-batch are clipped and summed up into a
gradient accumulator. The per-sample gradients can then be discarded. After
``N`` iterations (alternating calls to ``loss.backward()`` and
``virtual_step()``), a call to ``step()`` will compute the average gradient
for an effective batch of size ``NxB``.
The advantage here is that this is memory-efficient: it discards the per-sample
gradients after every mini-batch. We can thus handle batches of arbitrary size.
"""
self.clipper.clip()
self.clipper.accumulate_batch()
def _local_layer_ddp_hook(
self, p: torch.Tensor, threshold: float, grad: torch.Tensor
):
"""
Backward hook attached to parameter `p`.
It replaces `grad` by `new_grad` using the per-sample gradients stored in p.grad_sample
Args:
# engine: the privacy engine (to get the DP options and clipping values)
p: the layer to clip and noise
threshold: the flat clipping value for that layer
grad: the gradient (unused, but this argument required to be a valid hook)
The hook operates like ``PrivacyEngine.step()``, but on a single layer:
1. clip_and_accumulate
2. get the clip_values to scale the noise
3. add the noise
"""
# Similar to `ConstantPerLayerClipper.pre_step()`
batch_size = p.grad_sample.shape[0]
clip_value = self.clipper.norm_clipper.thresholds.norm(2)
# Similar to `ConstantPerLayerClipper.calc_clipping_factors`)
norms = calc_sample_norms_one_layer(p.grad_sample)
per_sample_clip_factor = (threshold / (norms + 1e-6)).clamp(max=1.0)
# Do the clipping
summed_grad = self.clipper._weighted_sum(per_sample_clip_factor, p.grad_sample)
# Accumulate the summed gradient for this mini-batch
if hasattr(p, "summed_grad"):
p.summed_grad += summed_grad
else:
p.summed_grad = summed_grad
del p.grad_sample
# Average (or sum) across the batch
new_grad = self.clipper._scale_summed_grad(p.summed_grad, batch_size)
del p.summed_grad
# Only one GPU adds noise
if self.rank == 0:
noise = self._generate_noise(clip_value, new_grad) / batch_size
new_grad += noise
return new_grad
def _set_seed(self, seed: int):
r"""
        Allows you to manually set the seed, allowing for a deterministic run. Useful
        if you want to debug.
WARNING: MANUALLY SETTING THE SEED BREAKS THE GUARANTEE OF SECURE RNG.
For this reason, this method will raise a ValueError if you had ``secure_rng`` turned on.
Args:
seed : The **unsecure** seed
"""
if self.secure_rng:
raise ValueError(
"Seed was manually set on a ``PrivacyEngine`` with ``secure_rng`` turned on."
"This fundamentally breaks secure_rng, and cannot be allowed. "
"If you do need reproducibility with a fixed seed, first instantiate the PrivacyEngine "
"with ``secure_seed`` turned off."
)
self.seed = seed
return (
torch.random.manual_seed(self.seed)
if self.device.type == "cpu"
else torch.cuda.manual_seed(self.seed)
)
def _optimize_sigma(self):
def opt_exp(eps, t, sigma):
return 5 * (eps / t) * sigma**3 - 5 * sigma**2 - 1
target_eps = np.sqrt(2*self.rho_per_weight)
sigma_lower = self.smooth_sens_t / target_eps
sigma_upper = max(2*self.smooth_sens_t / target_eps, 1/2)
loss = opt_exp(target_eps, self.smooth_sens_t, np.mean([sigma_lower, sigma_upper]))
while np.abs(loss) > 0.001:
if loss < 0:
sigma_lower = np.mean([sigma_lower, sigma_upper])
else:
sigma_upper = np.mean([sigma_lower, sigma_upper])
loss = opt_exp(target_eps, self.smooth_sens_t, np.mean([sigma_lower, sigma_upper]))
self.sigma = np.mean([sigma_lower, sigma_upper])
print(self.sigma)
self.sens_scale = 1/(np.exp(-(3/2) * self.sigma**2) * (target_eps - (self.smooth_sens_t / self.sigma)))
print(self.sens_scale)
self.steps += 5
print(self.get_privacy_spent(1e-6))
| 39.719439
| 129
| 0.62114
|
5b0c9c0c3dfc20e8b87b19fdda125124a91f0304
| 6,276
|
py
|
Python
|
ndb/src/google/cloud/ndb/_datastore_api.py
|
shivasiddharth/google-cloud-python
|
91ac0aab77a1a17a8837c49b61df2415de60d2a3
|
[
"Apache-2.0"
] | null | null | null |
ndb/src/google/cloud/ndb/_datastore_api.py
|
shivasiddharth/google-cloud-python
|
91ac0aab77a1a17a8837c49b61df2415de60d2a3
|
[
"Apache-2.0"
] | null | null | null |
ndb/src/google/cloud/ndb/_datastore_api.py
|
shivasiddharth/google-cloud-python
|
91ac0aab77a1a17a8837c49b61df2415de60d2a3
|
[
"Apache-2.0"
] | 1
|
2020-11-15T11:44:36.000Z
|
2020-11-15T11:44:36.000Z
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Functions that interact with Datastore backend."""
import itertools
import grpc
from google.cloud import _helpers
from google.cloud import _http
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore_v1.proto import datastore_pb2_grpc
from google.cloud.ndb import _eventloop
from google.cloud.ndb import _runstate
from google.cloud.ndb import tasklets
_BATCH_LOOKUP = "Lookup"
_NOT_FOUND = object()
def stub():
"""Get the stub for the `Google Datastore` API.
Gets the stub from the current context, creating one if there isn't one
already.
Returns:
:class:`~google.cloud.datastore_v1.proto.datastore_pb2_grpc.DatastoreStub`:
The stub instance.
"""
state = _runstate.current()
if state.stub is None:
client = state.client
if client.secure:
channel = _helpers.make_secure_channel(
client._credentials, _http.DEFAULT_USER_AGENT, client.host
)
else:
channel = grpc.insecure_channel(client.host)
state.stub = datastore_pb2_grpc.DatastoreStub(channel)
return state.stub
def lookup(key):
"""Look up a Datastore entity.
Gets an entity from Datastore, asynchronously. Actually adds the request to
a batch and fires off a Datastore Lookup call as soon as some code asks for
the result of one of the batched requests.
Args:
key (~datastore.Key): The key for the entity to retrieve.
Returns:
:class:`~tasklets.Future`: If not an exception, future's result will be
either an entity protocol buffer or _NOT_FOUND.
"""
key_pb = key.to_protobuf()
future = tasklets.Future()
batch = _get_lookup_batch()
batch.setdefault(key_pb, []).append(future)
return future
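# Example usage (added sketch; assumes an active run-state context, that
# ``key`` is a datastore Key, and that the ``Future`` method name below
# matches the tasklets API):
#
# future = lookup(key)
# entity_pb = future.result() # resolves once the batched Lookup RPC returns
# if entity_pb is _NOT_FOUND:
#     handle_missing(key) # hypothetical handler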
def _get_lookup_batch():
"""Gets a data structure for storing batched calls to Datastore Lookup.
The batch data structure is stored in the current run state. If there is
not already a batch started, a new structure is created and an idle
callback is added to the current event loop which will eventually perform
the batch look up.
Returns:
Dict[~datastore_v1.proto.entity_pb2.Key, List[~tasklets.Future]]
"""
state = _runstate.current()
batch = state.batches.get(_BATCH_LOOKUP)
if batch is not None:
return batch
state.batches[_BATCH_LOOKUP] = batch = {}
_eventloop.add_idle(_perform_batch_lookup)
return batch
def _perform_batch_lookup():
"""Perform a Datastore Lookup on all batched Lookup requests.
Meant to be used as an idle callback, so that calls to lookup entities can
be batched into a single request to the back end service as soon as running
code has need of one of the results.
"""
state = _runstate.current()
batch = state.batches.pop(_BATCH_LOOKUP, None)
if batch is None:
return
rpc = _datastore_lookup(batch.keys())
_eventloop.queue_rpc(rpc, BatchLookupCallback(batch))
class BatchLookupCallback:
"""Callback for processing the results of a call to Datastore Lookup.
Args:
batch (Dict[~datastore_v1.proto.entity_pb2.Key, List[~tasklets.Future]]): Mapping of keys
to futures for the batch request.
"""
def __init__(self, batch):
self.batch = batch
def __call__(self, rpc):
"""Process the results of a call to Datastore Lookup.
Each key in the batch will be in one of `found`, `missing`, or
`deferred`. `found` keys have their futures' results set with the
        protocol buffers for their entities. `missing` keys have their futures'
        results set with `_NOT_FOUND`, a sentinel value. `deferred` keys are
loaded into a new batch so they can be tried again.
Args:
rpc (grpc.Future): If not an exception, the result will be an
instance of
:class:`google.cloud.datastore_v1.datastore_pb.LookupResponse`
"""
batch = self.batch
# If RPC has resulted in an exception, propagate that exception to all
# waiting futures.
exception = rpc.exception()
if exception is not None:
for future in itertools.chain(*batch.values()):
future.set_exception(exception)
return
# Process results, which are divided into found, missing, and deferred
results = rpc.result()
# For all deferred keys, batch them up again with their original
# futures
if results.deferred:
next_batch = _get_lookup_batch()
for key in results.deferred:
next_batch.setdefault(key, []).extend(batch[key])
# For all missing keys, set result to _NOT_FOUND and let callers decide
# how to handle
for result in results.missing:
key = result.entity.key
for future in batch[key]:
future.set_result(_NOT_FOUND)
# For all found entities, set the result on their corresponding futures
for result in results.found:
entity = result.entity
for future in batch[entity.key]:
future.set_result(entity)
def _datastore_lookup(keys):
"""Issue a Lookup call to Datastore using gRPC.
Args:
keys (Iterable[datastore_v1.proto.entity_pb2.Key]): The entity keys to look up.
Returns:
:class:`grpc.Future`: Future object for eventual result of lookup.
"""
client = _runstate.current().client
request = datastore_pb2.LookupRequest(
project_id=client.project, keys=list(keys)
)
api = stub()
return api.Lookup.future(request)
| 32.518135
| 97
| 0.675271
|
f151dfc704911b4bdc761015aa29cc491bf69be4
| 9,963
|
py
|
Python
|
contrib/spendfrom/spendfrom.py
|
Tyron888/EternCoin
|
3ef9655dcab278e50de048a561bcab4cddd3232b
|
[
"MIT"
] | null | null | null |
contrib/spendfrom/spendfrom.py
|
Tyron888/EternCoin
|
3ef9655dcab278e50de048a561bcab4cddd3232b
|
[
"MIT"
] | null | null | null |
contrib/spendfrom/spendfrom.py
|
Tyron888/EternCoin
|
3ef9655dcab278e50de048a561bcab4cddd3232b
|
[
"MIT"
] | 1
|
2022-03-15T23:27:06.000Z
|
2022-03-15T23:27:06.000Z
|
#!/usr/bin/env python
#
# Use the raw transactions API to spend ETENs received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to an eternd or etern-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the etern data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Etern/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Etern")
return os.path.expanduser("~/.etern")
def read_bitcoin_config(dbdir):
"""Read the etern.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "etern.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
"""Connect to a etern JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 27004 if testnet else 27002
connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the eternd we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(eternd):
info = eternd.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
eternd.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = eternd.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(eternd):
address_summary = dict()
address_to_account = dict()
for info in eternd.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = eternd.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = eternd.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-etern-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
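# Worked example of the greedy selection above (values are illustrative):
# needed = Decimal("1.5") with inputs worth 1.0 and 0.7 selects both outputs
# and returns change of 0.2, i.e. (outputs, Decimal("0.2")).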
def create_tx(eternd, fromaddresses, toaddress, amount, fee):
all_coins = list_available(eternd)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to eternd.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = eternd.createrawtransaction(inputs, outputs)
signed_rawtx = eternd.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(eternd, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = eternd.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(eternd, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = eternd.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(eternd, txinfo)
total_out = compute_amount_out(txinfo)
fee = total_in-total_out  # define fee; it was previously referenced below without being set
if fee > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get ETENs from")
parser.add_option("--to", dest="to", default=None,
help="address to get send ETENs to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of etern.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_bitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
eternd = connect_JSON(config)
if options.amount is None:
address_summary = list_available(eternd)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while not unlock_wallet(eternd):
pass # Keep asking for passphrase until they get it right
txdata = create_tx(eternd, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(eternd, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = eternd.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
| 37.175373
| 111
| 0.629931
|
19d4bf7376b9c686d75a16db49c75ec102e38eb0
| 1,543
|
py
|
Python
|
setup.py
|
Quarticai/graphene-django-extras
|
66a59b6b255244dcb88bbcb858c38b14962502ad
|
[
"MIT"
] | null | null | null |
setup.py
|
Quarticai/graphene-django-extras
|
66a59b6b255244dcb88bbcb858c38b14962502ad
|
[
"MIT"
] | 77
|
2021-06-28T11:12:22.000Z
|
2022-03-14T09:07:39.000Z
|
setup.py
|
Quarticai/graphene-django-extras
|
66a59b6b255244dcb88bbcb858c38b14962502ad
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import sys
import os
from os.path import dirname
from setuptools import find_packages, setup
BASE_DIR = os.path.dirname(os.path.realpath(__file__))
BASE_PKG_DIR = BASE_DIR
if os.path.exists(BASE_PKG_DIR) and os.path.isdir(BASE_PKG_DIR):
sys.path.insert(0, BASE_PKG_DIR)
else:
raise ValueError('Error in path')
def get_file_contents(filename):
with open(os.path.join(dirname(__file__), filename)) as fp:
return fp.read()
def get_install_requires():
requirements = get_file_contents('requirements.txt')
install_requires = []
for line in requirements.split('\n'):
line = line.strip()
if line and not line.startswith('-'):
install_requires.append(line)
return install_requires
setup(
name='quartic_graphene_django_extras',
description='Extra helper plugins for Graphene',
author='Quartic.ai Engineering Team',
long_description=get_file_contents('README.md'),
author_email='tech@quartic.ai',
url='https://github.com/eamigo86/graphene-django-extras/',
classifiers=[
'Programming Language :: Python :: 3.9'
],
install_requires=get_install_requires(),
include_package_data=True,
keywords='deming core',
packages=find_packages(exclude=['tests*']),
package_data={
# If any package contains *.so or *.pyi or *.lic files or *.key files,
# include them:
"": ["*.so", "*.pyi", "*.lic", "*.key"],
},
)
| 29.113208
| 82
| 0.645496
|
c14bb6b735e5688a2ff2fcaecb7c763a53521733
| 17,239
|
py
|
Python
|
src/ixdat/readers/zilien.py
|
matenestor/ixdat
|
ac3ff81f5c92f2d4bbede5fc9fc5a1df2a5eb34f
|
[
"MIT"
] | null | null | null |
src/ixdat/readers/zilien.py
|
matenestor/ixdat
|
ac3ff81f5c92f2d4bbede5fc9fc5a1df2a5eb34f
|
[
"MIT"
] | 1
|
2022-03-31T09:54:40.000Z
|
2022-03-31T09:54:40.000Z
|
src/ixdat/readers/zilien.py
|
matenestor/ixdat
|
ac3ff81f5c92f2d4bbede5fc9fc5a1df2a5eb34f
|
[
"MIT"
] | null | null | null |
"""Readers for files produces by the Zilien software from Spectro Inlets"""
import re
from collections import defaultdict
from pathlib import Path
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from ..data_series import DataSeries, TimeSeries, ValueSeries, Field
from ..techniques import ECMSMeasurement, MSMeasurement, ECMeasurement, Measurement
from ..techniques.ms import MSSpectrum
from .reading_tools import timestamp_string_to_tstamp, FLOAT_MATCH
ZILIEN_TIMESTAMP_FORM = "%Y-%m-%d %H_%M_%S" # like 2021-03-15 18_50_10
ZILIEN_EC_ALIASES = {
"t": ["Potential time [s]"],
"raw_potential": ["Voltage [V]"],
"raw_current": ["Current [mA]"],
"cycle": ["Cycle [n]"],
}
# The Zilien .tsv files can be loaded as three different experiment types. These are the
# aliases for each of them
ZILIEN_ALIASES = {
ECMSMeasurement: ZILIEN_EC_ALIASES,
MSMeasurement: {},
ECMeasurement: ZILIEN_EC_ALIASES,
}
# TODO: When, in the future, Zilien files include the whole EC dataset, remove the
# unflattering example presently in the docs.
# https://github.com/ixdat/ixdat/pull/30/files#r810087496
def parse_metadata_line(line):
"""Parse a single metadata line and return the name, value"""
# The metadata format is a 5 column format:
name, comment, attach_to_series, type_as_str, value = line.strip("\n").split("\t")
# Since, as yet, ixdat doesn't support per-series metadata, we prefix the per-series
# metadata item names with the name of the series, to avoid name clashes while still
# preserving the data
if attach_to_series:
full_name = f"{attach_to_series}_{name}"
else:
full_name = name
# Type convert the metadata (the specification for version 1 also has a color type,
# but as of yet it is not used)
if type_as_str == "string":
return full_name, value
elif type_as_str == "int":
return full_name, int(value)
elif type_as_str == "double":
return full_name, float(value)
elif type_as_str == "bool":
return full_name, value == "true"
else:
raise TypeError(f"Unknown metadata type {type_as_str} for {name}")
def to_snake_case(string):
"""Turn a space separated string into a snake_case string"""
return string.lower().replace(" ", "_")
COLUMN_HEADER_RE = re.compile(r"^(.+?) \[(.+?)\]$")  # Matches: "{name} [{unit}]"
MASS_SERIES_RE = re.compile(r"^C[0-9]+M([0-9]+)$") # Matches: "C??M{mass}"
def to_mass(string):
"""Return mass (i.e. "18") if `string` matches the C0M18 mass series form or None"""
possible_match = MASS_SERIES_RE.match(string)
if possible_match:
return possible_match.group(1)
return None
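# For example, to_mass("C0M18") returns "18", while a non-mass header such
# as "Iongauge value" returns None.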
class ZilienTSVReader:
"""Class for reading files saved by Spectro Inlets' Zilien software"""
def __init__(self):
self._path_to_file = None
self._cls = None
self._measurement = None
def read(self, path_to_file, cls=ECMSMeasurement, name=None, **kwargs):
"""Read a Zilien file
Args:
path_to_file (Path or str): The path of the file to read
cls (Measurement): The measurement class to read the file as. Zilien tsv
files can be read as an ECMS measurement, an MS measurement (which
will exclude the EC series from the measurement) or an ECMeasurement
(which will exclude the MS series from the measurement). To avoid
importing classes, this behavior can also be controlled by setting the
`technique` argument to either 'EC-MS', 'MS' or 'EC'. The default is
ECMSMeasurement.
name (str): The name of the measurement. Will default to the part of the
filename before the '.tsv' extension
kwargs: All remaining keyword arguments will be passed onto the `__init__` of
the Measurement
"""
if self._path_to_file:
print(
f"This {self.__class__.__name__} has already read {self._path_to_file}. "
"Returning the measurement resulting from the original read. "
"Use a new Reader if you want to read another file."
)
return self._measurement
if "technique" in kwargs:
if kwargs["technique"] == "EC-MS":
cls = ECMSMeasurement
if kwargs["technique"] == "EC":
cls = ECMeasurement
if kwargs["technique"] == "MS":
cls = MSMeasurement
else:
if cls is Measurement:
cls = ECMSMeasurement
if issubclass(cls, ECMSMeasurement):
kwargs["technique"] = "EC-MS"
elif issubclass(cls, ECMeasurement):
kwargs["technique"] = "EC"
elif issubclass(cls, MSMeasurement):
kwargs["technique"] = "MS"
self._cls = cls
self._path_to_file = Path(path_to_file)
# Extract timestamp from filename on form: 2021-04-20 11_16_18 Measurement name
file_stem = self._path_to_file.stem # Part of filename before the extension
timestamp = timestamp_string_to_tstamp(
timestamp_string=" ".join(file_stem.split(" ")[:2]),
form=ZILIEN_TIMESTAMP_FORM,
)
# Parse metadata items
with open(self._path_to_file, encoding="utf-8") as file_handle:
metadata, series_headers, column_headers = self._read_metadata(file_handle)
file_position = file_handle.tell()
# Read raw data
with open(self._path_to_file, "rb") as file_handle:
file_handle.seek(file_position)
data = np.genfromtxt(file_handle, delimiter="\t")
# Extract series data and form series
series, aliases = self._form_series(
data, metadata, timestamp, series_headers, column_headers
)
for standard_name, general_aliases in ZILIEN_ALIASES[self._cls].items():
aliases[standard_name] += general_aliases
aliases = dict(aliases) # Convert from defaultdict to normal dict
measurement_kwargs = {
"name": name or file_stem,
"series_list": series,
"aliases": aliases,
"tstamp": timestamp,
"metadata": metadata,
}
measurement_kwargs.update(kwargs)
self._measurement = cls(**measurement_kwargs)
return self._measurement
def _form_series(self, data, metadata, timestamp, series_headers, column_headers):
"""Form the series and series aliases
Args:
data (numpy.array): The data block of the tsv file as an array
metadata (dict): Extracted metadata
timestamp (float): The timestamp of the measurement
series_headers (List[str]): List of series headers; slots with empty strings
mean the same as the last non-empty one
column_headers (List[str]): List of column headers
Returns:
List[Series], DefaultDict(str, List[str]): List of series and dict of aliases
"""
series = []
last_time_series = None
aliases = defaultdict(list)
last_series_header = ""
# Iterate over numbered series and column headers
for column_number, (series_header, column_header) in enumerate(
zip(series_headers, column_headers)
):
last_series_header = series_header or last_series_header
# Skip series not relevant for the type of measurement
if not issubclass(self._cls, ECMeasurement) and last_series_header == "pot":
continue
elif not issubclass(self._cls, MSMeasurement) and to_mass(
last_series_header
):
continue
# Pluck column of the correct length out from the data block and form series
count = metadata[f"{last_series_header}_{last_series_header}_count"]
column_data = data[:count, column_number]
# Form the series_name, unit, aliases and update aliases
series_name, unit, standard_name = self._form_names_and_unit(
last_series_header, column_header
)
if standard_name:
aliases[standard_name].append(series_name)
# Form series kwargs and the series
series_kwargs = {
"name": series_name,
"unit_name": unit,
"data": column_data,
}
if column_header == "Time [s]": # Form TimeSeries
column_series = TimeSeries(**series_kwargs, tstamp=timestamp)
last_time_series = column_series
else:
column_series = ValueSeries(**series_kwargs, tseries=last_time_series)
series.append(column_series)
return series, aliases
@staticmethod
def _form_names_and_unit(series_header, column_header):
"""Return names and unit
Args:
series_header (str): Something like "Iongauge value" or "C0M18"
column_header (str): Something like "Time [s]" or "Pressure [mbar]"
Returns:
str, str, Optional[str]: Return series_name, unit, standard_name
"""
standard_name = None
if column_header == "Time [s]": # Form TimeSeries
unit = "s"
if series_header == "pot":
name = f"Potential {column_header.lower()}"
else:
name = f"{series_header} {column_header.lower()}"
else: # ValueSeries
# Perform a bit of reasonable name adaption, first break name and unit out
# from the column header on the form: Pressure [mbar]
column_components_match = COLUMN_HEADER_RE.match(column_header)
if column_components_match:
_, unit = column_components_match.groups()
else:
_, unit = column_header, ""
# Is the column a "setpoint" or "value" type?
setpoint_or_value = None
for option in ("setpoint", "value"):
if series_header.endswith(option):
setpoint_or_value = option
if setpoint_or_value:
# In that case, the column header is something like "Flow [ml/min]" where
# "Flow" is unnecessary, because that is apparent from the unit
name = f"{series_header} [{unit}]"
elif to_mass(series_header) is not None:
mass = to_mass(series_header)
name = f"M{mass} [{unit}]"
standard_name = f"M{mass}"
else:
name = column_header
return name, unit, standard_name
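# For example (derived from the rules above): series_header "C0M18" with
# column_header "Current [A]" yields ("M18 [A]", "A", "M18"), while
# ("pot", "Time [s]") yields ("Potential time [s]", "s", None).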
@staticmethod
def _read_metadata(file_handle):
"""Read metadata from `file_handle`"""
metadata = {}
# The first lines always include 3 pieces of information about the length of the
# header and sometimes a file format version. Start by reading the first 3 lines,
# to figure out if the version is amongst them
next_line_number_to_read = 3
for n in range(next_line_number_to_read):
key, value = parse_metadata_line(file_handle.readline())
metadata[key] = value
# If the version is among the first three lines, then we need to read one more
# line of metadata before we're guaranteed to have the num_header_lines
if "file_format_version" in metadata:
key, value = parse_metadata_line(file_handle.readline())
metadata[key] = value
next_line_number_to_read += 1
else:
metadata["file_format_version"] = 1
for _ in range(next_line_number_to_read, metadata["num_header_lines"]):
key, value = parse_metadata_line(file_handle.readline())
metadata[key] = value
series_headers = file_handle.readline().strip("\n").split("\t")
column_headers = file_handle.readline().strip("\n").split("\t")
return metadata, series_headers, column_headers
class ZilienTMPReader:
"""A class for stitching the files in a Zilien tmp directory to an ECMSMeasurement
This is necessary because Zilien often crashes, leaving only the tmp directory.
This is less advanced but more readable than the Spectro Inlets stitching solution.
"""
def __init__(self, path_to_tmp_dir=None):
self.path_to_tmp_dir = Path(path_to_tmp_dir) if path_to_tmp_dir else None
def read(self, path_to_tmp_dir, cls=None, **kwargs):
"""Make a measurement from all the single-value .tsv files in a Zilien tmp dir
Args:
path_to_tmp_dir (Path or str): The path to the tmp dir
cls (Measurement class): Defaults to ECMSMeasurement
"""
if path_to_tmp_dir:
self.path_to_tmp_dir = Path(path_to_tmp_dir)
cls = cls or ECMSMeasurement
name = self.path_to_tmp_dir.parent.name
timestamp_string = name[:19] # the zilien timestamp is the first 19 chars
tstamp = timestamp_string_to_tstamp(timestamp_string, form=ZILIEN_TIMESTAMP_FORM)
series_list = []
for tmp_file in self.path_to_tmp_dir.iterdir():
series_list += series_list_from_tmp(tmp_file)
obj_as_dict = {
"name": name,
"tstamp": tstamp,
"series_list": series_list,
"technique": "EC-MS",
"reader": self,
}
obj_as_dict.update(kwargs)
return cls.from_dict(obj_as_dict)
def series_list_from_tmp(path_to_file):
"""Return [ValueSeries, TimeSeries] with the data in a zilien tmp .tsv file"""
file_name = Path(path_to_file).name
timestamp_string = file_name[:19] # the zilien timestamp form is 19 chars long
tstamp = timestamp_string_to_tstamp(timestamp_string, form=ZILIEN_TIMESTAMP_FORM)
column_match = re.search(r"\.([^\.]+)\.data", file_name)
if not column_match:
print(f"could not find column name in {path_to_file}")
return []
v_name = column_match.group(1)
mass_match = re.search("M[0-9]+", v_name)
if mass_match:
v_name = mass_match.group()
unit = "A"
else:
unit = None
t_name = v_name + "-x"
df = pd.read_csv(path_to_file, delimiter="\t", names=[t_name, v_name], header=0)
t_data, v_data = df[t_name].to_numpy(), df[v_name].to_numpy()
tseries = TimeSeries(name=t_name, unit_name="s", data=t_data, tstamp=tstamp)
vseries = ValueSeries(name=v_name, unit_name=unit, data=v_data, tseries=tseries)
return [tseries, vseries]
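# For example, a tmp file named
# "2021-03-15 18_50_10 my measurement.C0M18.data.tsv" (hypothetical name)
# yields column name "C0M18", which the mass regex shortens to v_name "M18"
# with unit "A" and a paired time series named "M18-x".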
class ZilienSpectrumReader:
"""A reader for individual Zilien spectra
TODO: A Zilien reader which loads all spectra at once in a SpectrumSeries object
"""
def __init__(self, path_to_spectrum=None):
self.path_to_spectrum = Path(path_to_spectrum) if path_to_spectrum else None
def read(self, path_to_spectrum, cls=None, **kwargs):
"""Reat a Zilien spectrum.
FIXME: This reader was written hastily and could be designed better.
Args:
path_to_tmp_dir (Path or str): the path to the tmp dir
cls (Spectrum class): Defaults to MSSpectrum
kwargs: Key-word arguments are passed on ultimately to cls.__init__
"""
if path_to_spectrum:
self.path_to_spectrum = Path(path_to_spectrum)
cls = cls or MSSpectrum
df = pd.read_csv(
path_to_spectrum,
header=9,
delimiter="\t",
)
x_name = "Mass [AMU]"
y_name = "Current [A]"
x = df[x_name].to_numpy()
y = df[y_name].to_numpy()
with open(self.path_to_spectrum, "r") as f:
for i in range(10):
line = f.readline()
if "Mass scan started at [s]" in line:
tstamp_match = re.search(FLOAT_MATCH, line)
tstamp = float(tstamp_match.group())
xseries = DataSeries(data=x, name=x_name, unit_name="m/z")
field = Field(
data=np.array(y),
name=y_name,
unit_name="A",
axes_series=[
xseries,
],
)
obj_as_dict = {
"name": path_to_spectrum.name,
"technique": "MS",
"field": field,
"reader": self,
"tstamp": tstamp,
}
obj_as_dict.update(kwargs)
return cls.from_dict(obj_as_dict)
def module_demo():
"""Module demo here.
To run this module in PyCharm, open Run Configuration and set
Module name = ixdat.readers.zilien,
and *not*
Script path = ...
"""
path_to_test_file = (
Path(__file__).parent.resolve().parent.parent.parent
/ "test_data"
/ "Zilien version 1"
/ "2022-04-06 16_17_23 full set.tsv"
)
ecms_measurement = Measurement.read(
reader="zilien",
path_to_file=path_to_test_file,
)
ecms_measurement.plot_measurement()
plt.show()
if __name__ == "__main__":
module_demo()
| 38.139381
| 89
| 0.617495
|
a0b656050daa42642c471ae98a46d7306ce32614
| 281
|
py
|
Python
|
4 kyu/Number of integer partitions.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 6
|
2020-09-03T09:32:25.000Z
|
2020-12-07T04:10:01.000Z
|
4 kyu/Number of integer partitions.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | 1
|
2021-12-13T15:30:21.000Z
|
2021-12-13T15:30:21.000Z
|
4 kyu/Number of integer partitions.py
|
mwk0408/codewars_solutions
|
9b4f502b5f159e68024d494e19a96a226acad5e5
|
[
"MIT"
] | null | null | null |
def partitions(n):
return helper(n, n)
def helper(n, n2, memo={}):
if n2==0:
return 0
elif n<=0:
return 1 if n==0 else 0
if (n, n2) in memo:
return memo[(n, n2)]
memo[(n, n2)] = helper(n, n2-1)+helper(n-n2, n2)
return memo[(n, n2)]
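# For example, partitions(5) == 7, counting 5, 4+1, 3+2, 3+1+1, 2+2+1,
# 2+1+1+1 and 1+1+1+1+1; helper(n, n2) counts partitions of n into parts
# no larger than n2, memoized across calls.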
| 25.545455
| 52
| 0.512456
|
d7a8b7c999a95b4860382af5bf9fec5305d77dc2
| 7,280
|
py
|
Python
|
multitool/win.py
|
alexpdev/multitool
|
8a635410d5010d9be2e5118b887e26244589a5df
|
[
"MIT"
] | null | null | null |
multitool/win.py
|
alexpdev/multitool
|
8a635410d5010d9be2e5118b887e26244589a5df
|
[
"MIT"
] | null | null | null |
multitool/win.py
|
alexpdev/multitool
|
8a635410d5010d9be2e5118b887e26244589a5df
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
################################################################################
## Generated by alexpdev
##
## PySide6
##
## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
## IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
## FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
## AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
## LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
## OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
## SOFTWARE.
################################################################################
import json
import sys
from PySide6.QtCore import *
from PySide6.QtGui import *
from PySide6.QtWidgets import *
from multitool.wordle import decypher_input, filter_word, filter_words
class Solver:
def __init__(self, l):
self.word_length = l
self.words = [i for i in json.load(open("Words_Length.json")) if len(i) == l]
self.hints = set()
self.wrong = set()
self.correct = ["" for _ in range(l)]
self.pos = {i: "" for i in range(l)}
def decypher(self, guess, result):
decypher_input(self.hints, self.wrong, self.correct, self.pos, guess, result)
def filter_words(self):
for word in self.words[::-1]:
if not filter_word(
word,
self.hints,
self.pos,
self.wrong,
self.correct,
):
self.words.remove(word)
return self.words
class Window(QMainWindow):
style = """
QWidget {
background-color: #333;
color: white;
font-size: 11pt;
}
QListWidget {
background-color: #124;
color: white;
}
QPushButton {
border: 1px solid white;
background-color: #242;
color: white;
margin-top: 5px;
padding: 3px;
}
QSpinBox {
background-color: #124;
color: white;
border: #999997 1px solid;
}
QToolButton {
border: 1px solid white;
background-color: #242;
color: white;
}
QLineEdit {
background-color: #368;
color: white;
border: #aaaaaa solid 1px;
}
"""
def __init__(self, parent=None):
super().__init__(parent=parent)
self.resize(354, 600)
self.setStyleSheet(self.style)
self.word_length = 5
self.solver = Solver(self.word_length)
self.central = QWidget(parent=self)
self.setWindowTitle("Wordle Helper")
self.setWindowIcon(QIcon("./assets/NYT-Wordle-Icon-32.png"))
self.setCentralWidget(self.central)
self.layout = QVBoxLayout()
self.central.setLayout(self.layout)
self.listwidget = QListWidget(parent=self)
self.toolbutton = QToolButton(parent=self)
self.toolbutton.setText("Start")
self.spinBox = SpinBox(parent=self)
self.word_length_label = QLabel("Word Length")
self.button = QPushButton("Submit")
self.hlayout1 = QHBoxLayout()
self.hlayout1.addWidget(self.word_length_label)
self.hlayout1.addWidget(self.spinBox)
self.layout.addLayout(self.hlayout1)
self.layout.addWidget(self.listwidget)
self.hlayout1.addWidget(self.toolbutton)
self.hlayout2 = QHBoxLayout()
self.char_edits = []
for i in range(self.word_length):
char_edit = CharEdit(parent=self, position=i)
char_edit.setMaxLength(1)
self.char_edits.append(char_edit)
self.hlayout2.addWidget(char_edit)
self.layout.addLayout(self.hlayout2)
self.layout.addWidget(self.button)
self.menubar = QMenuBar(parent=self)
self.menubar.setGeometry(QRect(0, 0, 354, 22))
self.setMenuBar(self.menubar)
self.statusbar = QStatusBar(parent=self)
self.setStatusBar(self.statusbar)
self.menubar.setObjectName("menubar")
self.setObjectName("MainWindow")
self.central.setObjectName("centralwidget")
self.layout.setObjectName("verticalLayout")
self.word_length_label.setObjectName("wordlengthlable")
self.hlayout1.setObjectName("horizontalLayout")
self.spinBox.setObjectName("spinBox")
self.listwidget.setObjectName("listWidget")
self.hlayout2.setObjectName("hlayout2")
self.button.setObjectName("pushButton")
self.statusbar.setObjectName("statusbar")
self.toolbutton.pressed.connect(self.start_wordle)
self.button.pressed.connect(self.filter_results)
def start_wordle(self):
starters = filter_words(self.word_length)
self.solver = Solver(self.word_length)
self.listwidget.addItems(starters)
def filter_results(self):
items = self.listwidget.selectedItems()
if len(items) != 1:
return
guess = items[0].text()
result = ""
for inp in self.char_edits:
t = inp.text()
if not t or t == " ":
return
result += t.upper()
self.solver.decypher(guess, result)
words = self.solver.filter_words()
self.listwidget.clear()
self.listwidget.addItems(words)
for box in self.char_edits:
box.clear()
class SpinBox(QSpinBox):
def __init__(self, parent=None):
super().__init__(parent=parent)
self.window = parent
self.setMaximum(15)
self.setMinimum(3)
self.setValue(self.window.word_length)
self.valueChanged.connect(self.edit_inputs)
def edit_inputs(self, val):
if val < self.window.word_length:
diff = self.window.word_length - val
while diff > 0:
diff -= 1
widget = self.window.char_edits[-1]
self.window.char_edits = self.window.char_edits[:-1]
self.window.hlayout2.removeWidget(widget)
widget.hide()
del widget
self.window.word_length = len(self.window.char_edits)
else:
diff = val - self.window.word_length
while diff > 0:
lineedit = CharEdit(self.window, position=len(self.window.char_edits))
self.window.char_edits.append(lineedit)
self.window.hlayout2.addWidget(lineedit)
self.window.word_length += 1
diff -= 1
self.window.solver = Solver(self.window.word_length)
class CharEdit(QLineEdit):
def __init__(self, parent=None, position=0):
super().__init__(parent=parent)
self.window = parent
self.position = position
self.setMaxLength(1)
self.textEdited.connect(self.select_next)
def grab_focus(self, i):
if self.position == i:
self.home(0)
self.selectAll()
def select_next(self, text):
if text == "":
return
self.focusNextChild()
def start_gui():
app = QApplication(sys.argv)
window = Window()
window.show()
app.exec()
| 33.090909
| 85
| 0.593681
|
0f60672e8d65b236183c32a95652143a11843ccb
| 1,452
|
py
|
Python
|
examples/PagePostCommentsEdge.py
|
GDGSNF/facebook-python-business-sdk
|
95e64a10d987d7a53963d17036b6730d07f84ab5
|
[
"CNRI-Python"
] | 576
|
2018-05-01T19:09:32.000Z
|
2022-03-31T11:45:11.000Z
|
examples/PagePostCommentsEdge.py
|
GDGSNF/facebook-python-business-sdk
|
95e64a10d987d7a53963d17036b6730d07f84ab5
|
[
"CNRI-Python"
] | 217
|
2018-05-03T07:31:59.000Z
|
2022-03-29T14:19:52.000Z
|
examples/PagePostCommentsEdge.py
|
GDGSNF/facebook-python-business-sdk
|
95e64a10d987d7a53963d17036b6730d07f84ab5
|
[
"CNRI-Python"
] | 323
|
2018-05-01T20:32:26.000Z
|
2022-03-29T07:05:12.000Z
|
# Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from facebook_business.adobjects.pagepost import PagePost
from facebook_business.adobjects.comment import Comment
from facebook_business.api import FacebookAdsApi
access_token = '<ACCESS_TOKEN>'
app_secret = '<APP_SECRET>'
app_id = '<APP_ID>'
id = '<PAGE_POST_ID>'
FacebookAdsApi.init(access_token=access_token)
fields = [
]
params = {
}
print(PagePost(id).get_comments(
fields=fields,
params=params,
))
| 38.210526
| 76
| 0.783747
|
79b503dcc89e695afad1d0092dc342f0d64e7b7f
| 257
|
py
|
Python
|
numpy_test/numpy_c.py
|
linyang23/AI-Python-Code-Implementations-with-Notes
|
af729c652a301b199d568d5989adc5ed6413dcf9
|
[
"MIT"
] | null | null | null |
numpy_test/numpy_c.py
|
linyang23/AI-Python-Code-Implementations-with-Notes
|
af729c652a301b199d568d5989adc5ed6413dcf9
|
[
"MIT"
] | null | null | null |
numpy_test/numpy_c.py
|
linyang23/AI-Python-Code-Implementations-with-Notes
|
af729c652a301b199d568d5989adc5ed6413dcf9
|
[
"MIT"
] | null | null | null |
# coding=utf-8
import numpy as np
nd5=np.random.random([3,3]) # generate a 3-row, 3-column matrix of random floats
print(nd5)
print(type(nd5))
'''Output
[[0.30996243 0.70525938 0.23778251]
[0.36607574 0.07691564 0.25879282]
[0.78231402 0.64058363 0.44167507]]
<class 'numpy.ndarray'>
'''
| 21.416667
| 50
| 0.673152
|
b0c6d7c62d22da661257a7c455f120a52cb537f0
| 948
|
py
|
Python
|
SmartAPI/rdf/List.py
|
Kreastr/SmartAPI-HEILA
|
97dbe9e6e27267c60a4f94f60692d5f391e2ef7f
|
[
"BSD-2-Clause"
] | null | null | null |
SmartAPI/rdf/List.py
|
Kreastr/SmartAPI-HEILA
|
97dbe9e6e27267c60a4f94f60692d5f391e2ef7f
|
[
"BSD-2-Clause"
] | null | null | null |
SmartAPI/rdf/List.py
|
Kreastr/SmartAPI-HEILA
|
97dbe9e6e27267c60a4f94f60692d5f391e2ef7f
|
[
"BSD-2-Clause"
] | null | null | null |
"""
A compatibility class that emulates an RDF List
"""
import sys
try:
from rdflib import Graph
from rdflib import BNode
except ImportError:
print("RDFLib is missing from your Python installation")
print("Install it with")
print("> pip install rdflib")
sys.exit()
class List(object):
def __init__(self):
self.elements = []
self.baseObject = None
def parse(self):
pass
def add_items(self, items):
if isinstance(items, list):
self.elements.extend(items)
else:
self.elements.append(items)
def get_items(self):
return self.elements
def get_item(self, index):
return self.elements[index]
def setBaseObject(self, baseObj):
self.baseObject = baseObj
def getBaseObject(self):
return self.baseObject
def hasBaseObject(self):
return self.baseObject is not None
| 21.545455
| 59
| 0.605485
|
c073538d8eb1c5c49f5d5903d2591edbbf218e0e
| 5,562
|
py
|
Python
|
models/model_zoo/modify_DeepLabV3plus.py
|
chansoopark98/hole-detection
|
76e47057b1e186d4e2f5db2b5b2bb1074b849078
|
[
"MIT"
] | null | null | null |
models/model_zoo/modify_DeepLabV3plus.py
|
chansoopark98/hole-detection
|
76e47057b1e186d4e2f5db2b5b2bb1074b849078
|
[
"MIT"
] | null | null | null |
models/model_zoo/modify_DeepLabV3plus.py
|
chansoopark98/hole-detection
|
76e47057b1e186d4e2f5db2b5b2bb1074b849078
|
[
"MIT"
] | null | null | null |
from tensorflow.keras.layers import (AveragePooling2D,
MaxPooling2D, SeparableConv2D, UpSampling2D, Activation, BatchNormalization,
GlobalAveragePooling2D, Conv2D, Dropout, Concatenate, multiply, Add, concatenate,
DepthwiseConv2D, Reshape, ZeroPadding2D, Dense, GlobalMaxPooling2D, Permute, Lambda, Subtract)
import tensorflow.keras.backend as K
import tensorflow as tf
import tensorflow_addons as tfa
MOMENTUM = 0.99
EPSILON = 1e-3
DECAY = None
BN = BatchNormalization
CONV_KERNEL_INITIALIZER = tf.keras.initializers.VarianceScaling(scale=1.0, mode="fan_out", distribution="truncated_normal")
atrous_rates= (6, 12, 18)
def deepLabV3Plus(features, activation='swish'):
skip1, x = features # c1 48 / c2 64
# Image Feature branch
shape_before = tf.shape(x)
b4 = GlobalAveragePooling2D()(x)
b4_shape = tf.keras.backend.int_shape(b4)
# from (b_size, channels)->(b_size, 1, 1, channels)
b4 = Reshape((1, 1, b4_shape[1]))(b4)
b4 = Conv2D(256, (1, 1), padding='same',
kernel_regularizer=DECAY,
use_bias=False, name='image_pooling')(b4)
b4 = BatchNormalization(name='image_pooling_BN', epsilon=1e-5)(b4)
b4 = Activation(activation)(b4)
# upsample. have to use compat because of the option align_corners
size_before = tf.keras.backend.int_shape(x)
b4 = tf.keras.layers.experimental.preprocessing.Resizing(
*size_before[1:3], interpolation="bilinear"
)(b4)
# b4 = UpSampling2D(size=(32, 64), interpolation="bilinear")(b4)
# simple 1x1
b0 = Conv2D(256, (1, 1), padding='same',
kernel_regularizer=DECAY,
use_bias=False, name='aspp0')(x)
# b0 = BatchNormalization(name='aspp0_BN', epsilon=1e-5)(b0)
b0 = BN(name='aspp0_BN', epsilon=1e-5)(b0)
b0 = Activation(activation, name='aspp0_activation')(b0)
b1 = SepConv_BN(x, 256, 'aspp1',
rate=atrous_rates[0], depth_activation=True, epsilon=1e-5)
# rate = 12 (24)
b2 = SepConv_BN(x, 256, 'aspp2',
rate=atrous_rates[1], depth_activation=True, epsilon=1e-5)
# rate = 18 (36)
b3 = SepConv_BN(x, 256, 'aspp3',
rate=atrous_rates[2], depth_activation=True, epsilon=1e-5)
# concatenate ASPP branches & project
x = Concatenate()([b4, b0, b1, b2, b3])
x = Conv2D(256, (1, 1), padding='same',
kernel_regularizer=DECAY,
use_bias=False, name='concat_projection')(x)
# x = BatchNormalization(name='concat_projection_BN', epsilon=1e-5)(x)
x = BN(name='concat_projection_BN', epsilon=1e-5)(x)
x = Activation(activation)(x)
x = Dropout(0.1)(x)
skip_size = tf.keras.backend.int_shape(skip1)
x = tf.keras.layers.experimental.preprocessing.Resizing(
*skip_size[1:3], interpolation="bilinear"
)(x)
# aux_temp_aspp = x
# x = UpSampling2D((4,4), interpolation='bilinear')(x)
dec_skip1 = Conv2D(48, (1, 1), padding='same',
kernel_regularizer=DECAY,
use_bias=False, name='feature_projection0')(skip1)
# dec_skip1 = BatchNormalization(
# name='feature_projection0_BN', epsilon=1e-5)(dec_skip1)
dec_skip1 = BN(
name='feature_projection0_BN', epsilon=1e-5)(dec_skip1)
dec_skip1 = Activation(activation)(dec_skip1)
x = Concatenate()([x, dec_skip1])
x = SepConv_BN(x, 256, 'decoder_conv0',
depth_activation=True, epsilon=1e-5)
x = SepConv_BN(x, 256, 'decoder_conv1',
depth_activation=True, epsilon=1e-5)
return x
def SepConv_BN(x, filters, prefix, stride=1, kernel_size=3, rate=1, depth_activation=False, epsilon=1e-3):
""" SepConv with BN between depthwise & pointwise. Optionally add activation after BN
Implements right "same" padding for even kernel sizes
Args:
x: input tensor
filters: num of filters in pointwise convolution
prefix: prefix before name
stride: stride at depthwise conv
kernel_size: kernel size for depthwise convolution
rate: atrous rate for depthwise convolution
depth_activation: flag to use activation between depthwise & pointwise convs
epsilon: epsilon to use in BN layer
"""
activation = 'swish'
if stride == 1:
depth_padding = 'same'
else:
kernel_size_effective = kernel_size + (kernel_size - 1) * (rate - 1)
pad_total = kernel_size_effective - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
x = ZeroPadding2D((pad_beg, pad_end))(x)
depth_padding = 'valid'
if not depth_activation:
x = Activation(activation)(x)
x = DepthwiseConv2D((kernel_size, kernel_size), strides=(stride, stride), dilation_rate=(rate, rate),
kernel_regularizer=DECAY,
padding=depth_padding, use_bias=False, name=prefix + '_depthwise')(x)
# x = BatchNormalization(name=prefix + '_depthwise_BN', epsilon=epsilon)(x)
x = BN(name=prefix + '_depthwise_BN', epsilon=epsilon)(x)
if depth_activation:
x = Activation(activation)(x)
x = Conv2D(filters, (1, 1), padding='same',
kernel_regularizer=DECAY,
use_bias=False, name=prefix + '_pointwise')(x)
# x = BatchNormalization(name=prefix + '_pointwise_BN', epsilon=epsilon)(x)
x = BN(name=prefix + '_pointwise_BN', epsilon=epsilon)(x)
if depth_activation:
x = Activation(activation)(x)
return x
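# Worked padding example for the stride > 1 branch above: kernel_size=3 with
# rate=6 gives kernel_size_effective = 3 + 2*5 = 13, so pad_total = 12 and
# pad_beg = pad_end = 6 before the 'valid' depthwise convolution.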
| 41.819549
| 123
| 0.645811
|
f58626e3b57cd4ed8f5785a2dcc4cbd481159d28
| 506
|
py
|
Python
|
lib/ansiblelint/rules/LineTooLongRule.py
|
eahlstrom/ansible-lint
|
abc946f378a3690a4cb06eb9a7e9d60fb46fb35d
|
[
"MIT"
] | 1
|
2020-11-09T09:32:08.000Z
|
2020-11-09T09:32:08.000Z
|
lib/ansiblelint/rules/LineTooLongRule.py
|
eahlstrom/ansible-lint
|
abc946f378a3690a4cb06eb9a7e9d60fb46fb35d
|
[
"MIT"
] | 1
|
2019-03-23T14:17:22.000Z
|
2019-03-23T14:17:22.000Z
|
lib/ansiblelint/rules/LineTooLongRule.py
|
eahlstrom/ansible-lint
|
abc946f378a3690a4cb06eb9a7e9d60fb46fb35d
|
[
"MIT"
] | 1
|
2019-03-20T13:25:24.000Z
|
2019-03-20T13:25:24.000Z
|
# Copyright (c) 2016, Will Thames and contributors
# Copyright (c) 2018, Ansible Project
from ansiblelint import AnsibleLintRule
class LineTooLongRule(AnsibleLintRule):
id = '204'
shortdesc = 'Lines should be no longer than 160 chars'
description = (
'Long lines make code harder to read and '
'code review more difficult'
)
severity = 'VERY_LOW'
tags = ['formatting']
version_added = 'v4.0.0'
def match(self, file, line):
return len(line) > 160
| 25.3
| 58
| 0.660079
|
e5ba210fa9c2281972ddb38c9ec5957d10b22d22
| 38,618
|
py
|
Python
|
discord/app_commands/tree.py
|
Ryomen-Sukuna/discord.py
|
0bcb0d0e3ce395d42a5b1dae61b0090791ee018d
|
[
"MIT"
] | null | null | null |
discord/app_commands/tree.py
|
Ryomen-Sukuna/discord.py
|
0bcb0d0e3ce395d42a5b1dae61b0090791ee018d
|
[
"MIT"
] | null | null | null |
discord/app_commands/tree.py
|
Ryomen-Sukuna/discord.py
|
0bcb0d0e3ce395d42a5b1dae61b0090791ee018d
|
[
"MIT"
] | null | null | null |
"""
The MIT License (MIT)
Copyright (c) 2015-present Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from __future__ import annotations
import inspect
import sys
import traceback
from typing import (
Any,
TYPE_CHECKING,
Callable,
Coroutine,
Dict,
Generator,
Generic,
List,
Literal,
Optional,
Set,
Tuple,
TypeVar,
Union,
overload,
)
from collections import Counter
from .namespace import Namespace, ResolveKey
from .models import AppCommand
from .commands import Command, ContextMenu, Group, _shorten
from .errors import (
AppCommandError,
CommandAlreadyRegistered,
CommandNotFound,
CommandSignatureMismatch,
)
from ..errors import ClientException
from ..enums import AppCommandType, InteractionType
from ..utils import MISSING, _get_as_snowflake, _is_submodule
if TYPE_CHECKING:
from ..types.interactions import ApplicationCommandInteractionData, ApplicationCommandInteractionDataOption
from ..interactions import Interaction
from ..client import Client
from ..abc import Snowflake
from .commands import ContextMenuCallback, CommandCallback, P, T
ErrorFunc = Callable[
[
Interaction,
Optional[Union[ContextMenu, Command[Any, ..., Any]]],
AppCommandError,
],
Coroutine[Any, Any, Any],
]
__all__ = ('CommandTree',)
ClientT = TypeVar('ClientT', bound='Client')
APP_ID_NOT_FOUND = (
'Client does not have an application_id set. Either the function was called before on_ready '
'was called or application_id was not passed to the Client constructor.'
)
def _retrieve_guild_ids(
command: Any, guild: Optional[Snowflake] = MISSING, guilds: List[Snowflake] = MISSING
) -> Optional[Set[int]]:
if guild is not MISSING and guilds is not MISSING:
raise TypeError('cannot mix guild and guilds keyword arguments')
# guilds=[] or guilds=[...]
if guild is MISSING:
# If no arguments are given then it should default to the ones
# given to the guilds(...) decorator or None for global.
if guilds is MISSING:
return getattr(command, '_guild_ids', None)
# guilds=[] is the same as global
if len(guilds) == 0:
return None
return {g.id for g in guilds}
# At this point it should be...
# guild=None or guild=Object
if guild is None:
return None
return {guild.id}
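# Illustrative outcomes of the rules above (guild objects are hypothetical):
#     _retrieve_guild_ids(cmd)                   -> cmd._guild_ids or None (global)
#     _retrieve_guild_ids(cmd, guilds=[])        -> None (explicitly global)
#     _retrieve_guild_ids(cmd, guild=some_guild) -> {some_guild.id}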
class CommandTree(Generic[ClientT]):
"""Represents a container that holds application command information.
Parameters
-----------
client: :class:`~discord.Client`
The client instance to get application command information from.
"""
def __init__(self, client: ClientT):
self.client: ClientT = client
self._http = client.http
self._state = client._connection
if self._state._command_tree is not None:
raise ClientException('This client already has an associated command tree.')
self._state._command_tree = self
self._guild_commands: Dict[int, Dict[str, Union[Command, Group]]] = {}
self._global_commands: Dict[str, Union[Command, Group]] = {}
# (name, guild_id, command_type): Command
# The above two mappings can use this structure too but we need fast retrieval
# by name and guild_id in the above case while here it isn't as important since
# it's uncommon and N=5 anyway.
self._context_menus: Dict[Tuple[str, Optional[int], int], ContextMenu] = {}
async def fetch_commands(self, *, guild: Optional[Snowflake] = None) -> List[AppCommand]:
"""|coro|
Fetches the application's current commands.
If no guild is passed then global commands are fetched, otherwise
the guild's commands are fetched instead.
.. note::
This includes context menu commands.
Parameters
-----------
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to fetch the commands from. If not passed then global commands
are fetched instead.
Raises
-------
HTTPException
Fetching the commands failed.
ClientException
The application ID could not be found.
Returns
--------
List[:class:`~discord.app_commands.AppCommand`]
The application's commands.
"""
if self.client.application_id is None:
raise ClientException(APP_ID_NOT_FOUND)
if guild is None:
commands = await self._http.get_global_commands(self.client.application_id)
else:
commands = await self._http.get_guild_commands(self.client.application_id, guild.id)
return [AppCommand(data=data, state=self._state) for data in commands]
def copy_global_to(self, *, guild: Snowflake) -> None:
"""Copies all global commands to the specified guild.
This method is mainly available for development purposes, as it allows you
to copy your global commands over to a testing guild easily and prevent waiting
an hour for the propagation.
Note that this method will *override* pre-existing guild commands that would conflict.
Parameters
-----------
guild: :class:`~discord.abc.Snowflake`
The guild to copy the commands to.
Raises
--------
ValueError
The maximum number of commands was reached for that guild.
This is currently 100 for slash commands and 5 for context menu commands.
"""
try:
mapping = self._guild_commands[guild.id].copy()
except KeyError:
mapping = {}
mapping.update(self._global_commands)
if len(mapping) > 100:
raise ValueError('maximum number of slash commands exceeded (100)')
ctx_menu: Dict[Tuple[str, Optional[int], int], ContextMenu] = {
(name, guild.id, cmd_type): cmd
for ((name, g, cmd_type), cmd) in self._context_menus.items()
if g is None or g == guild.id
}
counter = Counter(cmd_type for _, _, cmd_type in ctx_menu)
for cmd_type, count in counter.items():
if count > 5:
as_enum = AppCommandType(cmd_type)
raise ValueError(f'maximum number of context menu commands exceeded (5) for type {as_enum!s}')
self._context_menus.update(ctx_menu)
self._guild_commands[guild.id] = mapping
def add_command(
self,
command: Union[Command[Any, ..., Any], ContextMenu, Group],
/,
*,
guild: Optional[Snowflake] = MISSING,
guilds: List[Snowflake] = MISSING,
override: bool = False,
) -> None:
"""Adds an application command to the tree.
This only adds the command locally -- in order to sync the commands
and enable them in the client, :meth:`sync` must be called.
The root parent of the command is added regardless of the type passed.
Parameters
-----------
command: Union[:class:`Command`, :class:`Group`]
The application command or group to add.
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to add the command to. If not given then it
becomes a global command instead.
guilds: List[:class:`~discord.abc.Snowflake`]
The list of guilds to add the command to. This cannot be mixed
with the ``guild`` parameter. If no guilds are given at all
then it becomes a global command instead.
override: :class:`bool`
Whether to override a command with the same name. If ``False``
an exception is raised. Default is ``False``.
Raises
--------
~discord.app_commands.CommandAlreadyRegistered
The command was already registered and no override was specified.
TypeError
The application command passed is not a valid application command.
Or, ``guild`` and ``guilds`` were both given.
ValueError
The maximum number of commands was reached globally or for that guild.
This is currently 100 for slash commands and 5 for context menu commands.
"""
guild_ids = _retrieve_guild_ids(command, guild, guilds)
if isinstance(command, ContextMenu):
type = command.type.value
name = command.name
def _context_menu_add_helper(
guild_id: Optional[int],
data: Dict[Tuple[str, Optional[int], int], ContextMenu],
name: str = name,
type: int = type,
) -> None:
key = (name, guild_id, type)
found = key in self._context_menus
if found and not override:
raise CommandAlreadyRegistered(name, guild_id)
total = sum(1 for _, g, t in self._context_menus if g == guild_id and t == type)
if total + found > 5:
raise ValueError('maximum number of context menu commands exceeded (5)')
data[key] = command
if guild_ids is None:
_context_menu_add_helper(None, self._context_menus)
else:
current: Dict[Tuple[str, Optional[int], int], ContextMenu] = {}
for guild_id in guild_ids:
_context_menu_add_helper(guild_id, current)
# Update at the end in order to make sure the update is atomic.
# An error during addition could end up making the context menu mapping
# have a partial state
self._context_menus.update(current)
return
elif not isinstance(command, (Command, Group)):
raise TypeError(f'Expected an application command, received {command.__class__!r} instead')
# todo: validate application command groups having children (required)
root = command.root_parent or command
name = root.name
if guild_ids is not None:
# Validate that the command can be added first, before actually
# adding it into the mapping. This ensures atomicity.
for guild_id in guild_ids:
commands = self._guild_commands.get(guild_id, {})
found = name in commands
if found and not override:
raise CommandAlreadyRegistered(name, guild_id)
if len(commands) + found > 100:
raise ValueError(f'maximum number of slash commands exceeded (100) for guild_id {guild_id}')
# Actually add the command now that it has been verified to be okay.
for guild_id in guild_ids:
commands = self._guild_commands.setdefault(guild_id, {})
commands[name] = root
else:
found = name in self._global_commands
if found and not override:
raise CommandAlreadyRegistered(name, None)
if len(self._global_commands) + found > 100:
raise ValueError('maximum number of global slash commands exceeded (100)')
self._global_commands[name] = root
@overload
def remove_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.message, AppCommandType.user],
) -> Optional[ContextMenu]:
...
@overload
def remove_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.chat_input] = ...,
) -> Optional[Union[Command[Any, ..., Any], Group]]:
...
@overload
def remove_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = ...,
type: AppCommandType,
) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]:
...
def remove_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = None,
type: AppCommandType = AppCommandType.chat_input,
) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]:
"""Removes an application command from the tree.
This only removes the command locally -- in order to sync the commands
and remove them in the client, :meth:`sync` must be called.
Parameters
-----------
command: :class:`str`
The name of the root command to remove.
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to remove the command from. If not given then it
removes a global command instead.
type: :class:`~discord.AppCommandType`
The type of command to remove. Defaults to :attr:`~discord.AppCommandType.chat_input`,
i.e. slash commands.
Returns
---------
Optional[Union[:class:`Command`, :class:`ContextMenu`, :class:`Group`]]
The application command that got removed.
If nothing was removed then ``None`` is returned instead.
"""
if type is AppCommandType.chat_input:
if guild is None:
return self._global_commands.pop(command, None)
else:
try:
commands = self._guild_commands[guild.id]
except KeyError:
return None
else:
return commands.pop(command, None)
elif type in (AppCommandType.user, AppCommandType.message):
guild_id = None if guild is None else guild.id
key = (command, guild_id, type.value)
return self._context_menus.pop(key, None)
@overload
def get_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.message, AppCommandType.user],
) -> Optional[ContextMenu]:
...
@overload
def get_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.chat_input] = ...,
) -> Optional[Union[Command[Any, ..., Any], Group]]:
...
@overload
def get_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = ...,
type: AppCommandType,
) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]:
...
def get_command(
self,
command: str,
/,
*,
guild: Optional[Snowflake] = None,
type: AppCommandType = AppCommandType.chat_input,
) -> Optional[Union[Command[Any, ..., Any], ContextMenu, Group]]:
"""Gets a application command from the tree.
Parameters
-----------
command: :class:`str`
The name of the root command to get.
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to get the command from. If not given then it
gets a global command instead.
type: :class:`~discord.AppCommandType`
The type of command to get. Defaults to :attr:`~discord.AppCommandType.chat_input`,
i.e. slash commands.
Returns
---------
Optional[Union[:class:`Command`, :class:`ContextMenu`, :class:`Group`]]
The application command that was found.
If nothing was found then ``None`` is returned instead.
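Examples
---------
A minimal usage sketch; ``tree`` is an assumed, already-populated tree.
.. code-block:: python3
    command = tree.get_command('ping')
    if command is not None:
        print(f'Found {command.name}')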
"""
if type is AppCommandType.chat_input:
if guild is None:
return self._global_commands.get(command)
else:
try:
commands = self._guild_commands[guild.id]
except KeyError:
return None
else:
return commands.get(command)
elif type in (AppCommandType.user, AppCommandType.message):
guild_id = None if guild is None else guild.id
key = (command, guild_id, type.value)
return self._context_menus.get(key)
@overload
def get_commands(
self,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.message, AppCommandType.user],
) -> List[ContextMenu]:
...
@overload
def get_commands(
self,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.chat_input] = ...,
) -> List[Union[Command[Any, ..., Any], Group]]:
...
@overload
def get_commands(
self,
*,
guild: Optional[Snowflake] = ...,
type: AppCommandType,
) -> Union[List[Union[Command[Any, ..., Any], Group]], List[ContextMenu]]:
...
def get_commands(
self,
*,
guild: Optional[Snowflake] = None,
type: AppCommandType = AppCommandType.chat_input,
) -> Union[List[Union[Command[Any, ..., Any], Group]], List[ContextMenu]]:
"""Gets all application commands from the tree.
Parameters
-----------
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to get the commands from. If not given then it
gets all global commands instead.
type: :class:`~discord.AppCommandType`
The type of commands to get. Defaults to :attr:`~discord.AppCommandType.chat_input`,
i.e. slash commands.
Returns
---------
Union[List[:class:`ContextMenu`], List[Union[:class:`Command`, :class:`Group`]]]
The application commands from the tree.
"""
if type is AppCommandType.chat_input:
if guild is None:
return list(self._global_commands.values())
else:
try:
commands = self._guild_commands[guild.id]
except KeyError:
return []
else:
return list(commands.values())
else:
guild_id = None if guild is None else guild.id
value = type.value
return [command for ((_, g, t), command) in self._context_menus.items() if g == guild_id and t == value]
@overload
def walk_commands(
self,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.message, AppCommandType.user],
) -> Generator[ContextMenu, None, None]:
...
@overload
def walk_commands(
self,
*,
guild: Optional[Snowflake] = ...,
type: Literal[AppCommandType.chat_input] = ...,
) -> Generator[Union[Command[Any, ..., Any], Group], None, None]:
...
@overload
def walk_commands(
self,
*,
guild: Optional[Snowflake] = ...,
type: AppCommandType,
) -> Union[Generator[Union[Command[Any, ..., Any], Group], None, None], Generator[ContextMenu, None, None]]:
...
def walk_commands(
self,
*,
guild: Optional[Snowflake] = None,
type: AppCommandType = AppCommandType.chat_input,
) -> Union[Generator[Union[Command[Any, ..., Any], Group], None, None], Generator[ContextMenu, None, None]]:
"""An iterator that recursively walks through all application commands and child commands from the tree.
Parameters
-----------
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to iterate the commands from. If not given then it
iterates all global commands instead.
type: :class:`~discord.AppCommandType`
The type of commands to iterate over. Defaults to :attr:`~discord.AppCommandType.chat_input`,
i.e. slash commands.
Yields
---------
Union[:class:`ContextMenu`, :class:`Command`, :class:`Group`]
The application commands from the tree.
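Examples
---------
A minimal usage sketch; ``tree`` is an assumed, already-populated tree.
.. code-block:: python3
    for command in tree.walk_commands():
        print(command.name)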
"""
if type is AppCommandType.chat_input:
if guild is None:
for cmd in self._global_commands.values():
yield cmd
if isinstance(cmd, Group):
yield from cmd.walk_commands()
else:
try:
commands = self._guild_commands[guild.id]
except KeyError:
return
else:
for cmd in commands.values():
yield cmd
if isinstance(cmd, Group):
yield from cmd.walk_commands()
else:
guild_id = None if guild is None else guild.id
value = type.value
for ((_, g, t), command) in self._context_menus.items():
if g == guild_id and t == value:
yield command
def _get_all_commands(
self, *, guild: Optional[Snowflake] = None
) -> List[Union[Command[Any, ..., Any], Group, ContextMenu]]:
if guild is None:
base: List[Union[Command[Any, ..., Any], Group, ContextMenu]] = list(self._global_commands.values())
base.extend(cmd for ((_, g, _), cmd) in self._context_menus.items() if g is None)
return base
else:
try:
commands = self._guild_commands[guild.id]
except KeyError:
guild_id = guild.id
return [cmd for ((_, g, _), cmd) in self._context_menus.items() if g == guild_id]
else:
base: List[Union[Command[Any, ..., Any], Group, ContextMenu]] = list(commands.values())
guild_id = guild.id
base.extend(cmd for ((_, g, _), cmd) in self._context_menus.items() if g == guild_id)
return base
def _remove_with_module(self, name: str) -> None:
remove: List[Any] = []
for key, cmd in self._context_menus.items():
if cmd.module is not None and _is_submodule(name, cmd.module):
remove.append(key)
for key in remove:
del self._context_menus[key]
remove = []
for key, cmd in self._global_commands.items():
if cmd.module is not None and _is_submodule(name, cmd.module):
remove.append(key)
for key in remove:
del self._global_commands[key]
for mapping in self._guild_commands.values():
remove = []
for key, cmd in mapping.items():
if cmd.module is not None and _is_submodule(name, cmd.module):
remove.append(key)
for key in remove:
del mapping[key]
async def on_error(
self,
interaction: Interaction,
command: Optional[Union[ContextMenu, Command[Any, ..., Any]]],
error: AppCommandError,
) -> None:
"""|coro|
A callback that is called when any command raises an :exc:`AppCommandError`.
The default implementation prints the traceback to stderr if the command does
not have any error handlers attached to it.
Parameters
-----------
interaction: :class:`~discord.Interaction`
The interaction that is being handled.
command: Optional[Union[:class:`~discord.app_commands.Command`, :class:`~discord.app_commands.ContextMenu`]]
The command that failed, if any.
error: :exc:`AppCommandError`
The exception that was raised.
"""
if command is not None:
if command._has_any_error_handlers():
return
print(f'Ignoring exception in command {command.name!r}:', file=sys.stderr)
else:
print('Ignoring exception in command tree:', file=sys.stderr)
traceback.print_exception(error.__class__, error, error.__traceback__, file=sys.stderr)
def error(self, coro: ErrorFunc) -> ErrorFunc:
"""A decorator that registers a coroutine as a local error handler.
This must match the signature of the :meth:`on_error` callback.
The error passed will be derived from :exc:`AppCommandError`.
Parameters
-----------
coro: :ref:`coroutine <coroutine>`
The coroutine to register as the local error handler.
Raises
-------
TypeError
The coroutine passed is not actually a coroutine or does
not match the signature.
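Examples
---------
A minimal usage sketch; ``tree`` is an assumed, already-created tree instance.
.. code-block:: python3
    @tree.error
    async def on_app_command_error(interaction, command, error):
        print(f'Command {command} raised: {error}')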
"""
if not inspect.iscoroutinefunction(coro):
raise TypeError('The error handler must be a coroutine.')
params = inspect.signature(coro).parameters
if len(params) != 3:
raise TypeError('error handler must have 3 parameters')
# Type checker doesn't like overriding methods like this
self.on_error = coro # type: ignore
return coro
def command(
self,
*,
name: str = MISSING,
description: str = MISSING,
guild: Optional[Snowflake] = MISSING,
guilds: List[Snowflake] = MISSING,
) -> Callable[[CommandCallback[Group, P, T]], Command[Group, P, T]]:
"""Creates an application command directly under this tree.
Parameters
------------
name: :class:`str`
The name of the application command. If not given, it defaults to a lower-case
version of the callback name.
description: :class:`str`
The description of the application command. This shows up in the UI to describe
the application command. If not given, it defaults to the first line of the docstring
of the callback shortened to 100 characters.
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to add the command to. If not given then it
becomes a global command instead.
guilds: List[:class:`~discord.abc.Snowflake`]
The list of guilds to add the command to. This cannot be mixed
with the ``guild`` parameter. If no guilds are given at all
then it becomes a global command instead.
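Examples
---------
A minimal usage sketch; ``tree`` is an assumed, already-created tree instance.
.. code-block:: python3
    @tree.command(name='ping', description='Replies with pong.')
    async def ping(interaction: discord.Interaction):
        await interaction.response.send_message('pong')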
"""
def decorator(func: CommandCallback[Group, P, T]) -> Command[Group, P, T]:
if not inspect.iscoroutinefunction(func):
raise TypeError('command function must be a coroutine function')
if description is MISSING:
if func.__doc__ is None:
desc = '…'
else:
desc = _shorten(func.__doc__)
else:
desc = description
command = Command(
name=name if name is not MISSING else func.__name__,
description=desc,
callback=func,
parent=None,
)
self.add_command(command, guild=guild, guilds=guilds)
return command
return decorator
def context_menu(
self,
*,
name: str = MISSING,
guild: Optional[Snowflake] = MISSING,
guilds: List[Snowflake] = MISSING,
) -> Callable[[ContextMenuCallback], ContextMenu]:
"""Creates a application command context menu from a regular function directly under this tree.
This function must have a signature of :class:`~discord.Interaction` as its first parameter
and taking either a :class:`~discord.Member`, :class:`~discord.User`, or :class:`~discord.Message`,
or a :obj:`typing.Union` of ``Member`` and ``User`` as its second parameter.
Examples
---------
.. code-block:: python3
@app_commands.context_menu()
async def react(interaction: discord.Interaction, message: discord.Message):
await interaction.response.send_message('Very cool message!', ephemeral=True)
@app_commands.context_menu()
async def ban(interaction: discord.Interaction, user: discord.Member):
await interaction.response.send_message(f'Should I actually ban {user}...', ephemeral=True)
Parameters
------------
name: :class:`str`
The name of the context menu command. If not given, it defaults to a title-case
version of the callback name. Note that unlike regular slash commands this can
have spaces and upper case characters in the name.
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to add the command to. If not given then it
becomes a global command instead.
guilds: List[:class:`~discord.abc.Snowflake`]
The list of guilds to add the command to. This cannot be mixed
with the ``guild`` parameter. If no guilds are given at all
then it becomes a global command instead.
"""
def decorator(func: ContextMenuCallback) -> ContextMenu:
if not inspect.iscoroutinefunction(func):
raise TypeError('context menu function must be a coroutine function')
actual_name = func.__name__.title() if name is MISSING else name
context_menu = ContextMenu(name=actual_name, callback=func)
self.add_command(context_menu, guild=guild, guilds=guilds)
return context_menu
return decorator
async def sync(self, *, guild: Optional[Snowflake] = None) -> List[AppCommand]:
"""|coro|
Syncs the application commands to Discord.
This must be called for the application commands to show up.
Global commands can take up to an hour to propagate, but guild
commands propagate instantly.
Parameters
-----------
guild: Optional[:class:`~discord.abc.Snowflake`]
The guild to sync the commands to. If ``None`` then it
syncs all global commands instead.
Raises
-------
HTTPException
Syncing the commands failed.
ClientException
The client does not have an application ID.
Returns
--------
List[:class:`AppCommand`]
The application's commands that got synced.
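Examples
---------
A minimal usage sketch; ``tree`` and ``my_guild`` are assumed to exist already.
.. code-block:: python3
    synced = await tree.sync(guild=my_guild)
    print(f'Synced {len(synced)} commands.')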
"""
if self.client.application_id is None:
raise ClientException(APP_ID_NOT_FOUND)
commands = self._get_all_commands(guild=guild)
payload = [command.to_dict() for command in commands]
if guild is None:
data = await self._http.bulk_upsert_global_commands(self.client.application_id, payload=payload)
else:
data = await self._http.bulk_upsert_guild_commands(self.client.application_id, guild.id, payload=payload)
return [AppCommand(data=d, state=self._state) for d in data]
def _from_interaction(self, interaction: Interaction):
async def wrapper():
try:
await self.call(interaction)
except AppCommandError as e:
await self.on_error(interaction, None, e)
self.client.loop.create_task(wrapper(), name='CommandTree-invoker')
def _get_context_menu(self, data: ApplicationCommandInteractionData) -> Optional[ContextMenu]:
name = data['name']
guild_id = _get_as_snowflake(data, 'guild_id')
return self._context_menus.get((name, guild_id, data.get('type', 1)))
def _get_app_command_options(
self, data: ApplicationCommandInteractionData
) -> Tuple[Command[Any, ..., Any], List[ApplicationCommandInteractionDataOption]]:
parents: List[str] = []
name = data['name']
command_guild_id = _get_as_snowflake(data, 'guild_id')
if command_guild_id:
try:
guild_commands = self._guild_commands[command_guild_id]
except KeyError:
command = None
else:
command = guild_commands.get(name)
else:
command = self._global_commands.get(name)
# If it's not found at this point then it's not gonna be found at any point
if command is None:
raise CommandNotFound(name, parents)
# This could be done recursively, but it'd be a bother due to the state that
# needs to be tracked above, like the parents, the actual command type, and
# the resulting options we care about
searching = True
options: List[ApplicationCommandInteractionDataOption] = data.get('options', [])
while searching:
for option in options:
# Find subcommands
if option.get('type', 0) in (1, 2):
parents.append(name)
name = option['name']
command = command._get_internal_command(name)
if command is None:
raise CommandNotFound(name, parents)
options = option.get('options', [])
break
else:
searching = False
break
else:
break
if isinstance(command, Group):
# Right now, groups can't be invoked. This is a Discord limitation in how they
# do slash commands. So if we're here and we have a Group rather than a Command instance
# then something in the code is out of date from the data that Discord has.
raise CommandSignatureMismatch(command)
return (command, options)
async def _call_context_menu(self, interaction: Interaction, data: ApplicationCommandInteractionData, type: int) -> None:
name = data['name']
guild_id = _get_as_snowflake(data, 'guild_id')
ctx_menu = self._context_menus.get((name, guild_id, type))
# Pre-fill the cached slot to prevent re-computation
interaction._cs_command = ctx_menu
if ctx_menu is None:
raise CommandNotFound(name, [], AppCommandType(type))
resolved = Namespace._get_resolved_items(interaction, data.get('resolved', {}))
target_id = data.get('target_id')
# Right now, the only types are message and user
# Therefore, there's no conflict with snowflakes
# This will always work at runtime
key = ResolveKey.any_with(target_id) # type: ignore
value = resolved.get(key)
if ctx_menu.type.value != type:
raise CommandSignatureMismatch(ctx_menu)
if value is None:
raise AppCommandError('This should not happen if Discord sent well-formed data.')
# I assume I don't have to type check here.
try:
await ctx_menu._invoke(interaction, value)
except AppCommandError as e:
if ctx_menu.on_error is not None:
await ctx_menu.on_error(interaction, e)
await self.on_error(interaction, ctx_menu, e)
async def interaction_check(self, interaction: Interaction, /) -> bool:
"""|coro|
A global check to determine if an :class:`~discord.Interaction` should
be processed by the tree.
The default implementation returns ``True`` (i.e. every interaction is processed),
but it can be overridden if custom behaviour is desired.
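Examples
---------
A minimal sketch of an override; ``banned_ids`` is a hypothetical set of user IDs.
.. code-block:: python3
    class MyTree(CommandTree):
        async def interaction_check(self, interaction):
            return interaction.user.id not in banned_ids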
"""
return True
async def call(self, interaction: Interaction) -> None:
"""|coro|
Given an :class:`~discord.Interaction`, calls the matching
application command that's being invoked.
This is usually called automatically by the library.
Parameters
-----------
interaction: :class:`~discord.Interaction`
The interaction to dispatch from.
Raises
--------
CommandNotFound
The application command referred to could not be found.
CommandSignatureMismatch
The interaction data referred to a parameter that was not found in the
application command definition.
AppCommandError
An error occurred while calling the command.
"""
if not await self.interaction_check(interaction):
return
data: ApplicationCommandInteractionData = interaction.data # type: ignore
type = data.get('type', 1)
if type != 1:
# Context menu command...
await self._call_context_menu(interaction, data, type)
return
command, options = self._get_app_command_options(data)
# Pre-fill the cached slot to prevent re-computation
interaction._cs_command = command
# At this point options refers to the arguments of the command
# and command refers to the class type we care about
namespace = Namespace(interaction, data.get('resolved', {}), options)
# Same pre-fill as above
interaction._cs_namespace = namespace
# Autocomplete handles the namespace differently, so this is the point where we decide which path to take.
if interaction.type is InteractionType.autocomplete:
focused = next((opt['name'] for opt in options if opt.get('focused')), None)
if focused is None:
raise AppCommandError('This should not happen, but there is no focused element. This is a Discord bug.')
await command._invoke_autocomplete(interaction, focused, namespace)
return
try:
await command._invoke_with_namespace(interaction, namespace)
except AppCommandError as e:
await command._invoke_error_handler(interaction, e)
await self.on_error(interaction, command, e)
| 36.990421 | 125 | 0.597856 |
hexsha: 8bbf20eb5b4992ca2bfa33ae48b09b8b29d4a7dc | size: 715 | ext: py | lang: Python
repo_path: nodes/Input/MaterialInput.py | repo_name: kant/RenderStackNode | head_hexsha: 19876fc75a03edf36ae27837d193509907adbd4a | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
import bpy
from bpy.props import *
from ...nodes.BASE.node_tree import RenderStackNode
# from ...utility import source_attr
from mathutils import Color, Vector
def update_node(self, context):
self.update_parms()
class RenderNodeMaterialInput(RenderStackNode):
bl_idname = 'RenderNodeMaterialInput'
bl_label = 'Material Input +'
value: PointerProperty(type=bpy.types.Material)
def init(self, context):
self.outputs.new('NodeSocketMaterial', "Output")
def draw_buttons(self, context, layout):
layout.prop(self, 'value', text='')
def register():
    bpy.utils.register_class(RenderNodeMaterialInput)
def unregister():
    bpy.utils.unregister_class(RenderNodeMaterialInput)
| 23.064516 | 56 | 0.735664 |
hexsha: f06bc2106e8b9a64eb3a8979f4267138b34906d1 | size: 483 | ext: py | lang: Python
repo_path: events/contrib/plugins/form_elements/fields/text/apps.py | repo_name: mansonul/events | head_hexsha: 4f6ca37bc600dcba3f74400d299826882d53b7d2 | licenses: ["MIT"]
stars: null | issues: null | forks: null
__title__ = 'fobi.contrib.plugins.form_elements.fields.text.apps'
__author__ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('Config',)
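# ``django.apps`` (and therefore ``AppConfig``) only exists on Django >= 1.7,
# so the app config is silently skipped on older Django versions.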
try:
from django.apps import AppConfig
class Config(AppConfig):
"""Config."""
name = 'fobi.contrib.plugins.form_elements.fields.text'
label = 'fobi_contrib_plugins_form_elements_fields_text'
except ImportError:
pass
| 26.833333 | 65 | 0.720497 |
hexsha: c7f3ff1c8591be846553ec4cb59b6ac74cd7c11d | size: 2959 | ext: py | lang: Python
repo_path: activemri/experimental/cvpr19_models/util/common.py | repo_name: qinliuliuqin/active-mri-acquisition | head_hexsha: b561f838667f4bc7753b1f89dfbdd545d0f00ada | licenses: ["MIT"]
stars: 32 (2020-10-05T19:46:56.000Z .. 2022-02-15T10:37:13.000Z) | issues: 2 (2020-12-28T21:05:57.000Z .. 2022-01-22T17:56:50.000Z) | forks: 9 (2020-11-22T18:15:04.000Z .. 2022-02-11T06:07:38.000Z)
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
import os
from typing import Dict, Optional
import matplotlib.pyplot as plt
import numpy as np
import skimage.measure
import torch
import torchvision.utils as tvutil
def load_checkpoint(checkpoint_path: str) -> Optional[Dict]:
if os.path.isfile(checkpoint_path):
logging.info(f"Found checkpoint at {checkpoint_path}.")
return torch.load(checkpoint_path)
logging.info(f"No checkpoint found at {checkpoint_path}.")
return None
def compute_ssims(xs, ys):
ssims = []
for i in range(xs.shape[0]):
ssim = skimage.measure.compare_ssim(
xs[i, 0].cpu().numpy(),
ys[i, 0].cpu().numpy(),
data_range=ys[i, 0].cpu().numpy().max(),
)
ssims.append(ssim)
return np.array(ssims).mean()
def compute_psnrs(xs, ys):
psnrs = []
for i in range(xs.shape[0]):
psnr = skimage.measure.compare_psnr(
xs[i, 0].cpu().numpy(),
ys[i, 0].cpu().numpy(),
data_range=ys[i, 0].cpu().numpy().max(),
)
psnrs.append(psnr)
return np.array(psnrs).mean()
def compute_mse(xs, ys):
return np.mean((ys.cpu().numpy() - xs.cpu().numpy()) ** 2)
def compute_nmse(xs, ys):
ys_numpy = ys.cpu().numpy()
return (
np.linalg.norm(ys_numpy - xs.cpu().numpy()) ** 2 / np.linalg.norm(ys_numpy) ** 2
)
# Converts a Tensor into an image array (numpy)
# |imtype|: the desired type of the converted numpy array
def tensor2im(input_image, imtype=np.uint8, renormalize=True):
if isinstance(input_image, torch.Tensor):
image_tensor = input_image.data
else:
return input_image
# do the normalization first; since we are working in Fourier space, we need to clamp
if renormalize:
image_tensor.add_(1).div_(2)
image_tensor.mul_(255).clamp_(0, 255)
if len(image_tensor.shape) == 4:
image_numpy = image_tensor[0].cpu().float().numpy()
else:
image_numpy = image_tensor.cpu().float().numpy()
if image_numpy.shape[0] == 1:
image_numpy = np.tile(image_numpy, (3, 1, 1))
return image_numpy.astype(imtype)
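# A minimal usage sketch (hypothetical values, not part of the original module):
#   t = torch.rand(1, 1, 64, 64) * 2 - 1   # fake image in [-1, 1]
#   img = tensor2im(t)                      # -> (3, 64, 64) uint8 array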
def create_grid_from_tensor(tensor_of_images, num_rows=4):
# take norm over real-imaginary dimension
# tensor_of_images = tensor_of_images.norm(dim=1, keepdim=True)
# make image grid
tensor_grid = tvutil.make_grid(
tensor_of_images, nrow=num_rows, normalize=True, scale_each=False
)
numpy_grid = tensor2im(tensor_grid, renormalize=False)
return numpy_grid
def gray2heatmap(grayimg, cmap="jet"):
cmap = plt.get_cmap(cmap)
rgba_img = cmap(grayimg)
# rgb_img = np.delete(rgba_img, 3, 2) * 255.0
rgb_img = rgba_img[:, :, :, 0] * 255.0
rgb_img = rgb_img.astype(np.uint8)
return rgb_img
| 28.180952 | 88 | 0.650896 |
hexsha: 01065da7bc8387efa4d9672a50480c45b32ed9b1 | size: 871 | ext: py | lang: Python
repo_path: regreg/atoms/setup.py | repo_name: sroet/regreg | head_hexsha: 299ff18b8680872d4d85447953793bf438f78bba | licenses: ["BSD-3-Clause"]
stars: 9 (2015-07-14T12:39:10.000Z .. 2022-02-03T11:13:03.000Z) | issues: 2 (2015-02-24T18:05:58.000Z .. 2020-12-29T11:58:58.000Z) | forks: 8 (2015-07-13T07:59:16.000Z .. 2021-06-23T09:21:43.000Z)
"""
RegReg: A package to solve regularized regression problems
"""
import os, sys
import string
from Cython.Compiler import Main
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('atoms',parent_package,top_path)
config.add_extension('projl1_cython',
sources = ["projl1_cython.c"],
)
config.add_extension('mixed_lasso_cython',
sources = ["mixed_lasso_cython.c"],
)
config.add_extension('piecewise_linear',
sources = ["piecewise_linear.c"],
)
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
c = configuration(top_path='',
).todict()
setup(**c)
| 26.393939 | 60 | 0.58209 |
hexsha: 718fabbb99761f63996fa7200e11fe550908e4ae | size: 6866 | ext: py | lang: Python
repo_path: ForgeShortUrl/forgeshorturl/tests/functional/test.py | repo_name: rohankumardubey/allura | head_hexsha: 9c490a051ca912d28b81ce656441d6fed100cb24 | licenses: ["Apache-2.0"]
stars: 113 (2015-03-25T10:33:37.000Z .. 2022-02-16T20:55:06.000Z) | issues: 4 (2017-08-04T16:19:07.000Z .. 2020-06-08T19:01:33.000Z) | forks: 36 (2015-08-14T16:27:39.000Z .. 2022-02-16T20:54:35.000Z)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import absolute_import
from tg import tmpl_context as c
from tg import config
from alluratest.tools import assert_equal
import mock
from allura.lib import helpers as h
from allura.tests import decorators as td
from alluratest.controller import TestController
from forgeshorturl.model import ShortUrl
class TestRootController(TestController):
def setUp(self):
super(TestRootController, self).setUp()
self.setup_with_tools()
@td.with_url
def setup_with_tools(self):
pass
def test_shorturl_add(self):
response = self.app.get('/admin/url/add')
response.form['short_url'] = 'test'
response.form['full_url'] = 'http://www.google.com/'
response.form.submit()
redir = self.app.get('/url/test', status=302)
assert_equal(redir.location, 'http://www.google.com/')
def test_shorturl_http_head(self):
response = self.app.get('/admin/url/add')
response.form['short_url'] = 'test'
response.form['full_url'] = 'http://www.google.com/'
response.form.submit()
r = self.app.head('/url/test', status=302)
assert_equal(r.location, 'http://www.google.com/')
def test_shorturl_update(self):
response = self.app.get('/admin/url/add')
response.form['short_url'] = 'g'
response.form['full_url'] = 'http://www.google.com/'
response.form.submit()
redir = self.app.get('/url/g', status=302)
assert_equal(redir.location, 'http://www.google.com/')
response = self.app.get('/url/')
form = response.forms['short-url-form']
form['update'] = 'True'
form['short_url'] = 'g'
form['full_url'] = 'http://www.yahoo.com/'
form.action = '/admin/url/add/'
form.submit()
redir = self.app.get('/url/g', status=302)
assert_equal(redir.location, 'http://www.yahoo.com/')
def test_shorturl_not_found(self):
self.app.post('/admin/url/add',
dict(short_url='test',
full_url='http://www.google.com/',
description="description2"))
r = self.app.get('/url/test2', status=404)
r = self.app.get('/url/')
assert 'http://www.google.com/' in r
def test_shorturl_private(self):
self.app.post('/admin/url/add',
dict(short_url='test_private',
full_url='http://www.amazone.com/',
private='on',
description="description1"))
r = self.app.get('/url/')
assert 'http://www.amazone.com/' in r
assert '<td><small>yes</small></td>' in r
self.app.get('/url/test_private',
extra_environ=dict(username=str('*anonymous')),
status=404)
self.app.get('/url/test_private',
status=302)
def test_shorturl_errors(self):
d = dict(short_url='amazone',
full_url='amazone')
r = self.app.post('/admin/url/add', params=d)
assert 'error' in self.webflash(r)
d = dict(short_url='test', full_url='http://google.com/')
r = self.app.post('/admin/url/add', params=d)
d['full_url'] = 'http://yahoo.com'
r = self.app.post('/admin/url/add', params=d)
assert 'exists' in self.webflash(r)
def test_shorturl_chars_restrictions(self):
d = dict(short_url='', full_url='http://domain.net/')
r = self.app.post('/admin/url/add', params=d)
assert ShortUrl.query.find(
dict(app_config_id=c.app.config._id)).count() == 0
assert 'Please enter a value' in self.webflash(r)
d = dict(short_url='g*', full_url='http://domain.net/')
r = self.app.post('/admin/url/add', params=d)
assert ShortUrl.query.find(
dict(app_config_id=c.app.config._id)).count() == 0
assert 'Short url: must include only letters, numbers, dashes and underscores.' in self.webflash(
r)
def test_shorturl_remove(self):
self.app.post('/admin/url/add',
params=dict(short_url='g', full_url='http://google.com/'))
assert ShortUrl.query.find(
dict(app_config_id=c.app.config._id)).count() == 1
self.app.post('/admin/url/remove', params=dict(shorturl='g'))
assert ShortUrl.query.find(
dict(app_config_id=c.app.config._id)).count() == 0
def test_shorturl_permissions(self):
self.app.post('/admin/url/add',
params=dict(short_url='g',
full_url='http://google.com/'),
extra_environ=dict(username=str('test-user')), status=403)
self.app.post('/admin/url/remove', params=dict(shorturl='g'),
extra_environ=dict(username=str('test-user')), status=403)
def test_build_short_url(self):
with h.push_config(config, **{
'short_url.url_pattern': '{base_url}:{nbhd}:{project}:{mount_point}:{short_name}',
'base_url': 'b',
}):
nbhd = mock.Mock(url_prefix='/n/')
project = mock.Mock(shortname='p', neighborhood=nbhd)
app = mock.Mock(project=project)
app.config.options.mount_point = 'm'
url = ShortUrl.build_short_url(app, 's')
assert_equal(url, 'b:n:p:m:s')
def test_short_url(self):
response = self.app.get('/admin/url/add')
response.form['short_url'] = 'test'
response.form['full_url'] = 'http://www.google.com/'
response.form.submit()
surl = ShortUrl.query.get(short_name='test')
with h.push_config(config, **{
'short_url.url_pattern': '{base_url}:{nbhd}:{project}:{mount_point}:{short_name}',
'base_url': 'b',
}):
assert_equal(surl.short_url(), 'b:p:test:url:test')
| 40.869048 | 105 | 0.593941 |
hexsha: 77dc6b3205ee1b7d61b6351c223c1619c795588c | size: 6593 | ext: py | lang: Python
repo_path: analyze.py | repo_name: venkyr77/FCN-Project | head_hexsha: ff23e0087680df422705b436c75ba19c4bc2c58f | licenses: ["Apache-2.0"]
stars: null | issues: null | forks: null
import json
import os
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import functools
from scipy import stats
from os import walk
from pylab import *
abr_algo_types = ["Bola - E", "Dynamic", "Throughput"]
network_profiles = ["PROFILE_CASCADE", "PROFILE_INTRA_CASCADE", "PROFILE_SPIKE", "PROFILE_SLOW_JITTERS", "PROFILE_FAST_JITTERS"]
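# One-sided two-sample t-test helper: returns True when the mean of `b` is
# significantly greater than the mean of `a` at level `alpha` (the two-sided
# p-value from ttest_ind is halved and the sign of t is checked).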
def t_test(a, b, alpha = 0.05):
t, p = stats.ttest_ind(b, a)
if(p/2 < alpha and t > 0):
return True
else:
return False
def rank_using_t_test(bola_e_values, throughput_values, dynamic_values):
answer_dict = {}
answer_dict['Bola'] = 0
answer_dict['Throughput'] = 0
answer_dict['Dynamic'] = 0
if t_test(bola_e_values, throughput_values):
answer_dict['Bola'] = answer_dict['Bola'] + 1
else:
answer_dict['Throughput'] = answer_dict['Throughput'] + 1
if t_test(throughput_values, dynamic_values):
answer_dict['Throughput'] = answer_dict['Throughput'] + 1
else:
answer_dict['Dynamic'] = answer_dict['Dynamic'] + 1
if t_test(bola_e_values, dynamic_values):
answer_dict['Bola'] = answer_dict['Bola'] + 1
else:
answer_dict['Dynamic'] = answer_dict['Dynamic'] + 1
print(" > ".join(dict(sorted(answer_dict.items(), key=lambda item: item[1])).keys()))
print("")
def plot_cdf(a, abr_algo_type):
x = np.sort(a)
y = np.arange(len(x))/float(len(x))
plt.plot(x, y, label=abr_algo_type)
plt.legend(loc="upper left")
plt.ylim(-1.5, 2.0)
def analyse_stall_duration(data_set):
print("\n------------Analysing Stall Durations-----------\n")
for network_profile in network_profiles:
print("Network profile:", network_profile)
ax = data_set[['profile', 'abr_algo_type', 'stall_duration']].loc[data_set['profile'] == network_profile].boxplot(by = 'abr_algo_type')
ax.set_title('Network profile ' + network_profile)
ax.set_xlabel('ABR Algorithms')
ax.set_ylabel('Stall Duration')
df = data_set.loc[data_set['profile'] == network_profile]
a = df.loc[df['abr_algo_type'] == 'Bola - E'].stall_duration.to_numpy()
b = df.loc[df['abr_algo_type'] == 'Throughput'].stall_duration.to_numpy()
c = df.loc[df['abr_algo_type'] == 'Dynamic'].stall_duration.to_numpy()
#plot_cdf(a, 'Bola - E');
#plot_cdf(b, 'Throughput');
#plot_cdf(c, 'Dynamic');
#plt.xlabel("Stalling Time")
#plt.ylabel("CDF of Stalling Time")
#plt.title("CDF of Stalling Time in network condition " + network_profile)
gg = []
gg.append(a.mean())
gg.append(b.mean())
gg.append(c.mean())
print("BOLA - E :", a.mean(), "Throughput:", b.mean(), "Dynamic:", c.mean())
print(np.diff(gg) / np.abs(gg[:-1]) * 100)
plt.show()
#rank_using_t_test(a, b, c)
print("------------------------------------------------\n")
def analyse_average_latency(data_set):
print("\n------------Analysing Average Latency-----------\n")
for network_profile in network_profiles:
print("Network profile:", network_profile)
ax = data_set[['profile', 'abr_algo_type', 'average_latency']].loc[data_set['profile'] == network_profile].boxplot(by = 'abr_algo_type')
ax.set_title('Network profile ' + network_profile)
ax.set_xlabel('ABR Algorithms')
ax.set_ylabel('Average Latency')
df = data_set.loc[data_set['profile'] == network_profile]
a = df.loc[df['abr_algo_type'] == 'Bola - E'].average_latency.to_numpy()
b = df.loc[df['abr_algo_type'] == 'Throughput'].average_latency.to_numpy()
c = df.loc[df['abr_algo_type'] == 'Dynamic'].average_latency.to_numpy()
#plot_cdf(a, 'Bola - E');
#plot_cdf(b, 'Throughput');
#plot_cdf(c, 'Dynamic');
#plt.xlabel("Average Latency")
#plt.ylabel("CDF of Average Latency")
#plt.title("CDF of Stalling Time in network condition " + network_profile)
gg = []
gg.append(a.mean())
gg.append(b.mean())
gg.append(c.mean())
print("BOLA - E :", a.mean(), "Throughput:", b.mean(), "Dynamic:", c.mean())
print(np.diff(gg) / np.abs(gg[:-1]) * 100)
plt.show()
#rank_using_t_test(a, b, c)
print("------------------------------------------------\n")
def analyse_average_bitrate(data_set):
print("\n------------Analysing Average Bit rate-----------\n")
for network_profile in network_profiles:
print("Network profile:", network_profile)
ax = data_set[['profile', 'abr_algo_type', 'average_bitrate']].loc[data_set['profile'] == network_profile].boxplot(by = 'abr_algo_type')
ax.set_title('Network profile ' + network_profile)
ax.set_xlabel('ABR Algorithms')
ax.set_ylabel('Average Bitrate')
df = data_set.loc[data_set['profile'] == network_profile]
a = df.loc[df['abr_algo_type'] == 'Bola - E'].average_bitrate.to_numpy()
b = df.loc[df['abr_algo_type'] == 'Throughput'].average_bitrate.to_numpy()
c = df.loc[df['abr_algo_type'] == 'Dynamic'].average_bitrate.to_numpy()
rank_using_t_test(a, b, c)
print("------------------------------------------------\n")
def analyze(data_set):
#analyse_stall_duration(data_set)
analyse_average_latency(data_set)
#analyse_average_bitrate(data_set)
result_dir = './Final_Results'
abr_algo_type_list = []
profile_list = []
data_set = pd.DataFrame()
stall_duration_list = []
average_latency_list = []
average_buffer_length_list = []
average_bitrate_list = []
num_switches_list = []
for abr_algo in abr_algo_types:
for network_profile in network_profiles:
latency_list_for_runs = []
path = result_dir + '/' + abr_algo + '/' + network_profile
for f in os.listdir(path):
_json = open(path + '/' + f)
data = json.load(_json)
abr_algo_type_list.append(abr_algo)
profile_list.append(network_profile)
stall_duration_list.append(data["stallDuration"])
average_latency_list.append(data["averageLatency"])
average_buffer_length_list.append(data["averageBufferLength"])
average_bitrate_list.append(data["averageBitrate"])
num_switches_list.append(data["numSwitches"])
data_set['abr_algo_type'] = abr_algo_type_list
data_set['profile'] = profile_list
data_set['stall_duration'] = stall_duration_list
data_set['average_latency'] = average_latency_list
data_set['average_buffer_length'] = average_buffer_length_list
data_set['average_bitrate'] = average_bitrate_list
data_set['num_switches'] = num_switches_list
#print(data_set)
#analyze(data_set)
testt = {}
for abr_algo in abr_algo_types:
qoefile_path = result_dir + '/' + abr_algo + '/qoe.json'
_json = open(qoefile_path)
data = json.load(_json)
aut = list(data.values())
testt[abr_algo] = sum(aut) / len(aut)
plt.xlabel("ABR Algorithms")
plt.ylabel("QoE score")
plt.bar(*zip(*testt.items()))
plt.show()
#plt.show()
| 29.832579 | 138 | 0.677688 |
hexsha: d9abec5e7863d9f381bb3c73fbb8ca42d03bc591 | size: 10984 | ext: py | lang: Python
repo_path: FirmsLocations/Preprocess/firms_preprocessor.py | repo_name: tgquintela/Firms_locations | head_hexsha: 476680cbc3eb1308811633d24810049e215101a0 | licenses: ["MIT"]
stars: null | issues: null | forks: null
"""
Module grouping the classes and functions whose task is to preprocess the
data and to prepare new data and structures to be used in the processes we
want to perform.
TODO:
-----
- Reverse process
- Processor class
"""
import numpy as np
import pandas as pd
import os
#import pandas as pd
import shelve
#from itertools import product
from ..Preprocess.geo_filters import get_statistics2fill_locations,\
fill_locations, fill_nulls
from ..Preprocess.aux_standarization_functions import pre_read_servicios,\
get_sequencial_servicios, read_manufactures
from ..Preprocess.preprocess_cols import create_CA_column,\
join_and_store_empresas_temporal, join_and_store_empresas_atemporal,\
join_empresas_atemporal, compute_apertura_cierre, generate_replace,\
create_sector_columns, clean_colnames_manu, transform_cnae_col,\
store_empresas_atemporal_years
from ..Preprocess.financial_interpolation import financial_interpolation
from ..IO.standarization_io_utils import write_ca_cp, write_locs_statistics,\
write_uncorrect_locs, write_ca2code, write_nif2code, write_cp2code,\
write_nif2names
from pythonUtils.ProcessTools import Processer
from pythonUtils.Logger import Logger
from preprocess_cols import generate_replace, transform_cnae_col,\
create_sector_columns, clean_colnames_manu
from pySpatialTools.Preprocess.Transformations.Transformation_2d import\
general_projection
CA_names = ['Andalucia', 'Aragon', 'Asturias', 'Balears', 'Canarias',
'Cantabria', 'CastillaLeon', 'CastillaMancha', 'Catalunya',
'Ceuta_Melilla', 'Extremadura', 'Galicia', 'LaRioja_Navarra',
'Madrid', 'Murcia', 'Pais Vasco', 'Valencia']
loc_vars, reg_var = ['ES-X', 'ES-Y'], 'cp'
class Firms_PrePrecomputations(Processer):
"""Class processer which standarizes and cleans firms data."""
def _initialization(self):
self.proc_name = "Main precomputations"
self.proc_desc = """Main precomputations of features and values."""
self.subproc_desc = ["Pfeatures computation", "Qvalues computation"]
self.t_expended_subproc = [0, 0]
def __init__(self, logfile, pathdata):
self._initialization()
self.logfile = Logger(logfile) if type(logfile) == str else logfile
self.pathdata = pathdata
def compute(self):
## 0. Set vars
t00 = self.setting_global_process()
## 1. Precompute pointfeatures
t0 = self.set_subprocess([0])
pass
class Firms_Standarization(Processer):
"""Class processer which standarizes and cleans firms data."""
def _initialization(self):
self.proc_name = "Standarization and cleaning empresas data"
self.proc_desc = """Preprocessing data of empresas in order to be
treatable and easy retrievable from files."""
self.subproc_desc = ["Get CA-CP dictionary",
"Standarization of manufactures",
"Standarization of services and storing"]
self.t_expended_subproc = [0, 0, 0]
def __init__(self, logfile, pathdata):
self._initialization()
self.logfile = Logger(logfile) if type(logfile) == str else logfile
self.pathdata = pathdata
def clean_raw_data(self, pathdata):
## 0. Set vars
t00 = self.setting_global_process()
## 1. Pre-read servicios
t0 = self.set_subprocess([0])
ca_cp_dict, raw_locs_serv, nifs_serv, cps_serv, names_serv,\
muni_serv, null_serv_cp, null_serv_muni, null_serv_locs =\
pre_read_servicios(pathdata)
self.close_subprocess([0], t0)
## 2. Read and process manufactures
t0 = self.set_subprocess([1])
# Read
manufactures, raw_locs_manu, nifs_manu, cps_manu, names_manu,\
muni_manu, null_manu_cp, null_manu_muni, null_manu_locs =\
read_manufactures(pathdata)
# Joining useful data
nifs = nifs_serv + nifs_manu
names = names_serv + names_manu
raw_locs = np.concatenate([raw_locs_serv, raw_locs_manu])
raw_cps = cps_serv + cps_manu
raw_muni = muni_serv + muni_manu
null_cp = np.concatenate(null_serv_cp+[null_manu_cp])
null_muni = np.concatenate(null_serv_muni+[null_manu_muni])
null_locs = np.concatenate([np.concatenate(null_serv_locs, axis=0),
null_manu_locs], axis=0)
# print null_serv_locs, null_manu_locs
# print null_cp.shape, null_muni.shape, null_locs.shape
# nulls = np.logical_and(np.logical_not(null_cp),
# np.logical_not(null_muni))
nnulls = np.logical_and(np.logical_not(null_cp),
np.logical_not(null_locs))
new_raw_locs = raw_locs[nnulls]
new_raw_cps = [raw_cps[i] for i in range(len(raw_cps)) if nnulls[i]]
new_raw_muni = [raw_muni[i] for i in range(len(raw_muni)) if nnulls[i]]
# print nnulls.sum(), len(new_raw_locs), len(new_raw_cps), len(raw_cps)
# Preparing fill locations
mean_locs, std_locs, u_cps =\
get_statistics2fill_locations(new_raw_locs, new_raw_cps)
assert(len(mean_locs) == len(std_locs))
assert(len(std_locs) == len(u_cps))
# Changing manufactures
manufactures.columns = clean_colnames_manu(manufactures.columns)
manufactures = create_sector_columns(manufactures, 'manufactures')
manufactures = compute_apertura_cierre(manufactures)
manufactures = fill_nulls(manufactures, mean_locs, std_locs, u_cps,
new_raw_muni, new_raw_cps, new_raw_locs,
os.path.join(self.pathdata, 'extra'))
manufactures = create_CA_column(manufactures, ca_cp_dict)
assert('ca' in list(manufactures.columns))
#
# manufactures = fill_locations(manufactures, loc_vars, reg_var,
# mean_locs, std_locs, u_cps)
#
self.close_subprocess([1], t0)
## 3. Standarization and join data
t0 = self.set_subprocess([2])
empresas_atemporal = []
for servicios, ca_name in get_sequencial_servicios(pathdata):
assert(pd.isnull(servicios['nom']).sum() == 0)
servicios = fill_nulls(servicios, mean_locs, std_locs, u_cps,
new_raw_muni, new_raw_cps, new_raw_locs,
os.path.join(self.pathdata, 'extra'))
assert(pd.isnull(servicios['nom']).sum() == 0)
servicios = compute_apertura_cierre(servicios)
assert(pd.isnull(servicios['nom']).sum() == 0)
servicios = create_sector_columns(servicios, 'servicios')
assert(pd.isnull(servicios['nom']).sum() == 0)
servicios.loc[:, 'ca'] = ca_name
join_and_store_empresas_temporal(servicios, manufactures, ca_name,
self.pathdata)
empresas_atemporal_i =\
join_empresas_atemporal(servicios, manufactures, ca_name)
empresas_atemporal.append(empresas_atemporal_i)
store_empresas_atemporal_years(empresas_atemporal_i, ca_name,
self.pathdata)
join_and_store_empresas_atemporal(empresas_atemporal, self.pathdata)
financial_interpolation(self.pathdata)
# Write extradata
u_CA = list(set(ca_cp_dict.values()))
# u_cps = np.unique([e for e in raw_cps if e != float('nan')])
# u_cps = np.unique([e for e in raw_cps if e != '00nan'])
write_ca_cp(ca_cp_dict, self.pathdata)
write_locs_statistics(mean_locs, std_locs, u_cps, self.pathdata)
write_uncorrect_locs(nifs, raw_locs, self.pathdata)
write_ca2code(u_CA, self.pathdata)
write_nif2code(nifs, self.pathdata)
write_cp2code(u_cps, self.pathdata)
write_nif2names(nifs, names, self.pathdata)
self.close_subprocess([2], t0)
self.close_process(t00)
def _self_store(self, namefile):
with shelve.open(namefile) as db:
db['preprocessor'] = self
class Firms_Preprocessor(Processer):
"Special class to preprocess firms data."
def _initialization(self):
self.projection_values = None
self.map_vars = None
self.map_indices = None
self.map_vars = ['cnae', 'cp']
## General process parameters
self.proc_name = "Preprocess empresas"
self.proc_desc = "Preprocessing data in order to be treatable"
self.subproc_desc = ["Reindexing", "Locations transformation",
"Features transformation"]
self.t_expended_subproc = [0, 0, 0]
def __init__(self, typevars, logfile):
self._initialization()
self.typevars = typevars
self.logfile = logfile
def preprocess(self, empresas, cnae_lvl=2, method_proj='ellipsoidal', radians=False):
"Function to preprocess firms data."
## 0. Set vars
t00 = self.setting_global_process()
# Important vars
financial_vars = [e for e in self.typevars['feat_vars'] if e != 'cnae']
loc_vars = self.typevars['loc_vars']
self.projection_values = [loc_vars, method_proj, True, radians]
# 1. Indices
t0 = self.set_subprocess([0])
self.map_indices = zip(list(empresas.index), range(empresas.shape[0]))
empresas.index = range(empresas.shape[0])
self.close_subprocess([0], t0)
# 2. Location transformation
t0 = self.set_subprocess([1])
empresas[loc_vars] = general_projection(empresas, loc_vars,
method=method_proj,
inverse=False,
radians=radians)
self.close_subprocess([1], t0)
## 3. Feature array
t0 = self.set_subprocess([2])
# cnae variable
empresas.loc[:, 'cnae'] = transform_cnae_col(empresas['cnae'], cnae_lvl)
# generate replacement in discrete vars
t_vals = {'cnae': sorted(list(empresas['cnae'].unique())),
'cp': sorted(list(empresas['cp'].unique()))}
self.map_info = generate_replace(t_vals)
# Map discrete variables
mpvars = self.map_vars
empresas.loc[:, mpvars] = empresas.loc[:, mpvars].replace(self.map_info).astype(int)
# Financial variables
### TODO
self.close_subprocess([2], t0)
## Untrack process
self.close_process(t00)
return empresas
def reverse_preprocess(self, empresas):
## 1. Inverse transformation of locations
projection_values = self.projection_values
empresas[loc_vars] = general_projection(empresas, *projection_values)
## 2. Inverse mapping
##TODO
return empresas
| 41.606061 | 92 | 0.642753 |
hexsha: 0bde7cc4a42695e68bcdc268df4bfdd6257cd39d | size: 1572 | ext: py | lang: Python
repo_path: driver/pullBuzzfeed.py (issues row: driver-final/pullBuzzfeed.py) | repo_name: georgetown-analytics/team-buzzfeed | head_hexsha: a0b1d0481ec3ff3b3134b281865825aaf5effe07 | licenses: ["MIT"]
stars: 4 (2016-05-21T19:28:14.000Z .. 2019-12-23T04:29:53.000Z) | issues: 5 (2016-06-23T18:20:10.000Z .. 2016-07-11T21:14:30.000Z) | forks: 1 (2020-03-04T01:21:38.000Z .. 2020-03-04T01:21:38.000Z)
import urllib2
import json
import datetime
def getfeed (country, data):
webUrl = urllib2.urlopen(data)
jsonfeed = "initialized"
if webUrl.getcode() == 200:
jsonfeed = webUrl.read()
else:
jsonfeed = "Received an error from server, cannot get feeds.\n"
now = datetime.datetime.now()
# DIRECTORY WHERE DATA FILES WILL BE STORED
datapath = "/home/ubuntu/buzzfeeddata/"
# WRITE FEED TO FILENAME CCYYYYMMDDHHMMSS, CC IS COUNTRY CODE
todaydate = str(now.date())
nowtime = str(now.time())
feedfilename = str(todaydate) + str(nowtime)
feedfilename = feedfilename.replace("-","")
feedfilename = feedfilename.replace("-","")
feedfilename = feedfilename.replace(":","")
feedfilename = feedfilename.partition(".")[0]
feedfilename = datapath + country + feedfilename + ".txt"
f = open(feedfilename, "w")
f.write(jsonfeed)
f.close()
def main():
urlData="https://www.buzzfeed.com/api/v2/feeds/trending?country=en-us"
getfeed("us",urlData)
urlData="https://www.buzzfeed.com/api/v2/feeds/trending?country=en-uk"
getfeed("uk",urlData)
urlData="https://www.buzzfeed.com/api/v2/feeds/trending?country=en-au"
getfeed("au",urlData)
urlData="https://www.buzzfeed.com/api/v2/feeds/trending?country=en-in"
getfeed("in",urlData)
urlData="https://www.buzzfeed.com/api/v2/feeds/trending?country=en-ca"
getfeed("ca",urlData)
urlData="https://www.buzzfeed.com/api/v2/feeds/trending?country=en-nz"
getfeed("nz",urlData)
if __name__ == "__main__":
main()
| 33.446809 | 74 | 0.673028 |
hexsha: ce14b61ea206b2dc95c8719eadf03b5e016e0936 | size: 12587 | ext: py | lang: Python
repo_path: RandomWorkflowSpecGenerator.py | repo_name: loicmiller/policy-analysis | head_hexsha: 2c52513440633962b997cf8958ed69d63aabb643 | licenses: ["MIT"]
stars: null | issues: null | forks: null
###############################################################################
# Imports
import sys
import argparse # Argument parser
import networkx as nx
import matplotlib.pyplot as plt
import random
from sympy import to_dnf
from sympy.parsing.sympy_parser import parse_expr
import ast # String to dict
from importlib import import_module # Import policy metagraph
from pathlib import Path # Create subfolders if not exists
import os
# Combinatorics
import operator as op
from functools import reduce
###############################################################################
# General utility
# Exit the program
def terminate_app(code):
print("Exiting program...")
sys.exit(code)
###############################################################################
# Argument parser
class Range(object):
def __init__(self, start, end):
self.start = start
self.end = end
def __eq__(self, other):
return self.start <= other <= self.end
def get_parser():
# Get parser for command line arguments
parser = argparse.ArgumentParser(description="Random workflow specification generator", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--version", action="version", version='%(prog)s 1.0')
parser.add_argument("-v", "--verbose", action="count", default=0, help="increase output verbosity")
parser.add_argument("-p", "--plot", action="store_true", help="generate plot for this metagraph")
parser.add_argument("-g", "--generating-set-size", type=int, metavar="GEN_SET_SIZE", default=10, help="size of generating set for the generated workflow")
parser.add_argument("-e", "--edge-probability", type=float, choices=[Range(0.0, 1.0)], metavar="EDGE_PROB", default=1.0, help="probability of edges in the generated workflow")
parser.add_argument("-s", "--policy-size", type=int, metavar="POLICY_SIZE", default=0, help="size of edge policies in the generated workflow")
parser.add_argument("-m", "--max-vertex-size", type=int, metavar="MAX_VERTEX_SIZE", default=2, help="maximum size of vertices in the generated workflow")
parser.add_argument("-f", "--edge-ratio", type=float, metavar="EDGE_RATIO", default=1.0, help="ratio of edges to elements of the generating set in the generated workflow")
return parser
###############################################################################
# Functions
# Old graph generator - UNUSED
def generate_workflow_graph(node_number, edge_probability):
G = nx.gnp_random_graph(node_number, edge_probability, directed=True)
if glob_verbose >= 2:
print("Graph nodes: {}".format(G.nodes))
print("Graph edges: {}\n".format(G.edges))
nodes = list(G.nodes)
for src, dst in zip(nodes, nodes[1:]):
if not G.has_edge(src, dst):
if glob_verbose >= 2:
print("Creating edge: {} {}".format(src, dst))
G.add_edge(src, dst)
if glob_verbose >= 1:
print("Clean graph nodes: {}".format(G.nodes))
print("Clean graph edges: {}\n".format(G.edges))
return G
# Generates disjoint invertex and outvertex from generating set
def generate_vertices(generating_set, max_vertex_size):
if max_vertex_size <= 1 or len(generating_set) <= 1:
invertex = set(random.sample(generating_set, 1))
else:
invertex = set(random.sample(generating_set, random.randrange(1, min(len(generating_set) - 1, max_vertex_size)))) # Leave at least one element for outvertex
remaining_elements = generating_set.difference(invertex)
if len(remaining_elements) > 1:
if max_vertex_size <= 1:
outvertex = set(random.sample(remaining_elements, 1))
else:
outvertex = set(random.sample(remaining_elements, random.randrange(1, min(len(remaining_elements) + 1, max_vertex_size)))) # Pick from remaining elements
else: # len(remaining_elements) == 1
outvertex = remaining_elements
if not invertex.isdisjoint(outvertex): # Make sure invertex and outvertex are disjoint
terminate_app(1)
return invertex, outvertex
def proposition_combination(propositions, operators):
# Pick two random attributes
first_rand = random.randrange(len(propositions))
second_rand = random.randrange(len(propositions))
while second_rand == first_rand:
second_rand = random.randrange(len(propositions))
first_prop = propositions[first_rand]
second_prop = propositions[second_rand]
# Generate expression
expression = "((" + first_prop + ") " + operators[random.randrange(len(operators))] + " (" + second_prop + "))"
return expression
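# Purely illustrative example: expr_generator(propositions, ["&", "|"], 2)
# can return something like
# "(((is_admin) & (time < 8)) | ((is_customer) | (tenure > 10)))"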
def expr_generator(propositions, operators, size):
if size == 0:
return ""
elif size == 1:
return proposition_combination(propositions, operators)
return "(" + expr_generator(propositions, operators, size-1) + " " + operators[random.randrange(len(operators))] + " " + expr_generator(propositions, operators, size-1) + ")"
###############################################################################
# Main
def main(verbose, plot, generating_set_size, edge_probability, policy_size, max_vertex_size, edge_ratio):
global glob_verbose
glob_verbose = verbose
print("\n\n\n###############################################################################")
print("Generating workflow metagraph")
print("###############################################################################")
print("Number of elements in generating set: {}".format(generating_set_size))
print("Edge probability: {}".format(edge_probability))
generating_set = {el for el in range(generating_set_size)}
print("Generating set: {}".format(generating_set))
edges_numb = int(abs(generating_set_size * edge_ratio)) # Match ratio of Matthew article
if glob_verbose >= 1:
subsets_numb = (2**generating_set_size) # Number of subsets
proper_subsets_numb = subsets_numb - 2 # Proper subsets with no empty set
# https://math.stackexchange.com/questions/1223425/total-number-of-unordered-pairs-of-disjoint-subsets-of-s/1223442
disjoint_subsets_numb_ordered = 3**generating_set_size
disjoint_subsets_numb_ordered_non_empty = disjoint_subsets_numb_ordered - ((2 * subsets_numb) - 1) # Remove ordered pairs with empty set
disjoint_subsets_numb_unordered = int((((disjoint_subsets_numb_ordered) - 1) // 2) + 1)
disjoint_subsets_numb_unordered_non_empty = disjoint_subsets_numb_unordered - subsets_numb # Remove unordered pairs with empty set
max_edges_numb = generating_set_size * (generating_set_size - 1)
print("Number of subsets: {}".format(subsets_numb))
print("Number of proper subsets (no empty set): {}".format(proper_subsets_numb))
print("Number of pairs of ordered disjoint subsets: {}".format(disjoint_subsets_numb_ordered))
print("Number of pairs of ordered disjoint subsets (no empty set): {}".format(disjoint_subsets_numb_ordered_non_empty))
print("Number of pairs of unordered disjoint subsets: {}".format(disjoint_subsets_numb_unordered))
print("Number of pairs of unordered disjoint subsets (no empty set): {}".format(disjoint_subsets_numb_unordered_non_empty))
print("Number of possible edges in simple graph: {}".format(max_edges_numb))
print("Generate {} edges for {} nodes\n".format(edges_numb, generating_set_size))
# Limit invertex/outvertex size to half of the generating set
edges = []
for i in range(edges_numb):
if random.uniform(0,1) <= edge_probability: # Percent chance
invertex, outvertex = generate_vertices(generating_set, max_vertex_size) # Generate disjoint invertex and outvertex
while (invertex, outvertex) in edges: # Can't have edge already in set
if glob_verbose >= 3:
print("{} already in list {}".format((invertex, outvertex), edges))
invertex, outvertex = generate_vertices(generating_set, max_vertex_size)
if glob_verbose >= 2:
print("Invertex: {}".format(invertex))
print("Outvertex: {}".format(outvertex))
edges.append((invertex, outvertex))
if glob_verbose >= 2:
print("Edge set ({} elements)".format(len(edges)))
print("Ratio of edges to possible edges vs edge probability: {:.3} --- {}".format((len(edges) / disjoint_subsets_numb_unordered_non_empty), edge_probability))
if glob_verbose >= 3:
print("Edges: {}".format(edges))
# Add simple edges for workflow-like loop, except back to owner
nodes = list(generating_set)
for src, dst in zip(nodes, nodes[1:]):
if ({src}, {dst}) not in edges:
if glob_verbose >= 2:
print("Creating edge: {} {}".format(src, dst))
edges.append(({src}, {dst}))
if glob_verbose >= 2:
print("Edge set ({} elements)".format(len(edges)))
print("Ratio of edges to possible edges vs edge probability: {:.3} --- {}".format((len(edges) / disjoint_subsets_numb_ordered_non_empty), edge_probability))
if glob_verbose >= 3:
print("Edges: {}".format(edges))
print("\nPolicy size: {}".format(policy_size))
    # Attach attribute-based policies to the edges
propositions = ["tenure > 10", "time < 8", "time > 17", "is_customer", "is_employee", "is_admin"]
if glob_verbose >= 1:
print("Allowed propositions: {}".format(propositions))
operators = ["&", "|"]
if glob_verbose >= 1:
print("Allowed operators: {}".format(operators))
edges_with_policy = []
for edge in edges:
edge_policy = expr_generator(propositions, operators, policy_size)
if glob_verbose >= 2:
print("\nEdge policy: {}".format(edge_policy))
edges_with_policy.append((edge[0], edge[1], edge_policy))
if glob_verbose >= 2:
print("\n")
if glob_verbose >= 1:
print("Generating set: {}".format(generating_set))
print("Edges {}".format(edges_with_policy))
print("\n\n###############################################################################")
print("Generating workflow specification file")
print("###############################################################################")
# Generate output workflow specification name
random_workflow_dir = "workflow-specs/randomly-generated/" + str(generating_set_size) + "-set-" + str(edge_probability).split('.')[0] + "-" + str(edge_probability).split('.')[-1] + "-edges-" + str(policy_size) + "-policy/"
Path(random_workflow_dir).mkdir(parents=True, exist_ok=True)
# Determine file uid
random_workflow_filenames = os.listdir(random_workflow_dir)
if not random_workflow_filenames: # Dir empty
uid = "1"
else:
uids = []
for random_workflow_filename in random_workflow_filenames:
uids.append(int(random_workflow_filename.split('.')[0]))
max_uid = max(uids)
uid = str(max_uid + 1)
output_spec_name = random_workflow_dir + uid + ".dat"
print("Output spec file: {}".format(output_spec_name))
workflow_spec = []
for edge in edges_with_policy:
if glob_verbose >= 1:
print(edge)
workflow_spec.append("{};{};{}\n".format(edge[0], edge[1], edge[2]))
# Writing policy to file
with open(output_spec_name, 'w') as output_spec:
output_spec.writelines(workflow_spec)
    if plot:  # Plot workflow
        print("\n\n###############################################################################")
        print("Plotting workflow graph")
        print("###############################################################################")
        # Build a plain directed graph from the generated metagraph edges for
        # plotting -- a sketch that flattens each ({invertex}, {outvertex})
        # pair into simple edges between their elements.
        G = nx.DiGraph()
        for invertex, outvertex in edges:
            for src in invertex:
                for dst in outvertex:
                    G.add_edge(src, dst)
        plt.subplot(121)
        nx.draw(G, with_labels=True)
        plt.subplot(122)
        nx.draw(G, pos=nx.circular_layout(G), node_color='r', edge_color='b', with_labels=True)
        plt.show()
if __name__ == '__main__':
print("\n\n###############################################################################")
print("Getting arguments")
print("###############################################################################")
parser = get_parser() # Create a parser
args = parser.parse_args() # Parse arguments
print(args)
# Call main
main(args.verbose, args.plot, args.generating_set_size, args.edge_probability, args.policy_size, args.max_vertex_size, args.edge_ratio)
terminate_app(0)
###############################################################################
| 42.523649
| 226
| 0.617304
|
aa95d1b7da74ee198ba6235e775d017d8825bf17
| 4,261
|
py
|
Python
|
src/oscar/apps/catalogue/search_handlers.py
|
frmdstryr/django-oscar
|
32bf8618ebb688df6ba306dc7703de8e61b4e78c
|
[
"BSD-3-Clause"
] | null | null | null |
src/oscar/apps/catalogue/search_handlers.py
|
frmdstryr/django-oscar
|
32bf8618ebb688df6ba306dc7703de8e61b4e78c
|
[
"BSD-3-Clause"
] | null | null | null |
src/oscar/apps/catalogue/search_handlers.py
|
frmdstryr/django-oscar
|
32bf8618ebb688df6ba306dc7703de8e61b4e78c
|
[
"BSD-3-Clause"
] | null | null | null |
from django.conf import settings
from django.utils.module_loading import import_string
from django.views.generic.list import MultipleObjectMixin
from oscar.core.loading import get_class, get_model
BrowseCategoryForm = get_class('search.forms', 'BrowseCategoryForm')
SearchHandler = get_class('search.search_handlers', 'SearchHandler')
is_solr_supported = get_class('search.features', 'is_solr_supported')
is_elasticsearch_supported = get_class('search.features', 'is_elasticsearch_supported')
Product = get_model('catalogue', 'Product')
def get_product_search_handler_class():
"""
Determine the search handler to use.
    Solr and Elasticsearch are supported as search backends; if neither is
    enabled, it falls back to rudimentary category browsing.
"""
# Use get_class to ensure overridability
if settings.OSCAR_PRODUCT_SEARCH_HANDLER is not None:
return import_string(settings.OSCAR_PRODUCT_SEARCH_HANDLER)
if is_solr_supported():
return get_class('catalogue.search_handlers', 'SolrProductSearchHandler')
elif is_elasticsearch_supported():
return get_class(
'catalogue.search_handlers', 'ESProductSearchHandler',
)
else:
return get_class(
'catalogue.search_handlers', 'SimpleProductSearchHandler')
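# A hypothetical override via settings, using the import_string branch above
# (the handler path is illustrative, not a real module):
#
#   OSCAR_PRODUCT_SEARCH_HANDLER = 'myproject.search.MyProductSearchHandler'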
class SolrProductSearchHandler(SearchHandler):
"""
Search handler specialised for searching products. Comes with optional
category filtering. To be used with a Solr search backend.
"""
form_class = BrowseCategoryForm
model_whitelist = [Product]
paginate_by = settings.OSCAR_PRODUCTS_PER_PAGE
def __init__(self, request_data, full_path, categories=None):
self.categories = categories
super().__init__(request_data, full_path)
def get_search_queryset(self):
sqs = super().get_search_queryset()
if self.categories:
# We use 'narrow' API to ensure Solr's 'fq' filtering is used as
# opposed to filtering using 'q'.
pattern = ' OR '.join([
'"%s"' % sqs.query.clean(c.full_name) for c in self.categories])
sqs = sqs.narrow('category_exact:(%s)' % pattern)
sqs = sqs.filter_and(is_enabled=True)
return sqs
class ESProductSearchHandler(SearchHandler):
"""
Search handler specialised for searching products. Comes with optional
category filtering. To be used with an ElasticSearch search backend.
"""
form_class = BrowseCategoryForm
model_whitelist = [Product]
paginate_by = settings.OSCAR_PRODUCTS_PER_PAGE
def __init__(self, request_data, full_path, categories=None):
self.categories = categories
super().__init__(request_data, full_path)
def get_search_queryset(self):
sqs = super().get_search_queryset()
if self.categories:
sqs = sqs.filter_and(category__in=self.categories)
sqs = sqs.filter_and(is_enabled=True)
return sqs
class SimpleProductSearchHandler(MultipleObjectMixin):
"""
A basic implementation of the full-featured SearchHandler that has no
faceting support, but doesn't require a Haystack backend. It only
supports category browsing.
    Note that it is meant as a replacement search handler and not as a view
mixin; the mixin just does most of what we need it to do.
"""
paginate_by = settings.OSCAR_PRODUCTS_PER_PAGE
def __init__(self, request_data, full_path, categories=None):
self.categories = categories
self.kwargs = {'page': request_data.get('page', 1)}
self.object_list = self.get_queryset()
def get_queryset(self):
qs = Product.objects.browsable().base_queryset()
qs = qs.filter(is_enabled=True)
if self.categories:
qs = qs.filter(categories__in=self.categories).distinct()
return qs
def get_search_context_data(self, context_object_name):
# Set the context_object_name instance property as it's needed
# internally by MultipleObjectMixin
self.context_object_name = context_object_name
context = self.get_context_data(object_list=self.object_list)
context[context_object_name] = context['page_obj'].object_list
return context
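# Minimal usage sketch for the handler API above (names are illustrative; a
# view would normally obtain the class via get_product_search_handler_class()):
#
#   handler = SimpleProductSearchHandler(request.GET, request.get_full_path(),
#                                        categories=[some_category])
#   context = handler.get_search_context_data('products')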
| 38.387387
| 87
| 0.710631
|
69449388b68572adf5a23cbaa318947049736303
| 45
|
py
|
Python
|
src/toolkit/scripts/__init__.py
|
pombredanne/fabric8-analytics-nvd-toolkit
|
c0e2b963f0fda974007fa56809a87b5d4eb63ffb
|
[
"Apache-2.0"
] | 5
|
2018-05-17T10:09:41.000Z
|
2020-12-13T15:04:12.000Z
|
src/toolkit/scripts/__init__.py
|
pombredanne/fabric8-analytics-nvd-toolkit
|
c0e2b963f0fda974007fa56809a87b5d4eb63ffb
|
[
"Apache-2.0"
] | 68
|
2018-04-17T20:26:19.000Z
|
2021-06-01T22:08:36.000Z
|
src/toolkit/scripts/__init__.py
|
pombredanne/fabric8-analytics-nvd-toolkit
|
c0e2b963f0fda974007fa56809a87b5d4eb63ffb
|
[
"Apache-2.0"
] | 6
|
2018-04-17T10:09:24.000Z
|
2020-12-02T17:56:35.000Z
|
"""Package containing executable scripts."""
| 22.5
| 44
| 0.755556
|
64180f8bb2778b4afc038c8e233d2c7c887c4cbd
| 1,471
|
py
|
Python
|
cc2ai/scripts/make_cube_difference.py
|
jcartus/SCFInitialGuess
|
e4a9280e8cbabb126946e47affa652243b74753c
|
[
"MIT"
] | 1
|
2020-03-02T02:36:59.000Z
|
2020-03-02T02:36:59.000Z
|
cc2ai/scripts/make_cube_difference.py
|
jcartus/SCFInitialGuess
|
e4a9280e8cbabb126946e47affa652243b74753c
|
[
"MIT"
] | null | null | null |
cc2ai/scripts/make_cube_difference.py
|
jcartus/SCFInitialGuess
|
e4a9280e8cbabb126946e47affa652243b74753c
|
[
"MIT"
] | null | null | null |
"""This script will create the difference between two cube files
for the same molecule
Author:
Johannes Cartus, QCIEP, TU Graz
"""
import numpy as np
import argparse
def main(cube_1, cube_2, outfile):
with open(cube_1, 'r') as f1:
lines1 = f1.readlines()
with open(cube_2, 'r') as f2:
lines2 = f2.readlines()
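    # Gaussian cube layout: two comment lines, one line with the atom count
    # and grid origin, three voxel-axis lines, then one line per atom -- hence
    # the 6 + number_of_atoms header rows sliced off below.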
number_of_atoms = int(lines1[2].split()[0])
end_of_header = 6 + number_of_atoms # first row after header
header = lines1[:(end_of_header)]
    def extract(x):
        return np.array(list(map(float, x.split())))
    def fmt(x):  # fixed-width float formatting; avoids shadowing the built-in format()
        return "{:12.6f}".format(x)
    # 'w' so each run writes a fresh diff file instead of appending to an old one
    with open(outfile, 'w') as fout:
        fout.write("".join(header))
        for (lhs, rhs) in zip(lines1[end_of_header:], lines2[end_of_header:]):
            diff = extract(lhs) - extract(rhs)
            fout.write("".join(map(fmt, diff)) + "\n")
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog='PROG',
description="This program calculate the difference of two cube files."
)
    parser.add_argument(
        "--lhs",
        help="left-hand cube file (lhs - rhs = out)",
        dest="lhs"
    )
    parser.add_argument(
        "--rhs",
        help="right-hand cube file (lhs - rhs = out)",
        dest="rhs"
    )
    parser.add_argument(
        "--out",
        help="output cube file (lhs - rhs = out)",
        dest="out"
    )
args = parser.parse_args()
main(
args.lhs,
args.rhs,
args.out
)
| 20.150685
| 79
| 0.560843
|
82c31e3b1cf6f9d2aec1edf5770f21944be99374
| 8,361
|
py
|
Python
|
esque/cli/commands/produce.py
|
real-digital/esque
|
0b779fc308ce8bce45c1903f36c33664b2e832e7
|
[
"MIT"
] | 29
|
2019-05-10T21:12:38.000Z
|
2021-08-24T08:09:49.000Z
|
esque/cli/commands/produce.py
|
real-digital/esque
|
0b779fc308ce8bce45c1903f36c33664b2e832e7
|
[
"MIT"
] | 103
|
2019-05-17T07:21:41.000Z
|
2021-12-02T08:29:00.000Z
|
esque/cli/commands/produce.py
|
real-digital/esque
|
0b779fc308ce8bce45c1903f36c33664b2e832e7
|
[
"MIT"
] | 2
|
2019-05-28T06:45:14.000Z
|
2019-11-21T00:33:15.000Z
|
import pathlib
import click
from esque.cli.autocomplete import list_contexts, list_topics
from esque.cli.helpers import ensure_approval
from esque.cli.options import State, default_options
from esque.cli.output import blue_bold, green_bold
from esque.cluster import Cluster
from esque.io.handlers import BaseHandler, KafkaHandler, PathHandler, PipeHandler
from esque.io.handlers.kafka import KafkaHandlerConfig
from esque.io.handlers.path import PathHandlerConfig
from esque.io.handlers.pipe import PipeHandlerConfig
from esque.io.pipeline import PipelineBuilder
from esque.io.serializers import RawSerializer, RegistryAvroSerializer, StringSerializer
from esque.io.serializers.base import MessageSerializer
from esque.io.serializers.raw import RawSerializerConfig
from esque.io.serializers.registry_avro import RegistryAvroSerializerConfig
from esque.io.serializers.string import StringSerializerConfig
from esque.io.stream_decorators import event_counter, yield_only_matching_messages
from esque.resources.topic import Topic
@click.command("produce")
@click.argument("topic", autocompletion=list_topics)
@click.option(
"-d",
"--directory",
metavar="<directory>",
help="Directory containing Kafka messages.",
type=click.STRING,
required=False,
)
@click.option(
"-t",
"--to",
"to_context",
metavar="<destination_ctx>",
help="Destination context.",
type=click.STRING,
autocompletion=list_contexts,
required=False,
)
@click.option(
"-m",
"--match",
metavar="<filter_expresion>",
help="Message filtering expression.",
type=click.STRING,
required=False,
)
@click.option(
"-a",
"--avro",
help="Set this flag if the topic contains avro data. This flag is mutually exclusive with the --binary flag",
default=False,
is_flag=True,
)
@click.option(
"-b",
"--binary",
help="Set this flag if the topic contains binary data. Or the data should not be (de-)serialized. "
"This flag is mutually exclusive with the --avro flag",
default=False,
is_flag=True,
)
@click.option(
"--stdin", "read_from_stdin", help="Read messages from STDIN instead of a directory.", default=False, is_flag=True
)
@click.option(
"-y",
"--ignore-errors",
"ignore_stdin_errors",
help="Only when reading from STDIN. If JSON validation fails, write the malformed JSON as a string in message value"
" (without key and specified partition assignment).",
default=False,
is_flag=True,
)
@default_options
def produce(
state: State,
topic: str,
to_context: str,
directory: str,
avro: bool,
binary: bool,
match: str = None,
read_from_stdin: bool = False,
ignore_stdin_errors: bool = False,
):
"""Produce messages to a topic.
Write messages to a given topic in a given context. These messages can come from either a directory <directory>
that was previously written to with "esque consume" or from JSON objects coming in via STDIN.
If reading from STDIN, then data will be expected as single-line JSON objects with the message key and the
message value always being a string.
The --avro option is currently not supported when reading from STDIN.
With the --binary option those strings are expected to contain the base64 encoded binary data.
    By default, the data in the messages is treated as utf-8 encoded strings and will be used as-is.
    In addition to "key" and "value" one can also define headers as a list of objects with a "key" and a "value" attribute
    with the former being a string and the latter being a string, "null" or simply not defined.
\b
So valid json objects for reading from stdin would be:
{"key": "foo", "value": "bar", "headers":[{"key":"h1", "value":"v1"},{"key":"h2"}]}
{"key": "foo", "value": null, "partition": 1}
{"key": "foo"}
\b
EXAMPLES:
# Write all messages from the files in <directory> to TOPIC in the <destination_ctx> context.
esque produce -d <directory> -t <destination_ctx> TOPIC
\b
    # Start an interactive session in the terminal to write messages to TOPIC in the <destination_ctx> context.
    esque produce --stdin -t <destination_ctx> -y TOPIC
\b
# Copy source_topic to destination_topic.
esque consume -f first-context --stdout source_topic | esque produce -t second-context --stdin destination_topic
"""
if not to_context:
to_context = state.config.current_context
state.config.context_switch(to_context)
if not read_from_stdin:
if not directory:
raise ValueError("Need to provide directory if not reading from stdin.")
else:
directory = pathlib.Path(directory)
elif avro:
raise ValueError("Cannot read avro data from stdin. Use a directory instead.")
if binary and avro:
raise ValueError("Cannot set data to be interpreted as binary AND avro.")
topic_controller = Cluster().topic_controller
if not topic_controller.topic_exists(topic):
if ensure_approval(f"Topic {topic!r} does not exist, do you want to create it?", no_verify=state.no_verify):
topic_controller.create_topics([Topic(topic)])
else:
click.echo(click.style("Aborted!", bg="red"))
return
builder = PipelineBuilder()
input_handler = create_input_handler(directory, read_from_stdin)
builder.with_input_handler(input_handler)
input_message_serializer = create_input_message_serializer(directory, avro, binary)
builder.with_input_message_serializer(input_message_serializer)
output_message_serializer = create_output_serializer(avro, binary, topic, state)
builder.with_output_message_serializer(output_message_serializer)
output_handler = create_output_handler(to_context, topic)
builder.with_output_handler(output_handler)
if match:
builder.with_stream_decorator(yield_only_matching_messages(match))
counter, counter_decorator = event_counter()
builder.with_stream_decorator(counter_decorator)
pipeline = builder.build()
pipeline.run_pipeline()
click.echo(
green_bold(str(counter.message_count))
+ " messages successfully produced to topic "
+ blue_bold(topic)
+ " in context "
+ blue_bold(to_context)
+ "."
)
def create_output_handler(to_context: str, topic: str):
output_handler = KafkaHandler(KafkaHandlerConfig(scheme="kafka", host=to_context, path=topic))
return output_handler
def create_output_serializer(avro: bool, binary: bool, topic: str, state: State) -> MessageSerializer:
if binary:
key_serializer = RawSerializer(RawSerializerConfig(scheme="raw"))
value_serializer = key_serializer
elif avro:
config = RegistryAvroSerializerConfig(scheme="reg-avro", schema_registry_uri=state.config.schema_registry)
key_serializer = RegistryAvroSerializer(config.with_key_subject_for_topic(topic))
value_serializer = RegistryAvroSerializer(config.with_value_subject_for_topic(topic))
else:
key_serializer = StringSerializer(StringSerializerConfig(scheme="str"))
value_serializer = key_serializer
message_serializer = MessageSerializer(key_serializer=key_serializer, value_serializer=value_serializer)
return message_serializer
def create_input_handler(directory: pathlib.Path, read_from_stdin: bool) -> BaseHandler:
if read_from_stdin:
handler = PipeHandler(PipeHandlerConfig(scheme="pipe", host="stdin", path=""))
else:
if not directory:
raise ValueError("Need to provide a directory to read from!")
handler = PathHandler(PathHandlerConfig(scheme="path", host="", path=str(directory)))
click.echo(f"Reading data from {blue_bold(str(directory))}.")
return handler
def create_input_message_serializer(directory: pathlib.Path, avro: bool, binary: bool) -> MessageSerializer:
if avro:
serializer = RegistryAvroSerializer(
RegistryAvroSerializerConfig(scheme="reg-avro", schema_registry_uri=f"path:///{directory}")
)
elif binary:
serializer = RawSerializer(RawSerializerConfig(scheme="raw"))
else:
serializer = StringSerializer(StringSerializerConfig(scheme="str"))
return MessageSerializer(key_serializer=serializer, value_serializer=serializer)
| 37.662162
| 120
| 0.723478
|
8f2ca07c7c4719c6c2f39842c73446bf1b49bff3
| 572
|
py
|
Python
|
app/modules/passthroughs/__init__.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | 6
|
2021-04-06T19:50:52.000Z
|
2022-01-19T17:42:33.000Z
|
app/modules/passthroughs/__init__.py
|
WildMeOrg/houston
|
8102229421388e44234c07ee6cb73bf705b6fba0
|
[
"Apache-2.0"
] | 491
|
2021-01-20T01:10:00.000Z
|
2022-03-31T19:30:48.000Z
|
app/modules/passthroughs/__init__.py
|
karenc/houston
|
4eaaaf11d61394035e34b55bb847ea7eb4099c61
|
[
"Apache-2.0"
] | 2
|
2021-03-12T02:33:55.000Z
|
2021-03-16T20:18:43.000Z
|
# -*- coding: utf-8 -*-
"""
Passthroughs module
===================
"""
from app.extensions.api import api_v1
def init_app(app, **kwargs):
# pylint: disable=unused-argument,unused-variable
"""
Init Passthroughs module.
"""
api_v1.add_oauth_scope(
'passthroughs:read', 'Provide access to EDM and ACM passthroughs'
)
api_v1.add_oauth_scope(
'passthroughs:write', 'Provide write access to EDM and ACM passthroughs'
)
# Touch underlying modules
from . import resources # NOQA
api_v1.add_namespace(resources.edm_pass)
| 22
| 80
| 0.657343
|
4310583468769ebfb9a3a5ed254cd4b4a68662b8
| 737
|
py
|
Python
|
metacritic/metacritic/metacritic/pipelines.py
|
hellozeyu/Web-Scraping-AMA
|
ee9951e5a34b93b35d5f53f999d807199e8754a2
|
[
"MIT"
] | null | null | null |
metacritic/metacritic/metacritic/pipelines.py
|
hellozeyu/Web-Scraping-AMA
|
ee9951e5a34b93b35d5f53f999d807199e8754a2
|
[
"MIT"
] | null | null | null |
metacritic/metacritic/metacritic/pipelines.py
|
hellozeyu/Web-Scraping-AMA
|
ee9951e5a34b93b35d5f53f999d807199e8754a2
|
[
"MIT"
] | 2
|
2020-04-14T13:14:03.000Z
|
2021-02-14T20:39:35.000Z
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy.exporters import CsvItemExporter
class MetacriticPipeline(object):
def __init__(self):
self.filename = 'metacritic_scores.csv'
def open_spider(self, spider):
self.csvfile = open(self.filename, 'wb')
self.exporter = CsvItemExporter(self.csvfile)
self.exporter.start_exporting()
def close_spider(self, spider):
self.exporter.finish_exporting()
self.csvfile.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
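# Illustrative registration in the project's settings.py (the dotted path and
# priority are assumptions, not taken from this repo):
#
#   ITEM_PIPELINES = {'metacritic.pipelines.MetacriticPipeline': 300}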
| 26.321429
| 65
| 0.690638
|
224f74c7bc5844cea43da1bfb758a57c68f76b02
| 619
|
py
|
Python
|
sceptre/resolvers/no_value.py
|
ericabrauer/sceptre
|
b44797ae712e1e57c2421f2d93c35a4e8104c5ee
|
[
"Apache-2.0"
] | null | null | null |
sceptre/resolvers/no_value.py
|
ericabrauer/sceptre
|
b44797ae712e1e57c2421f2d93c35a4e8104c5ee
|
[
"Apache-2.0"
] | null | null | null |
sceptre/resolvers/no_value.py
|
ericabrauer/sceptre
|
b44797ae712e1e57c2421f2d93c35a4e8104c5ee
|
[
"Apache-2.0"
] | null | null | null |
from sceptre.resolvers import Resolver
class NoValue(Resolver):
"""This resolver resolves to nothing, functioning just like the AWS::NoValue special value. When
assigned to a resolvable Stack property, it will remove the config key/value from the stack or
the container on the stack where it has been assigned, as if this value wasn't assigned at all.
    This is mostly useful for simplifying conditional logic in Stack and StackGroup config files
    where, if a certain condition is met, a value is passed; otherwise it's not passed at all.
"""
def resolve(self) -> None:
return None
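# Hypothetical stack-config sketch (the `!no_value` tag name is an assumption;
# the actual tag depends on how this resolver is registered):
#
#   parameters:
#     OptionalParam: !no_value
#
# OptionalParam would then be dropped from the rendered config entirely,
# rather than being passed as an empty value.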
| 41.266667
| 100
| 0.741519
|
cda9a51eec4a42bce192dba762ea269e4ee25af6
| 33,659
|
py
|
Python
|
tests/app/celery/test_letters_pdf_tasks.py
|
davidbgk/notification-api
|
0ede6a61b48289236d1873124965d2bc22a9b27b
|
[
"MIT"
] | 1
|
2021-08-13T13:46:04.000Z
|
2021-08-13T13:46:04.000Z
|
tests/app/celery/test_letters_pdf_tasks.py
|
davidbgk/notification-api
|
0ede6a61b48289236d1873124965d2bc22a9b27b
|
[
"MIT"
] | null | null | null |
tests/app/celery/test_letters_pdf_tasks.py
|
davidbgk/notification-api
|
0ede6a61b48289236d1873124965d2bc22a9b27b
|
[
"MIT"
] | 1
|
2021-09-29T18:25:48.000Z
|
2021-09-29T18:25:48.000Z
|
from unittest.mock import Mock, call, ANY
import base64
import boto3
from PyPDF2.utils import PdfReadError
from moto import mock_s3
from flask import current_app
from freezegun import freeze_time
import pytest
import requests_mock
from botocore.exceptions import ClientError
from celery.exceptions import MaxRetriesExceededError, Retry
from requests import RequestException
from sqlalchemy.orm.exc import NoResultFound
from app.errors import VirusScanError
from app.celery.letters_pdf_tasks import (
create_letters_pdf,
get_letters_pdf,
collate_letter_pdfs_for_day,
group_letters,
letter_in_created_state,
process_virus_scan_passed,
process_virus_scan_failed,
process_virus_scan_error,
replay_letters_in_error,
_move_invalid_letter_and_update_status,
_sanitise_precompiled_pdf
)
from app.letters.utils import ScanErrorType
from app.models import (
KEY_TYPE_NORMAL,
KEY_TYPE_TEST,
Notification,
NOTIFICATION_CREATED,
NOTIFICATION_DELIVERED,
NOTIFICATION_PENDING_VIRUS_CHECK,
NOTIFICATION_SENDING,
NOTIFICATION_TECHNICAL_FAILURE,
NOTIFICATION_VALIDATION_FAILED,
NOTIFICATION_VIRUS_SCAN_FAILED,
)
from tests.app.db import create_notification, create_letter_branding
from tests.conftest import set_config_values
def test_should_have_decorated_tasks_functions():
assert create_letters_pdf.__wrapped__.__name__ == 'create_letters_pdf'
assert collate_letter_pdfs_for_day.__wrapped__.__name__ == 'collate_letter_pdfs_for_day'
assert process_virus_scan_passed.__wrapped__.__name__ == 'process_virus_scan_passed'
assert process_virus_scan_failed.__wrapped__.__name__ == 'process_virus_scan_failed'
assert process_virus_scan_error.__wrapped__.__name__ == 'process_virus_scan_error'
@pytest.mark.parametrize('personalisation', [{'name': 'test'}, None])
def test_get_letters_pdf_calls_notifications_template_preview_service_correctly(
notify_api, mocker, client, sample_letter_template, personalisation):
contact_block = 'Mr Foo,\n1 Test Street,\nLondon\nN1'
filename = 'opg'
with set_config_values(notify_api, {
'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview',
'TEMPLATE_PREVIEW_API_KEY': 'test-key'
}):
with requests_mock.Mocker() as request_mock:
mock_post = request_mock.post(
'http://localhost/notifications-template-preview/print.pdf', content=b'\x00\x01', status_code=200)
get_letters_pdf(
sample_letter_template,
contact_block=contact_block,
filename=filename,
values=personalisation)
assert mock_post.last_request.json() == {
'values': personalisation,
'letter_contact_block': contact_block,
'filename': filename,
'template': {
'subject': sample_letter_template.subject,
'content': sample_letter_template.content
}
}
@pytest.mark.parametrize('page_count,expected_billable_units', [
('1', 1),
('2', 1),
('3', 2)
])
def test_get_letters_pdf_calculates_billing_units(
notify_api, mocker, client, sample_letter_template, page_count, expected_billable_units):
contact_block = 'Mr Foo,\n1 Test Street,\nLondon\nN1'
filename = 'opg'
with set_config_values(notify_api, {
'TEMPLATE_PREVIEW_API_HOST': 'http://localhost/notifications-template-preview',
'TEMPLATE_PREVIEW_API_KEY': 'test-key'
}):
with requests_mock.Mocker() as request_mock:
request_mock.post(
'http://localhost/notifications-template-preview/print.pdf',
content=b'\x00\x01',
headers={'X-pdf-page-count': page_count},
status_code=200
)
_, billable_units = get_letters_pdf(
sample_letter_template, contact_block=contact_block, filename=filename, values=None)
assert billable_units == expected_billable_units
@freeze_time("2017-12-04 17:31:00")
def test_create_letters_pdf_calls_s3upload(mocker, sample_letter_notification):
mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', return_value=(b'\x00\x01', '1'))
mock_s3 = mocker.patch('app.letters.utils.s3upload')
create_letters_pdf(sample_letter_notification.id)
mock_s3.assert_called_with(
bucket_name=current_app.config['LETTERS_PDF_BUCKET_NAME'],
file_location='2017-12-04/NOTIFY.FOO.D.2.C.C.20171204173100.PDF',
filedata=b'\x00\x01',
region=current_app.config['AWS_REGION']
)
@freeze_time("2017-12-04 17:31:00")
def test_create_letters_pdf_calls_s3upload_for_test_letters(mocker, sample_letter_notification):
mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', return_value=(b'\x00\x01', '1'))
mock_s3 = mocker.patch('app.letters.utils.s3upload')
sample_letter_notification.key_type = 'test'
create_letters_pdf(sample_letter_notification.id)
mock_s3.assert_called_with(
bucket_name=current_app.config['TEST_LETTERS_BUCKET_NAME'],
file_location='NOTIFY.FOO.D.2.C.C.20171204173100.PDF',
filedata=b'\x00\x01',
region=current_app.config['AWS_REGION']
)
def test_create_letters_pdf_sets_billable_units(mocker, sample_letter_notification):
mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', return_value=(b'\x00\x01', 1))
mocker.patch('app.letters.utils.s3upload')
create_letters_pdf(sample_letter_notification.id)
noti = Notification.query.filter(Notification.reference == sample_letter_notification.reference).one()
assert noti.billable_units == 1
def test_create_letters_pdf_non_existent_notification(notify_api, mocker, fake_uuid):
with pytest.raises(expected_exception=NoResultFound):
create_letters_pdf(fake_uuid)
def test_create_letters_pdf_handles_request_errors(mocker, sample_letter_notification):
mock_get_letters_pdf = mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', side_effect=RequestException)
mock_retry = mocker.patch('app.celery.letters_pdf_tasks.create_letters_pdf.retry')
create_letters_pdf(sample_letter_notification.id)
assert mock_get_letters_pdf.called
assert mock_retry.called
def test_create_letters_pdf_handles_s3_errors(mocker, sample_letter_notification):
mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', return_value=(b'\x00\x01', 1))
error_response = {
'Error': {
'Code': 'InvalidParameterValue',
'Message': 'some error message from amazon',
'Type': 'Sender'
}
}
mock_s3 = mocker.patch('app.letters.utils.s3upload', side_effect=ClientError(error_response, 'operation_name'))
mock_retry = mocker.patch('app.celery.letters_pdf_tasks.create_letters_pdf.retry')
create_letters_pdf(sample_letter_notification.id)
assert mock_s3.called
assert mock_retry.called
def test_create_letters_pdf_sets_technical_failure_max_retries(mocker, sample_letter_notification):
mock_get_letters_pdf = mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', side_effect=RequestException)
mock_retry = mocker.patch(
'app.celery.letters_pdf_tasks.create_letters_pdf.retry', side_effect=MaxRetriesExceededError)
mock_update_noti = mocker.patch('app.celery.letters_pdf_tasks.update_notification_status_by_id')
create_letters_pdf(sample_letter_notification.id)
assert mock_get_letters_pdf.called
assert mock_retry.called
mock_update_noti.assert_called_once_with(sample_letter_notification.id, 'technical-failure')
def test_create_letters_gets_the_right_logo_when_service_has_no_logo(
notify_api, mocker, sample_letter_notification
):
mock_get_letters_pdf = mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', return_value=(b'\x00\x01', 1))
mocker.patch('app.letters.utils.s3upload')
mocker.patch('app.celery.letters_pdf_tasks.update_notification_status_by_id')
create_letters_pdf(sample_letter_notification.id)
mock_get_letters_pdf.assert_called_once_with(
sample_letter_notification.template,
contact_block=sample_letter_notification.reply_to_text,
filename=None,
values=sample_letter_notification.personalisation
)
# We only need this while we are migrating to the new letter_branding model
def test_create_letters_gets_the_right_logo_when_service_has_letter_branding_logo(
notify_api, mocker, sample_letter_notification
):
letter_branding = create_letter_branding(name='test brand', filename='test-brand')
sample_letter_notification.service.letter_branding = letter_branding
mock_get_letters_pdf = mocker.patch('app.celery.letters_pdf_tasks.get_letters_pdf', return_value=(b'\x00\x01', 1))
mocker.patch('app.letters.utils.s3upload')
mocker.patch('app.celery.letters_pdf_tasks.update_notification_status_by_id')
create_letters_pdf(sample_letter_notification.id)
mock_get_letters_pdf.assert_called_once_with(
sample_letter_notification.template,
contact_block=sample_letter_notification.reply_to_text,
filename=sample_letter_notification.service.letter_branding.filename,
values=sample_letter_notification.personalisation
)
def test_collate_letter_pdfs_for_day(notify_api, mocker):
mock_s3 = mocker.patch('app.celery.tasks.s3.get_s3_bucket_objects', return_value=[
{'Key': 'B.pDf', 'Size': 2},
{'Key': 'A.PDF', 'Size': 1},
{'Key': 'C.pdf', 'Size': 3}
])
mock_group_letters = mocker.patch('app.celery.letters_pdf_tasks.group_letters', return_value=[
[{'Key': 'A.PDF', 'Size': 1}, {'Key': 'B.pDf', 'Size': 2}],
[{'Key': 'C.pdf', 'Size': 3}]
])
mock_celery = mocker.patch('app.celery.letters_pdf_tasks.notify_celery.send_task')
collate_letter_pdfs_for_day('2017-01-02')
mock_s3.assert_called_once_with('test-letters-pdf', subfolder='2017-01-02')
mock_group_letters.assert_called_once_with(sorted(mock_s3.return_value, key=lambda x: x['Key']))
assert mock_celery.call_args_list[0] == call(
name='zip-and-send-letter-pdfs',
kwargs={
'filenames_to_zip': ['A.PDF', 'B.pDf'],
'upload_filename': 'NOTIFY.2017-01-02.001.oqdjIM2-NAUU9Sm5Slmi.ZIP'
},
queue='process-ftp-tasks',
compression='zlib'
)
assert mock_celery.call_args_list[1] == call(
name='zip-and-send-letter-pdfs',
kwargs={
'filenames_to_zip': ['C.pdf'],
'upload_filename': 'NOTIFY.2017-01-02.002.tdr7hcdPieiqjkVoS4kU.ZIP'
},
queue='process-ftp-tasks',
compression='zlib'
)
@freeze_time('2018-09-12 17:50:00')
def test_collate_letter_pdfs_for_day_works_without_date_param(notify_api, mocker):
mock_s3 = mocker.patch('app.celery.tasks.s3.get_s3_bucket_objects')
collate_letter_pdfs_for_day()
expected_date = '2018-09-12'
mock_s3.assert_called_once_with('test-letters-pdf', subfolder=expected_date)
def test_group_letters_splits_on_file_size(notify_api, mocker):
mocker.patch('app.celery.letters_pdf_tasks.letter_in_created_state', return_value=True)
letters = [
# ends under max but next one is too big
{'Key': 'A.pdf', 'Size': 1}, {'Key': 'B.pdf', 'Size': 2},
# ends on exactly max
{'Key': 'C.pdf', 'Size': 3}, {'Key': 'D.pdf', 'Size': 1}, {'Key': 'E.pdf', 'Size': 1},
# exactly max goes in next file
{'Key': 'F.pdf', 'Size': 5},
# if it's bigger than the max, still gets included
{'Key': 'G.pdf', 'Size': 6},
# whatever's left goes in last list
{'Key': 'H.pdf', 'Size': 1}, {'Key': 'I.pdf', 'Size': 1},
]
with set_config_values(notify_api, {'MAX_LETTER_PDF_ZIP_FILESIZE': 5}):
x = group_letters(letters)
assert next(x) == [{'Key': 'A.pdf', 'Size': 1}, {'Key': 'B.pdf', 'Size': 2}]
assert next(x) == [{'Key': 'C.pdf', 'Size': 3}, {'Key': 'D.pdf', 'Size': 1}, {'Key': 'E.pdf', 'Size': 1}]
assert next(x) == [{'Key': 'F.pdf', 'Size': 5}]
assert next(x) == [{'Key': 'G.pdf', 'Size': 6}]
assert next(x) == [{'Key': 'H.pdf', 'Size': 1}, {'Key': 'I.pdf', 'Size': 1}]
# make sure iterator is exhausted
assert next(x, None) is None
def test_group_letters_splits_on_file_count(notify_api, mocker):
mocker.patch('app.celery.letters_pdf_tasks.letter_in_created_state', return_value=True)
letters = [
{'Key': 'A.pdf', 'Size': 1},
{'Key': 'B.pdf', 'Size': 2},
{'Key': 'C.pdf', 'Size': 3},
{'Key': 'D.pdf', 'Size': 1},
{'Key': 'E.pdf', 'Size': 1},
{'Key': 'F.pdf', 'Size': 5},
{'Key': 'G.pdf', 'Size': 6},
{'Key': 'H.pdf', 'Size': 1},
{'Key': 'I.pdf', 'Size': 1},
]
with set_config_values(notify_api, {'MAX_LETTER_PDF_COUNT_PER_ZIP': 3}):
x = group_letters(letters)
assert next(x) == [{'Key': 'A.pdf', 'Size': 1}, {'Key': 'B.pdf', 'Size': 2}, {'Key': 'C.pdf', 'Size': 3}]
assert next(x) == [{'Key': 'D.pdf', 'Size': 1}, {'Key': 'E.pdf', 'Size': 1}, {'Key': 'F.pdf', 'Size': 5}]
assert next(x) == [{'Key': 'G.pdf', 'Size': 6}, {'Key': 'H.pdf', 'Size': 1}, {'Key': 'I.pdf', 'Size': 1}]
# make sure iterator is exhausted
assert next(x, None) is None
def test_group_letters_splits_on_file_size_and_file_count(notify_api, mocker):
mocker.patch('app.celery.letters_pdf_tasks.letter_in_created_state', return_value=True)
letters = [
# ends under max file size but next file is too big
{'Key': 'A.pdf', 'Size': 1},
{'Key': 'B.pdf', 'Size': 2},
# ends on exactly max number of files and file size
{'Key': 'C.pdf', 'Size': 3},
{'Key': 'D.pdf', 'Size': 1},
{'Key': 'E.pdf', 'Size': 1},
# exactly max file size goes in next file
{'Key': 'F.pdf', 'Size': 5},
# file size is within max but number of files reaches limit
{'Key': 'G.pdf', 'Size': 1},
{'Key': 'H.pdf', 'Size': 1},
{'Key': 'I.pdf', 'Size': 1},
# whatever's left goes in last list
{'Key': 'J.pdf', 'Size': 1},
]
with set_config_values(notify_api, {
'MAX_LETTER_PDF_ZIP_FILESIZE': 5,
'MAX_LETTER_PDF_COUNT_PER_ZIP': 3
}):
x = group_letters(letters)
assert next(x) == [{'Key': 'A.pdf', 'Size': 1}, {'Key': 'B.pdf', 'Size': 2}]
assert next(x) == [{'Key': 'C.pdf', 'Size': 3}, {'Key': 'D.pdf', 'Size': 1}, {'Key': 'E.pdf', 'Size': 1}]
assert next(x) == [{'Key': 'F.pdf', 'Size': 5}]
assert next(x) == [{'Key': 'G.pdf', 'Size': 1}, {'Key': 'H.pdf', 'Size': 1}, {'Key': 'I.pdf', 'Size': 1}]
assert next(x) == [{'Key': 'J.pdf', 'Size': 1}]
# make sure iterator is exhausted
assert next(x, None) is None
def test_group_letters_ignores_non_pdfs(notify_api, mocker):
mocker.patch('app.celery.letters_pdf_tasks.letter_in_created_state', return_value=True)
letters = [{'Key': 'A.zip'}]
assert list(group_letters(letters)) == []
def test_group_letters_ignores_notifications_already_sent(notify_api, mocker):
mock = mocker.patch('app.celery.letters_pdf_tasks.letter_in_created_state', return_value=False)
letters = [{'Key': 'A.pdf'}]
assert list(group_letters(letters)) == []
mock.assert_called_once_with('A.pdf')
def test_group_letters_with_no_letters(notify_api, mocker):
mocker.patch('app.celery.letters_pdf_tasks.letter_in_created_state', return_value=True)
assert list(group_letters([])) == []
def test_letter_in_created_state(sample_notification):
sample_notification.reference = 'ABCDEF1234567890'
filename = '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF'
assert letter_in_created_state(filename) is True
def test_letter_in_created_state_fails_if_notification_not_in_created(sample_notification):
sample_notification.reference = 'ABCDEF1234567890'
sample_notification.status = NOTIFICATION_SENDING
filename = '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF'
assert letter_in_created_state(filename) is False
def test_letter_in_created_state_fails_if_notification_doesnt_exist(sample_notification):
sample_notification.reference = 'QWERTY1234567890'
filename = '2018-01-13/NOTIFY.ABCDEF1234567890.D.2.C.C.20180113120000.PDF'
assert letter_in_created_state(filename) is False
@freeze_time('2018-01-01 18:00')
@mock_s3
@pytest.mark.parametrize('key_type,noti_status,bucket_config_name,destination_folder', [
(KEY_TYPE_NORMAL, NOTIFICATION_CREATED, 'LETTERS_PDF_BUCKET_NAME', '2018-01-01/'),
(KEY_TYPE_TEST, NOTIFICATION_DELIVERED, 'TEST_LETTERS_BUCKET_NAME', '')
])
def test_process_letter_task_check_virus_scan_passed(
sample_letter_template, mocker, key_type, noti_status, bucket_config_name, destination_folder
):
letter_notification = create_notification(template=sample_letter_template, billable_units=0,
status='pending-virus-check', key_type=key_type,
reference='{} letter'.format(key_type))
filename = 'NOTIFY.{}'.format(letter_notification.reference)
source_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
target_bucket_name = current_app.config[bucket_config_name]
conn = boto3.resource('s3', region_name='ca-central-1')
conn.create_bucket(Bucket=source_bucket_name)
conn.create_bucket(Bucket=target_bucket_name)
s3 = boto3.client('s3', region_name='ca-central-1')
s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'old_pdf')
mock_get_page_count = mocker.patch('app.celery.letters_pdf_tasks.get_page_count', return_value=1)
mock_s3upload = mocker.patch('app.celery.letters_pdf_tasks.s3upload')
endpoint = 'http://localhost:9999/precompiled/sanitise'
with requests_mock.mock() as rmock:
rmock.request(
"POST",
endpoint,
json={
"file": base64.b64encode(b"new_pdf").decode("utf-8"),
"validation_passed": True,
"errors": {
"content_outside_of_printable_area": [],
"document_not_a4_size_portrait_orientation": [],
}
},
status_code=200
)
process_virus_scan_passed(filename)
assert letter_notification.status == noti_status
assert letter_notification.billable_units == 1
assert rmock.called
assert rmock.request_history[0].url == endpoint
mock_s3upload.assert_called_once_with(
bucket_name=target_bucket_name,
filedata=b'new_pdf',
file_location=destination_folder + filename,
region='ca-central-1',
)
mock_get_page_count.assert_called_once_with(b'old_pdf')
@freeze_time('2018-01-01 18:00')
@mock_s3
@pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEST])
def test_process_letter_task_check_virus_scan_passed_when_sanitise_fails(
sample_letter_notification, mocker, key_type
):
filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
source_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
target_bucket_name = current_app.config['INVALID_PDF_BUCKET_NAME']
conn = boto3.resource('s3', region_name='ca-central-1')
conn.create_bucket(Bucket=source_bucket_name)
conn.create_bucket(Bucket=target_bucket_name)
s3 = boto3.client('s3', region_name='ca-central-1')
s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'pdf_content')
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
sample_letter_notification.key_type = key_type
mock_move_s3 = mocker.patch('app.letters.utils._move_s3_object')
mock_sanitise = mocker.patch('app.celery.letters_pdf_tasks._sanitise_precompiled_pdf', return_value=None)
mock_get_page_count = mocker.patch('app.celery.letters_pdf_tasks.get_page_count', return_value=2)
process_virus_scan_passed(filename)
assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED
assert sample_letter_notification.billable_units == 0
mock_sanitise.assert_called_once_with(
ANY,
sample_letter_notification,
b'pdf_content'
)
mock_move_s3.assert_called_once_with(
source_bucket_name, filename,
target_bucket_name, filename
)
mock_get_page_count.assert_called_once_with(b'pdf_content')
@freeze_time('2018-01-01 18:00')
@mock_s3
@pytest.mark.parametrize('key_type,notification_status,bucket_config_name', [
(KEY_TYPE_NORMAL, NOTIFICATION_CREATED, 'LETTERS_PDF_BUCKET_NAME'),
(KEY_TYPE_TEST, NOTIFICATION_DELIVERED, 'TEST_LETTERS_BUCKET_NAME')
])
def test_process_letter_task_check_virus_scan_passed_when_redaction_fails(
sample_letter_notification, mocker, key_type, notification_status, bucket_config_name
):
filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
target_bucket_name = current_app.config[bucket_config_name]
conn = boto3.resource('s3', region_name='eu-west-1')
conn.create_bucket(Bucket=bucket_name)
conn.create_bucket(Bucket=target_bucket_name)
s3 = boto3.client('s3', region_name='eu-west-1')
s3.put_object(Bucket=bucket_name, Key=filename, Body=b'pdf_content')
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
sample_letter_notification.key_type = key_type
mock_copy_s3 = mocker.patch('app.letters.utils._copy_s3_object')
mocker.patch('app.celery.letters_pdf_tasks.get_page_count', return_value=2)
endpoint = 'http://localhost:9999/precompiled/sanitise'
with requests_mock.mock() as rmock:
rmock.request(
"POST",
endpoint,
json={
"file": base64.b64encode(b"new_pdf").decode("utf-8"),
"validation_passed": True,
"redaction_failed_message": "No matches for address block during redaction procedure",
"errors": {
"content_outside_of_printable_area": [],
"document_not_a4_size_portrait_orientation": []
}
},
status_code=200
)
process_virus_scan_passed(filename)
assert sample_letter_notification.billable_units == 2
assert sample_letter_notification.status == notification_status
if key_type == KEY_TYPE_NORMAL:
mock_copy_s3.assert_called_once_with(
bucket_name, filename,
bucket_name, 'REDACTION_FAILURE/' + filename
)
else:
mock_copy_s3.assert_not_called()
@freeze_time('2018-01-01 18:00')
@mock_s3
@pytest.mark.parametrize('key_type', [KEY_TYPE_NORMAL, KEY_TYPE_TEST])
def test_process_letter_task_check_virus_scan_passed_when_file_cannot_be_opened(
sample_letter_notification, mocker, key_type
):
filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
source_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
target_bucket_name = current_app.config['INVALID_PDF_BUCKET_NAME']
conn = boto3.resource('s3', region_name='ca-central-1')
conn.create_bucket(Bucket=source_bucket_name)
conn.create_bucket(Bucket=target_bucket_name)
s3 = boto3.client('s3', region_name='ca-central-1')
s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'pdf_content')
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
sample_letter_notification.key_type = key_type
mock_move_s3 = mocker.patch('app.letters.utils._move_s3_object')
mock_get_page_count = mocker.patch('app.celery.letters_pdf_tasks.get_page_count', side_effect=PdfReadError)
mock_sanitise = mocker.patch('app.celery.letters_pdf_tasks._sanitise_precompiled_pdf')
process_virus_scan_passed(filename)
mock_sanitise.assert_not_called()
mock_get_page_count.assert_called_once_with(b'pdf_content')
mock_move_s3.assert_called_once_with(
source_bucket_name, filename,
target_bucket_name, filename
)
assert sample_letter_notification.status == NOTIFICATION_VALIDATION_FAILED
assert sample_letter_notification.billable_units == 0
@mock_s3
def test_process_virus_scan_passed_logs_error_and_sets_tech_failure_if_s3_error_uploading_to_live_bucket(
mocker,
sample_letter_notification,
):
mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception')
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
source_bucket_name = current_app.config['LETTERS_SCAN_BUCKET_NAME']
conn = boto3.resource('s3', region_name='ca-central-1')
conn.create_bucket(Bucket=source_bucket_name)
s3 = boto3.client('s3', region_name='ca-central-1')
s3.put_object(Bucket=source_bucket_name, Key=filename, Body=b'pdf_content')
mocker.patch('app.celery.letters_pdf_tasks.get_page_count', return_value=1)
error_response = {
'Error': {
'Code': 'InvalidParameterValue',
'Message': 'some error message from amazon',
'Type': 'Sender'
}
}
mocker.patch('app.celery.letters_pdf_tasks._upload_pdf_to_test_or_live_pdf_bucket',
side_effect=ClientError(error_response, 'operation_name'))
endpoint = 'http://localhost:9999/precompiled/sanitise'
with requests_mock.mock() as rmock:
rmock.request(
"POST",
endpoint,
json={
"file": base64.b64encode(b"new_pdf").decode("utf-8"),
"validation_passed": True,
"errors": {
"content_outside_of_printable_area": [],
"document_not_a4_size_portrait_orientation": [],
}
},
status_code=200
)
process_virus_scan_passed(filename)
assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE
mock_logger.assert_called_once_with(
'Error uploading letter to live pdf bucket for notification: {}'.format(sample_letter_notification.id)
)
def test_move_invalid_letter_and_update_status_logs_error_and_sets_tech_failure_state_if_s3_error(
mocker,
sample_letter_notification,
):
error_response = {
'Error': {
'Code': 'InvalidParameterValue',
'Message': 'some error message from amazon',
'Type': 'Sender'
}
}
mocker.patch('app.celery.letters_pdf_tasks.move_scan_to_invalid_pdf_bucket',
side_effect=ClientError(error_response, 'operation_name'))
mock_logger = mocker.patch('app.celery.tasks.current_app.logger.exception')
_move_invalid_letter_and_update_status(sample_letter_notification, 'filename', mocker.Mock())
assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE
mock_logger.assert_called_once_with(
'Error when moving letter with id {} to invalid PDF bucket'.format(sample_letter_notification.id)
)
def test_process_letter_task_check_virus_scan_failed(sample_letter_notification, mocker):
filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
mock_move_failed_pdf = mocker.patch('app.celery.letters_pdf_tasks.move_failed_pdf')
with pytest.raises(VirusScanError) as e:
process_virus_scan_failed(filename)
assert "Virus scan failed:" in str(e)
mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.FAILURE)
assert sample_letter_notification.status == NOTIFICATION_VIRUS_SCAN_FAILED
def test_process_letter_task_check_virus_scan_error(sample_letter_notification, mocker):
filename = 'NOTIFY.{}'.format(sample_letter_notification.reference)
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
mock_move_failed_pdf = mocker.patch('app.celery.letters_pdf_tasks.move_failed_pdf')
with pytest.raises(VirusScanError) as e:
process_virus_scan_error(filename)
assert "Virus scan error:" in str(e.value)
mock_move_failed_pdf.assert_called_once_with(filename, ScanErrorType.ERROR)
assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE
def test_replay_letters_in_error_for_all_letters_in_error_bucket(notify_api, mocker):
mockObject = boto3.resource('s3').Object('ERROR', 'ERROR/file_name')
mocker.patch("app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", return_value=[mockObject])
mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket")
mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task")
replay_letters_in_error()
mock_move.assert_called_once_with('file_name')
mock_celery.assert_called_once_with(name='scan-file', kwargs={'filename': 'file_name'}, queue='antivirus-tasks')
def test_replay_letters_in_error_for_one_file(notify_api, mocker):
mockObject = boto3.resource('s3').Object('ERROR', 'ERROR/file_name')
mocker.patch("app.celery.letters_pdf_tasks.get_file_names_from_error_bucket", return_value=[mockObject])
mock_move = mocker.patch("app.celery.letters_pdf_tasks.move_error_pdf_to_scan_bucket")
mock_celery = mocker.patch("app.celery.letters_pdf_tasks.notify_celery.send_task")
replay_letters_in_error("file_name")
mock_move.assert_called_once_with('file_name')
mock_celery.assert_called_once_with(name='scan-file', kwargs={'filename': 'file_name'}, queue='antivirus-tasks')
def test_sanitise_precompiled_pdf_returns_data_from_template_preview(rmock, sample_letter_notification):
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
endpoint = 'http://localhost:9999/precompiled/sanitise'
with requests_mock.mock() as rmock:
rmock.request(
"POST",
endpoint,
json={
"file": base64.b64encode(b"new_pdf").decode("utf-8"),
"validation_passed": True,
"errors": {
"content_outside_of_printable_area": [],
"document_not_a4_size_portrait_orientation": [],
}
},
status_code=200
)
mock_celery = Mock(**{'retry.side_effect': Retry})
response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b'old_pdf')
assert rmock.called
assert rmock.request_history[0].url == endpoint
assert base64.b64decode(response.json()["file"].encode()) == b"new_pdf"
assert rmock.last_request.text == 'old_pdf'
def test_sanitise_precompiled_pdf_returns_none_on_validation_error(rmock, sample_letter_notification):
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
endpoint = 'http://localhost:9999/precompiled/sanitise'
with requests_mock.mock() as rmock:
rmock.request(
"POST",
endpoint,
json={
"file": base64.b64encode(b"nyan").decode("utf-8"),
"validation_passed": False,
"errors": {
"content_outside_of_printable_area": [1],
"document_not_a4_size_portrait_orientation": [],
}
},
status_code=400
)
mock_celery = Mock(**{'retry.side_effect': Retry})
response = _sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b'old_pdf')
assert rmock.called
assert rmock.request_history[0].url == endpoint
assert response is None
def test_sanitise_precompiled_pdf_passes_the_service_id_and_notification_id_to_template_preview(
mocker,
sample_letter_notification,
):
tp_mock = mocker.patch('app.celery.letters_pdf_tasks.requests_post')
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
mock_celery = Mock(**{'retry.side_effect': Retry})
_sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b'old_pdf')
service_id = str(sample_letter_notification.service_id)
notification_id = str(sample_letter_notification.id)
tp_mock.assert_called_once_with(
'http://localhost:9999/precompiled/sanitise',
data=b'old_pdf',
headers={'Authorization': 'Token my-secret-key',
'Service-ID': service_id,
'Notification-ID': notification_id}
)
def test_sanitise_precompiled_pdf_retries_on_http_error(rmock, sample_letter_notification):
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
rmock.post('http://localhost:9999/precompiled/sanitise', content=b'new_pdf', status_code=500)
mock_celery = Mock(**{'retry.side_effect': Retry})
with pytest.raises(Retry):
_sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b'old_pdf')
def test_sanitise_precompiled_pdf_sets_notification_to_technical_failure_after_too_many_errors(
rmock,
sample_letter_notification
):
sample_letter_notification.status = NOTIFICATION_PENDING_VIRUS_CHECK
rmock.post('http://localhost:9999/precompiled/sanitise', content=b'new_pdf', status_code=500)
mock_celery = Mock(**{'retry.side_effect': MaxRetriesExceededError})
with pytest.raises(MaxRetriesExceededError):
_sanitise_precompiled_pdf(mock_celery, sample_letter_notification, b'old_pdf')
assert sample_letter_notification.status == NOTIFICATION_TECHNICAL_FAILURE
| 41.760546
| 118
| 0.7103
|
e05acdd6b42cc495a8e5051ad2a98041c85ee74f
| 86,975
|
py
|
Python
|
python/paddle/fluid/tests/unittests/op_test.py
|
jc19chaoj/Paddle
|
d89f246ccdcbaeb6d790333aef43295854863a2d
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/op_test.py
|
jc19chaoj/Paddle
|
d89f246ccdcbaeb6d790333aef43295854863a2d
|
[
"Apache-2.0"
] | null | null | null |
python/paddle/fluid/tests/unittests/op_test.py
|
jc19chaoj/Paddle
|
d89f246ccdcbaeb6d790333aef43295854863a2d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import unittest
import warnings
import numpy as np
import random
import six
import struct
import time
import itertools
import collections
from collections import defaultdict
from copy import copy
import paddle
import paddle.fluid as fluid
import paddle.fluid.core as core
from paddle.fluid.framework import _test_eager_guard
from paddle.fluid.backward import append_backward
from paddle.fluid.op import Operator
from paddle.fluid.executor import Executor
from paddle.fluid.framework import Program, OpProtoHolder, Variable, _current_expected_place
from paddle.fluid.tests.unittests.testsuite import (
create_op,
set_input,
append_input_output,
append_loss_ops, )
from paddle.fluid import unique_name
from paddle.fluid.tests.unittests.white_list import (
op_accuracy_white_list,
check_shape_white_list,
compile_vs_runtime_white_list,
no_check_set_white_list,
op_threshold_white_list,
no_grad_set_white_list, )
def check_out_dtype(api_fn, in_specs, expect_dtypes, target_index=0, **configs):
"""
Determines whether dtype of output tensor is as expected.
Args:
api_fn(callable): paddle api function
in_specs(list[tuple]): list of shape and dtype information for constructing input tensor of api_fn, such as [(shape, dtype), (shape, dtype)].
        expect_dtypes(list[str]): expected dtypes of the output tensor.
        target_index(int): indicates which entry of in_specs to use when inferring the dtype of the output.
        configs(dict): other keyword arguments of the paddle api function.
Example:
check_out_dtype(fluid.layers.pad_constant_like, [([2,3,2,3], 'float64'), ([1, 3, 1,3], )], ['float32', 'float64', 'int64'], target_index=1, pad_value=0.)
"""
paddle.enable_static()
for i, expect_dtype in enumerate(expect_dtypes):
with paddle.static.program_guard(paddle.static.Program()):
input_t = []
for index, spec in enumerate(in_specs):
if len(spec) == 1:
shape = spec[0]
dtype = expect_dtype if target_index == index else 'float32'
elif len(spec) == 2:
shape, dtype = spec
else:
raise ValueError(
"Value of in_specs[{}] should contains two elements: [shape, dtype]".
format(index))
input_t.append(
paddle.static.data(
name='data_%s' % index, shape=shape, dtype=dtype))
out = api_fn(*input_t, **configs)
out_dtype = fluid.data_feeder.convert_dtype(out.dtype)
if out_dtype != expect_dtype:
raise ValueError(
"Expected out.dtype is {}, but got {} from {}.".format(
expect_dtype, out_dtype, api_fn.__name__))
def _set_use_system_allocator(value=None):
USE_SYSTEM_ALLOCATOR_FLAG = "FLAGS_use_system_allocator"
old_value = core.globals()[USE_SYSTEM_ALLOCATOR_FLAG]
value = old_value if value is None else value
core.globals()[USE_SYSTEM_ALLOCATOR_FLAG] = value
return old_value
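# Typical toggle/restore pattern for the helper above (illustrative):
#
#   old = _set_use_system_allocator(True)
#   try:
#       ...  # run the check that needs the system allocator
#   finally:
#       _set_use_system_allocator(old)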
def randomize_probability(batch_size, class_num, dtype='float32'):
prob = np.random.uniform(
0.1, 1.0, size=(batch_size, class_num)).astype(dtype)
prob_sum = prob.sum(axis=1)
for i in six.moves.xrange(len(prob)):
prob[i] /= prob_sum[i]
return prob
def get_numeric_gradient(place,
scope,
op,
inputs,
input_to_check,
output_names,
delta=0.005,
in_place=False):
# FIXME: change this method by compile time concepts
set_input(scope, op, inputs, place)
def product(dim):
return six.moves.reduce(lambda a, b: a * b, dim, 1)
tensor_to_check = scope.find_var(input_to_check).get_tensor()
tensor_size = product(tensor_to_check.shape())
tensor_to_check_dtype = tensor_to_check._dtype()
if tensor_to_check_dtype == core.VarDesc.VarType.FP32:
tensor_to_check_dtype = np.float32
elif tensor_to_check_dtype == core.VarDesc.VarType.FP64:
tensor_to_check_dtype = np.float64
elif tensor_to_check_dtype == core.VarDesc.VarType.FP16:
tensor_to_check_dtype = np.float16
        # cast delta to np.float16; arithmetic will automatically promote it to float32/float64
delta = np.array(delta).astype(np.float16)
elif tensor_to_check_dtype == core.VarDesc.VarType.BF16:
tensor_to_check_dtype = np.float32
elif tensor_to_check_dtype == core.VarDesc.VarType.COMPLEX64:
tensor_to_check_dtype = np.complex64
elif tensor_to_check_dtype == core.VarDesc.VarType.COMPLEX128:
        tensor_to_check_dtype = np.complex128
else:
raise ValueError("Not supported data type " + str(tensor_to_check_dtype)
+ ", tensor name : " + str(input_to_check))
    def get_output():
        sums = []  # avoid shadowing the built-in `sum`
        op.run(scope, place)
        for output_name in output_names:
            output_numpy = np.array(scope.find_var(output_name).get_tensor())
            # numpy.dtype does not have bfloat16, thus we use numpy.uint16 to
            # store bfloat16 data, which needs to be converted to float to
            # check the floating-point precision.
            if tensor_to_check._dtype() == core.VarDesc.VarType.BF16:
                output_numpy = convert_uint16_to_float(output_numpy)
            sums.append(output_numpy.astype(tensor_to_check_dtype).mean())
        return tensor_to_check_dtype(np.array(sums).sum() / len(output_names))
gradient_flat = np.zeros(shape=(tensor_size, ), dtype=tensor_to_check_dtype)
def __get_elem__(tensor, i):
if tensor_to_check_dtype == np.float16:
numpy_tensor = np.array(tensor).astype(np.float16)
numpy_tensor = numpy_tensor.flatten()
return numpy_tensor[i]
elif tensor_to_check._dtype() == core.VarDesc.VarType.BF16:
numpy_tensor = np.array(tensor).astype(np.uint16)
numpy_tensor = numpy_tensor.flatten()
return struct.unpack('<f', struct.pack('<I', numpy_tensor[i]
<< 16))[0]
elif tensor_to_check_dtype == np.float32:
return tensor._get_float_element(i)
elif tensor_to_check_dtype == np.float64:
return tensor._get_double_element(i)
else:
raise TypeError("Unsupported test data type %s." %
tensor_to_check_dtype)
def __set_elem__(tensor, i, e):
if tensor_to_check_dtype == np.float16:
numpy_tensor = np.array(tensor).astype(np.float16)
shape = numpy_tensor.shape
numpy_tensor = numpy_tensor.flatten()
numpy_tensor[i] = e
numpy_tensor = numpy_tensor.reshape(shape)
tensor.set(numpy_tensor, place)
elif tensor_to_check._dtype() == core.VarDesc.VarType.BF16:
numpy_tensor = np.array(tensor).astype(np.uint16)
shape = numpy_tensor.shape
numpy_tensor = numpy_tensor.flatten()
numpy_tensor[i] = np.uint16(copy_bits_from_float_to_uint16(e))
numpy_tensor = numpy_tensor.reshape(shape)
tensor.set(numpy_tensor, place)
elif tensor_to_check_dtype == np.float32:
tensor._set_float_element(i, e)
elif tensor_to_check_dtype == np.float64:
tensor._set_double_element(i, e)
else:
raise TypeError("Unsupported test data type %s." %
tensor_to_check_dtype)
# we only compute gradient of one element each time.
# we use a for loop to compute the gradient of every element.
for i in six.moves.xrange(tensor_size):
if in_place:
set_input(scope, op, inputs, place)
        # get one input element through its index i.
origin = __get_elem__(tensor_to_check, i)
# add delta to it, run op and then get the sum of the result tensor.
x_pos = origin + delta
__set_elem__(tensor_to_check, i, x_pos)
y_pos = get_output()
if in_place:
set_input(scope, op, inputs, place)
x_neg = origin - delta
__set_elem__(tensor_to_check, i, x_neg)
y_neg = get_output()
__set_elem__(tensor_to_check, i, origin)
gradient_flat[i] = (y_pos - y_neg) / delta / 2
return gradient_flat.reshape(tensor_to_check.shape())
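def _central_difference_demo():
    # Standalone numpy-only sketch (illustrative helper, not used by the tests
    # above) of the central-difference rule that get_numeric_gradient applies
    # to each tensor element: grad_i ~= (f(x + d*e_i) - f(x - d*e_i)) / (2*d).
    f = lambda x: float((x ** 2).sum())
    x = np.array([1.0, -2.0, 3.0])
    delta = 1e-5
    grad = np.zeros_like(x)
    for i in range(x.size):
        origin = x[i]
        x[i] = origin + delta
        y_pos = f(x)
        x[i] = origin - delta
        y_neg = f(x)
        x[i] = origin
        grad[i] = (y_pos - y_neg) / delta / 2
    return grad  # numerically close to the analytic gradient 2 * x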
def skip_check_grad_ci(reason=None):
"""Decorator to skip check_grad CI.
Check_grad is required for Op test cases. However, there are some special
cases that do not need to do check_grad. This decorator is used to skip the
check_grad of the above cases.
    Note: the execution of the unit test will not be skipped. It just avoids the
    check_grad check in the tearDownClass method by setting a `no_need_check_grad` flag.
Example:
@skip_check_grad_ci(reason="For inference, check_grad is not required.")
class TestInference(OpTest):
"""
if not isinstance(reason, str):
raise AssertionError("The reason for skipping check_grad is required.")
def wrapper(cls):
cls.no_need_check_grad = True
return cls
return wrapper
def copy_bits_from_float_to_uint16(f):
return struct.unpack('<I', struct.pack('<f', f))[0] >> 16
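# Example: 1.0 is 0x3F800000 in IEEE-754 float32; shifting out the low 16 bits
# leaves 0x3F80, which is exactly the bfloat16 bit pattern for 1.0.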
def convert_float_to_uint16(float_list, data_format="NCHW"):
if data_format == "NHWC":
float_list = np.transpose(float_list, [0, 3, 1, 2])
new_output = []
for x in np.nditer(float_list):
new_output.append(np.uint16(copy_bits_from_float_to_uint16(x)))
new_output = np.reshape(new_output, float_list.shape).view(np.uint16)
if data_format == "NHWC":
new_output = np.transpose(new_output, [0, 2, 3, 1])
return new_output
def convert_uint16_to_float(in_list):
in_list = np.asarray(in_list)
out = np.vectorize(
lambda x: struct.unpack('<f', struct.pack('<I', x << 16))[0],
otypes=[np.float32])(in_list.flat)
return np.reshape(out, in_list.shape)
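# Round-trip sketch: bfloat16 keeps only a 7-bit explicit mantissa (8 bits of
# effective precision), so the conversion is lossy but close, e.g.
#   x = np.array([1.0, 3.14159], dtype=np.float32)
#   y = convert_uint16_to_float(convert_float_to_uint16(x))
#   np.allclose(x, y, atol=1e-2)  # True within bf16 precision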
class OpTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
'''Fix random seeds to remove randomness from tests'''
cls._np_rand_state = np.random.get_state()
cls._py_rand_state = random.getstate()
cls.call_once = False
cls.dtype = None
cls.outputs = {}
cls.input_shape_is_large = True
np.random.seed(123)
random.seed(124)
if paddle.is_compiled_with_npu():
cls._use_system_allocator = _set_use_system_allocator(False)
else:
cls._use_system_allocator = _set_use_system_allocator(True)
@classmethod
def tearDownClass(cls):
"""Restore random seeds"""
np.random.set_state(cls._np_rand_state)
random.setstate(cls._py_rand_state)
_set_use_system_allocator(cls._use_system_allocator)
def is_empty_grad_op(op_type):
all_op_kernels = core._get_all_register_op_kernels()
grad_op = op_type + '_grad'
if grad_op in all_op_kernels.keys():
if is_mkldnn_op_test():
grad_op_kernels = all_op_kernels[grad_op]
for grad_op_kernel in grad_op_kernels:
if 'MKLDNN' in grad_op_kernel:
return False
else:
return False
return True
def is_xpu_op_test():
return hasattr(cls, "use_xpu") and cls.use_xpu == True
def is_mkldnn_op_test():
return hasattr(cls, "use_mkldnn") and cls.use_mkldnn == True
def is_rocm_op_test():
return core.is_compiled_with_rocm()
def is_npu_op_test():
return hasattr(cls, "use_npu") and cls.use_npu == True
def is_mlu_op_test():
return hasattr(cls, "use_mlu") and cls.use_mlu == True
if not hasattr(cls, "op_type"):
raise AssertionError(
"This test do not have op_type in class attrs, "
"please set self.__class__.op_type=the_real_op_type manually.")
# case in NO_FP64_CHECK_GRAD_CASES and op in NO_FP64_CHECK_GRAD_OP_LIST should be fixed
if not hasattr(cls, "no_need_check_grad") \
and not is_empty_grad_op(cls.op_type):
if cls.dtype is None or \
(cls.dtype == np.float16 \
and cls.op_type not in op_accuracy_white_list.NO_FP16_CHECK_GRAD_OP_LIST \
and not hasattr(cls, "exist_check_grad")):
raise AssertionError("This test of %s op needs check_grad." %
cls.op_type)
# check for op test with fp64 precision, but not check mkldnn op test for now
if cls.dtype in [np.float32, np.float64] \
and cls.op_type not in op_accuracy_white_list.NO_FP64_CHECK_GRAD_OP_LIST \
and not hasattr(cls, 'exist_fp64_check_grad') \
and not is_xpu_op_test() \
and not is_mkldnn_op_test() \
and not is_rocm_op_test() \
and not is_npu_op_test() \
and not is_mlu_op_test():
raise AssertionError(
"This test of %s op needs check_grad with fp64 precision." %
cls.op_type)
if not cls.input_shape_is_large \
and cls.op_type not in check_shape_white_list.NEED_TO_FIX_OP_LIST:
raise AssertionError(
"Input's shape should be large than or equal to 100 for " +
cls.op_type + " Op.")
def try_call_once(self, data_type):
if not self.call_once:
self.call_once = True
self.dtype = data_type
def is_bfloat16_op(self):
# self.dtype is the dtype of inputs, and is set in infer_dtype_from_inputs_outputs.
# Make sure this function is called after calling infer_dtype_from_inputs_outputs.
return self.dtype == np.uint16 or (
hasattr(self, 'output_dtype') and
self.output_dtype == np.uint16) or (
hasattr(self, 'mkldnn_data_type') and
                getattr(self, 'mkldnn_data_type') == "bfloat16") or (
hasattr(self, 'attrs') and
'mkldnn_data_type' in self.attrs and
self.attrs['mkldnn_data_type'] == 'bfloat16')
def is_mkldnn_op(self):
return (hasattr(self, "use_mkldnn") and self.use_mkldnn == True) or (
hasattr(self, "attrs") and "use_mkldnn" in self.attrs and
self.attrs["use_mkldnn"] == True)
def is_xpu_op(self):
return (hasattr(self, "use_xpu") and self.use_xpu == True) or (
hasattr(self, "attrs") and "use_xpu" in self.attrs and
self.attrs["use_xpu"] == True)
def infer_dtype_from_inputs_outputs(self, inputs, outputs):
def is_np_data(input):
return isinstance(input, (np.ndarray, np.generic))
def infer_dtype(numpy_dict, dtype_set):
assert isinstance(
numpy_dict,
dict), "self.inputs, self.outputs must be numpy_dict"
# the inputs are as follows:
# case 1: inputs = {'X': x}
# case 2: inputs = {'X': (x, x_lod)}
# case 3: inputs = {"X": [("x0", x0), ("x1", x1), ("x2", x2)]}
            # case 4: inputs = {'X': [("x1", (x1, [x1_lod1])), ("x2", (x2, [x2_lod2]))]}
# TODO(juncaipeng) infer dtype from inputs maybe obtain wrong type.
for _, var_value in six.iteritems(numpy_dict):
if is_np_data(var_value): # case 1
dtype_set.add(var_value.dtype)
elif isinstance(var_value, (list, tuple)): # case 2, 3, 4
for sub_val_value in var_value:
if is_np_data(sub_val_value): # case 2
dtype_set.add(sub_val_value.dtype)
elif len(sub_val_value) > 1 and is_np_data(
sub_val_value[1]): # case 3
dtype_set.add(sub_val_value[1].dtype)
elif len(sub_val_value) > 1 and isinstance(sub_val_value[1], (list, tuple)) \
and is_np_data(sub_val_value[1][0]): # case 4
dtype_set.add(sub_val_value[1][0].dtype)
# infer dtype from inputs, and dtype means the precision of the test
# collect dtype of all inputs
input_dtype_set = set()
infer_dtype(inputs, input_dtype_set)
dtype_list = [
np.dtype(np.float64), np.dtype(np.float32), np.dtype(np.float16),
np.dtype(np.int64), np.dtype(np.int32), np.dtype(np.uint16),
np.dtype(np.int16), np.dtype(np.int8), np.dtype(np.uint8),
np.dtype(np.bool)
]
# check the dtype in dtype_list in order, select the first dtype that in dtype_set
for dtype in dtype_list:
if dtype in input_dtype_set:
self.dtype = dtype
break
# save input dtype in class attr
self.__class__.dtype = self.dtype
# infer dtype of outputs
output_dtype_set = set()
infer_dtype(outputs, output_dtype_set)
for dtype in dtype_list:
if dtype in output_dtype_set:
self.output_dtype = dtype
break
def feed_var(self, input_vars, place):
feed_map = {}
for var_name in input_vars:
if isinstance(input_vars[var_name], list):
for name, np_value in self.inputs[var_name]:
tensor = core.LoDTensor()
if isinstance(np_value, tuple):
tensor.set(np_value[0], place)
tensor.set_recursive_sequence_lengths(np_value[1])
else:
tensor.set(np_value, place)
feed_map[name] = tensor
else:
tensor = core.LoDTensor()
if isinstance(self.inputs[var_name], tuple):
tensor.set(self.inputs[var_name][0], place)
tensor.set_recursive_sequence_lengths(self.inputs[var_name][
1])
else:
tensor.set(self.inputs[var_name], place)
feed_map[var_name] = tensor
return feed_map
def _append_ops(self, block):
        self.__class__.op_type = self.op_type  # for ci check, please do not delete it for now
if self.is_mkldnn_op():
self.__class__.use_mkldnn = True
if self.is_xpu_op():
self.__class__.use_xpu = True
op_proto = OpProtoHolder.instance().get_op_proto(self.op_type)
"infer datatype from inputs and outputs for this test case"
self.infer_dtype_from_inputs_outputs(self.inputs, self.outputs)
inputs = append_input_output(block, op_proto, self.inputs, True,
self.dtype)
outputs = append_input_output(block, op_proto, self.outputs, False,
self.dtype)
if hasattr(self, "cache_name_list"):
for name in self.cache_name_list:
inputs[name] = block.create_var(
name=name,
persistable=True,
type=core.VarDesc.VarType.RAW,
stop_gradient=True)
op = block.append_op(
type=self.op_type,
inputs=inputs,
outputs=outputs,
attrs=copy(self.attrs) if hasattr(self, "attrs") else dict())
# infer variable type and infer shape in compile-time
op.desc.infer_var_type(block.desc)
op.desc.infer_shape(block.desc)
return op
def _get_io_vars(self, block, numpy_inputs):
inputs = {}
for name, value in six.iteritems(numpy_inputs):
if isinstance(value, list):
var_list = [
block.var(sub_name) for sub_name, sub_value in value
]
inputs[name] = var_list
else:
inputs[name] = block.var(name)
return inputs
def _get_inputs(self, block):
return self._get_io_vars(block, self.inputs)
def _get_outputs(self, block):
return self._get_io_vars(block, self.outputs)
def calc_output(self, place):
outs, _ = self._calc_output(place)
return outs
def _create_var_from_numpy(self, value):
if isinstance(value, tuple):
data = value[0]
lod = value[1]
v = fluid.dygraph.base.to_variable(value=data)
v.value().get_tensor().set_recursive_sequence_lengths(lod)
return v
else:
return fluid.dygraph.base.to_variable(value)
def get_sequence_batch_size_1_input(self, lod=None, shape=None):
"""Get LoD input data whose batch size is 1.
All sequence related OP unittests should call this function to contain the case of batch size = 1.
Args:
lod (list[list of int], optional): Length-based LoD, length of lod[0] should be 1. Default: [[13]].
shape (list, optional): Shape of input, shape[0] should be equals to lod[0][0]. Default: [13, 23].
Returns:
tuple (ndarray, lod) : LoD input data whose batch size is 1.
"""
if lod is None:
lod = [[13]]
if shape is None:
shape = [13, 23]
assert len(lod[0]) == 1
assert lod[0][0] == shape[0]
x = np.random.uniform(0.1, 1, shape).astype('float32')
return (x, lod)
def lod_has_single_zero(self, lod):
for i in range(len(lod) - 2):
if lod[i] != 0 and lod[i + 1] == 0 and lod[i + 2] != 0:
return True
return False
def lod_has_continuous_zero(self, lod):
for i in range(len(lod) - 3):
if lod[i] != 0 and lod[i + 1] == 0 and lod[i + 2] == 0 and lod[
i + 3] != 0:
return True
return False
def get_sequence_instance_size_0_input(self, lod=None, shape=None):
"""Get LoD input data whose instance size is 0.
All sequence related OP unittests should call this function to contain the case of instance size is 0.
Args:
lod (list[list of int], optional): Length-based LoD, lod[0]'s size must at least eight, lod[0] must at least two zeros at the beginning and at least two zeros at the end, the middle position of lod[0] contains a single zero and multiple zero. Default: [[0, 0, 4, 0, 3, 0, 0, 5, 0, 0]].
shape (list, optional): Shape of input, shape[0] should be equals to lod[0][0]. Default: [13, 23].
Returns:
tuple (ndarray, lod): LoD input data whose instance size is 0.
"""
if lod is None:
lod = [[0, 0, 4, 0, 3, 0, 0, 5, 0, 0]]
if shape is None:
shape = [12, 10]
assert len(lod[0]) >= 8
assert lod[0][0] == 0 and lod[0][1] == 0 and lod[0][-1] == 0 and lod[0][
-2] == 0
assert self.lod_has_single_zero(lod[0]) is True
assert self.lod_has_continuous_zero(lod[0]) is True
assert sum(lod[0]) == shape[0]
x = np.random.uniform(0.1, 1, shape).astype('float32')
return (x, lod)
def append_input_output_for_dygraph(self, op_proto, np_list, is_input,
if_return_inputs_grad_dict, block):
def create_var(np_value, name, is_input, if_return_inputs_grad_dict):
np_value_temp = np_value
has_lod = False
lod_temp = None
if isinstance(np_value, tuple):
np_value_temp = np_value[0]
has_lod = True
lod_temp = np_value[1]
if is_input:
v = self._create_var_from_numpy(np_value_temp)
if if_return_inputs_grad_dict:
v.stop_gradient = False
if has_lod:
v.value().get_tensor().set_recursive_sequence_lengths(
lod_temp)
else:
v = block.create_var(
name=name,
dtype=np_value_temp.dtype,
type=core.VarDesc.VarType.LOD_TENSOR,
persistable=False,
stop_gradient=False)
return v
# prepare variable for input or output
var_dict = defaultdict(list)
if if_return_inputs_grad_dict:
inputs_grad_dict = defaultdict()
proto_list = op_proto.inputs if is_input else op_proto.outputs
for var_proto in proto_list:
name = var_proto.name
if (name not in np_list) and var_proto.dispensable:
continue
if name not in np_list:
assert var_proto.intermediate, "{} not found".format(name)
v = block.create_var(
dtype='float32', type=core.VarDesc.VarType.LOD_TENSOR)
var_dict[name].append(v)
if if_return_inputs_grad_dict:
inputs_grad_dict[name] = v
continue
if var_proto.duplicable:
assert isinstance(
np_list[name],
list), "Duplicable {} should be set as list".format(name)
var_list = []
slot_name = name
for (name, np_value) in np_list[name]:
v = create_var(np_value, name, is_input,
if_return_inputs_grad_dict)
var_list.append(v)
if if_return_inputs_grad_dict:
inputs_grad_dict[name] = v
var_dict[slot_name] = var_list
else:
nplist_value_temp = None
name_temp = None
if isinstance(np_list[name], list):
nplist_value_temp = np_list[name][0]
name_temp = name
else:
nplist_value_temp = np_list[name]
name_temp = unique_name.generate("%s_out" % (name))
v = create_var(nplist_value_temp, name_temp, is_input,
if_return_inputs_grad_dict)
var_dict[name].append(v)
if if_return_inputs_grad_dict:
inputs_grad_dict[name] = v
if if_return_inputs_grad_dict:
return var_dict, inputs_grad_dict
else:
return var_dict
def _calc_dygraph_output(self, place, parallel=False, no_check_set=None):
        self.__class__.op_type = self.op_type  # for ci check, please do not delete it for now
with fluid.dygraph.base.guard(place=place):
block = fluid.default_main_program().global_block()
op_proto = OpProtoHolder.instance().get_op_proto(self.op_type)
# prepare input variable
inputs = self.append_input_output_for_dygraph(op_proto, self.inputs,
True, False, block)
# prepare output variable
outputs = self.append_input_output_for_dygraph(
op_proto, self.outputs, False, False, block)
            # prepare attributes
attrs_outputs = {}
if hasattr(self, "attrs"):
for attrs_name in self.attrs:
if self.attrs[attrs_name] is not None:
attrs_outputs[attrs_name] = self.attrs[attrs_name]
block.append_op(
type=self.op_type,
inputs=inputs,
outputs=outputs,
attrs=attrs_outputs if hasattr(self, "attrs") else None)
return outputs
def _calc_output(self,
place,
parallel=False,
no_check_set=None,
loss=None,
enable_inplace=None,
for_inplace_test=None):
program = Program()
block = program.global_block()
op = self._append_ops(block)
inputs = self._get_inputs(block)
outputs = self._get_outputs(block)
feed_map = self.feed_var(inputs, place)
if for_inplace_test:
# Some variables' tensors hold no buffer (tensor's _holder is NULL), like XShape in reshape2 op,
# and the shapes of those variables contain 0 (eg. Xshape.shape = [0, 2, 5]).
            # Set persistable for those variables in order to get them from global_scope for the inplace grad test directly, rather than feeding them,
# since feed op calls check_memory_size() which fails when tensor's holder_ is NULL.
for out_name in op.output_arg_names:
var = block.var(out_name)
if 0 in var.shape:
var.persistable = True
original_program = program
if parallel:
use_cuda = False
if isinstance(place, fluid.CUDAPlace):
use_cuda = True
compiled_prog = fluid.CompiledProgram(program).with_data_parallel(
loss_name=loss.name if loss else None, places=place)
program = compiled_prog
fetch_list = getattr(self, "fetch_list", [])
# if the fetch_list is customized by user, we use it directly.
# if not, fill the fetch_list by the user configured outputs in test.
if len(fetch_list) == 0:
for var_name, var in six.iteritems(outputs):
if no_check_set is not None and var_name in no_check_set:
continue
if isinstance(var, list):
for v in var:
fetch_list.append(v.name)
else:
fetch_list.append(var.name)
        # if the fetch_list is still empty, fill the fetch_list with the operator output.
if len(fetch_list) == 0:
for out_name, out_dup in Operator.get_op_outputs(self.op_type):
fetch_list.append(str(out_name))
if enable_inplace is not None:
build_strategy = fluid.BuildStrategy()
build_strategy.enable_inplace = enable_inplace
compiled_prog = fluid.CompiledProgram(program).with_data_parallel(
build_strategy=build_strategy, places=place)
program = compiled_prog
executor = Executor(place)
outs = executor.run(program,
feed=feed_map,
fetch_list=fetch_list,
return_numpy=False)
self.op = op
self.program = original_program
if for_inplace_test:
return outs, fetch_list, feed_map, original_program, op.desc
else:
return outs, fetch_list
def _compare_expect_and_actual_outputs(self,
place,
fetch_list,
expect_outs,
actual_outs,
inplace_atol=None):
"""Compare expect outs and actual outs of an tested op.
Args:
place (CPUPlace | CUDAPlace): The place where the op runs.
fetch_list (list): The outputs of tested op.
expect_outs (list): The expect outs of tested op.
actual_outs (list): The actual outs of tested op.
            inplace_atol (float): The tolerable error, only set when the tested op doesn't ensure computational consistency, like group_norm op.
Returns:
None.
"""
# compare expect_outs and actual_outs
for i, name in enumerate(fetch_list):
# Note(zhiqiu): inplace_atol should be only set when op doesn't ensure
# computational consistency.
# When inplace_atol is not None, the inplace check uses numpy.allclose
# to check inplace result instead of numpy.array_equal.
expect_out = np.array(expect_outs[i])
actual_out = np.array(actual_outs[i])
if inplace_atol is not None:
self.assertTrue(
np.allclose(
expect_out, actual_out, atol=inplace_atol),
"Output (" + name + ") has diff at " + str(place) +
" when using and not using inplace" + "\nExpect " +
                    str(expect_out) + "\n" + "But Got " + str(actual_out) +
" in class " + self.__class__.__name__)
else:
self.assertTrue(
np.array_equal(expect_out, actual_out),
"Output (" + name + ") has diff at " + str(place) +
" when using and not using inplace" + "\nExpect " +
                    str(expect_out) + "\n" + "But Got " + str(actual_out) +
" in class " + self.__class__.__name__ + '\n')
def _construct_grad_program_from_forward(self, fwd_program, grad_op_desc,
op_grad_to_var):
"""Generate grad_program which contains the grad_op.
Args:
            fwd_program (Program): The program that contains grad_op_desc's corresponding forward op.
grad_op_desc (OpDesc): The OpDesc of grad op.
op_grad_to_var (dict): The relation of variables in grad op and its forward op.
Returns:
grad_program (program): The program which contains the grad_op.
"""
grad_program = Program()
grad_block = grad_program.global_block()
new_op_desc = grad_block.desc.append_op()
new_op_desc.copy_from(grad_op_desc)
grad_program._sync_with_cpp()
# Create grad vars based on fwd vars (shape and dtype)
for arg in grad_op_desc.input_arg_names(
) + grad_op_desc.output_arg_names():
fwd_var_name = op_grad_to_var.get(arg, None)
if fwd_var_name is None:
fwd_var_name = arg
fwd_var = fwd_program.global_block().vars.get(fwd_var_name)
assert fwd_var is not None, "{} cannot be found".format(
fwd_var_name)
grad_var = grad_block.create_var(
name=arg,
dtype=fwd_var.dtype,
shape=fwd_var.shape,
type=fwd_var.type,
persistable=False)
# Some variables' tensors hold no buffer (tensor's _holder is NULL), like XShape in reshape2 op,
# and the shapes of those variables contain 0 (eg. Xshape.shape = [0, 2, 5]).
            # Set persistable for those variables in order to get them from global_scope for the inplace grad test directly, rather than feeding them,
# since feed op calls check_memory_size() which fails when tensor's holder_ is NULL.
if 0 in grad_var.shape:
grad_var.persistable = True
grad_program._sync_with_cpp()
return grad_program
def _construct_grad_feed_map_from_forward(self, place, fwd_res,
grad_op_desc, op_grad_to_var):
"""Generate grad_feed_map for grad_program.
        Since we don't really check gradient accuracy, but rather the consistency between using and not using inplace,
we use fwd outs (also inputs sometimes) to construct grad inputs.
Args:
place (CPUPlace | CUDAPlace): The place where the op runs.
            fwd_res (tuple): The outputs of its forward op, in the same form as the returns of _calc_output() when for_inplace_test is True.
i.e., tuple(fwd_outs, fwd_fetch_list, fwd_feed_map, fwd_program, fwd_op_desc)
grad_op_desc (OpDesc): The OpDesc of grad op.
op_grad_to_var (dict): The relation of variables in grad op and its fwd_op.
Returns:
grad_feed_map (dict): The feed_map of grad_op.
"""
fwd_outs, fwd_fetch_list, fwd_feed_map, fwd_program, fwd_op_desc = fwd_res
p = core.Place()
p.set_place(place)
grad_feed_map = {}
for arg in grad_op_desc.input_arg_names():
if arg in fwd_feed_map.keys():
grad_feed_map[arg] = fwd_feed_map[arg]._copy(p)
else:
fwd_var_name = op_grad_to_var.get(arg, None)
if fwd_var_name is None:
fwd_var_name = arg
for i, out_name in enumerate(fwd_fetch_list):
if out_name == fwd_var_name:
# don't feed variables whose tensors hold no buffer (shape contains 0 like shape = [0,2,5] and holder_ is NULL), like XShape in reshape2 op.
# get them from global_scope directly since we have set them persistable in fwd execution
if 0 in fwd_program.global_block().var(out_name).shape:
continue
else:
grad_feed_map[arg] = fwd_outs[i]._copy(p)
return grad_feed_map
def _get_need_run_ops(self, op_desc, fwd_op_desc=None):
"""Postorder traversal of the 'grad' tree to get all ops that need to run during inplace test.
        An op needs to run during the inplace check if
        (1) it has infer_inplace, or
        (2) it has infer_inplace in its grad descendants (since we need its outputs to construct its grad's inputs).
Args:
op_desc (OpDesc): The op_desc of current op.
fwd_op_desc (OpDesc): The op_desc of current op's forward op, None if current op has no forward op.
Eg. relu's fwd_op is None, relu_grad's fwd_op is relu, relu_grad_grad's fwd_op is relu_grad, etc.
Returns:
need_run_ops (list[(op_desc, fwd_op_desc)]): The ops that need to run during inplace test.
"""
need_run_ops = []
visited_ops = []
def _dfs_grad_op(op_desc, fwd_op_desc=None):
visited_ops.append(op_desc.type())
has_infer_inplace = fluid.core.has_infer_inplace(op_desc.type())
has_grad_op_maker = fluid.core.has_grad_op_maker(op_desc.type())
has_infer_inplace_in_grad_descendants = False
if not has_grad_op_maker:
                has_infer_inplace_in_grad_descendants = False
else:
# get grad_op_desc
grad_op_desc_list, op_grad_to_var = core.get_grad_op_desc(
op_desc, set(), [])
if not grad_op_desc_list:
has_infer_inplace_in_grad_descendants = False
else:
for i, grad_op_desc in enumerate(grad_op_desc_list):
if grad_op_desc.type(
) not in visited_ops and _dfs_grad_op(
grad_op_desc, fwd_op_desc=op_desc):
has_infer_inplace_in_grad_descendants = True
if has_infer_inplace or has_infer_inplace_in_grad_descendants:
need_run_ops.append((op_desc, fwd_op_desc))
return True
else:
return False
_dfs_grad_op(op_desc, fwd_op_desc=fwd_op_desc)
return need_run_ops
def _check_forward_inplace(self,
place,
no_check_set=None,
inplace_atol=None):
"""Check the inplace correctness of given op (self.op_type).
        Run the op twice with the same inputs, once with inplace enabled and once disabled, and compare their outputs.
Args:
place (CPUPlace | CUDAPlace): The place where the op runs.
no_check_set (list): The names of outputs that needn't check, like XShape of reshape op.
inplace_atol (float): The tolerable error, only set when op doesn't ensure computational consistency, like group_norm op.
Returns:
expect_res (tuple(outs, fetch_list, feed_map, program, op_desc)): The results of given op.
We return this to construct grad_program and grad_feed_map for grad inplace check.
"""
# _calc_output() returns in the form tuple(outs, fetch_list, feed_map, program, op_desc) when for_inplace_test=True.
expect_res = self._calc_output(
place,
no_check_set=no_check_set,
enable_inplace=False,
for_inplace_test=True)
actual_res = self._calc_output(
place,
no_check_set=no_check_set,
enable_inplace=True,
for_inplace_test=True)
# compare expect_outs and actual_outs
self._compare_expect_and_actual_outputs(
place,
expect_res[1],
expect_res[0],
actual_res[0],
inplace_atol=inplace_atol)
return expect_res
def _calc_grad_output(self,
place,
fwd_res,
grad_op_desc,
enable_inplace=None):
"""Calculate grad_output for given grad_op_desc.
        Since we don't really check gradient accuracy, but rather the consistency between using and not using inplace,
we use fwd outs (also inputs sometimes) to construct grad inputs.
Args:
place (CPUPlace | CUDAPlace): The place where the op runs.
            fwd_res (tuple): The outputs of its forward op, in the same form as the returns of _calc_output() when for_inplace_test is True.
i.e., tuple(fwd_outs, fwd_fetch_list, fwd_feed_map, fwd_program, fwd_op_desc).
grad_op_desc (OpDesc): The OpDesc of grad op.
enable_inplace (bool): Enable inplace or not.
Returns:
res (tuple(outs, fetch_list, feed_map, program, op_desc)): The results of given grad_op_desc.
"""
fwd_outs, fwd_fetch_list, fwd_feed_map, fwd_program, fwd_op_desc = fwd_res
grad_op_desc_list, op_grad_to_var = core.get_grad_op_desc(fwd_op_desc,
set(), [])
grad_program = self._construct_grad_program_from_forward(
fwd_program, grad_op_desc, op_grad_to_var)
grad_feed_map = self._construct_grad_feed_map_from_forward(
place, fwd_res, grad_op_desc, op_grad_to_var)
grad_fetch_list = grad_op_desc.output_arg_names()
exe = Executor(place)
program = grad_program
if enable_inplace is not None:
build_strategy = fluid.BuildStrategy()
build_strategy.enable_inplace = enable_inplace
compiled_program = fluid.CompiledProgram(
grad_program).with_data_parallel(
loss_name="", build_strategy=build_strategy, places=place)
program = compiled_program
outs = exe.run(program,
feed=grad_feed_map,
fetch_list=grad_fetch_list,
return_numpy=False)
return outs, grad_fetch_list, grad_feed_map, grad_program, grad_op_desc
def _check_grad_inplace(self,
place,
fwd_res,
grad_op_desc,
inplace_atol=None):
"""Check the inplace correctness of given grad_op_desc.
        Run the grad op twice with the same inputs, once with inplace enabled and once disabled, and compare their outputs.
It works like _check_forward_inplace, but the way to construct program and feed_map differs.
So we define a new function for grad, grad_grad, etc.
Args:
place (CPUPlace | CUDAPlace): The place where the op runs.
            fwd_res (tuple): The outputs of its forward op, in the same form as the returns of _calc_output() when for_inplace_test is True.
i.e., tuple(fwd_outs, fwd_fetch_list, fwd_feed_map, fwd_program, fwd_op_desc).
grad_op_desc (OpDesc): The OpDesc of grad op.
inplace_atol (float): The tolerable error, only set when op doesn't ensure computational consistency, like group_norm op.
Returns:
expect_res (tuple(outs, fetch_list, feed_map, program, op_desc)): The results of given op.
We return this to construct grad_program and grad_feed_map for grad inplace check.
"""
expect_res = self._calc_grad_output(
place, fwd_res, grad_op_desc, enable_inplace=False)
actual_res = self._calc_grad_output(
place, fwd_res, grad_op_desc, enable_inplace=True)
self._compare_expect_and_actual_outputs(
place,
expect_res[1],
expect_res[0],
actual_res[0],
inplace_atol=inplace_atol)
return expect_res
def check_inplace_output_with_place(self,
place,
no_check_set=None,
inplace_atol=None):
"""Chech the inplace correctness of given op, its grad op, its grad_grad op, etc.
(1) Get all ops need to run. (see conditions in _get_need_run_ops())
(2) Run op in need_run_ops, and do inplace check if it has infer_inplace.
Args:
place (CPUPlace | CUDAPlace): The place where the op runs.
no_check_set (list): The names of outputs that needn't check, like XShape of reshape op.
inplace_atol (float): The tolerable error, only set when op doesn't ensure computational consistency, like group_norm op.
Returns:
None
"""
has_infer_inplace = fluid.core.has_infer_inplace(self.op_type)
has_grad_op_maker = fluid.core.has_grad_op_maker(self.op_type)
fwd_res = self._calc_output(
place, no_check_set=no_check_set, for_inplace_test=True)
op_desc = fwd_res[4]
need_run_ops = self._get_need_run_ops(op_desc)
res = {}
if hasattr(self, 'attrs') and bool(self.attrs.get('use_xpu', False)):
return
for op_desc, father_op_desc in reversed(need_run_ops):
# The first one is the forward op
has_infer_inplace = fluid.core.has_infer_inplace(op_desc.type())
if op_desc.type() == self.op_type:
if has_infer_inplace:
res[op_desc] = self._check_forward_inplace(
place,
no_check_set=no_check_set,
inplace_atol=inplace_atol)
else:
res[op_desc] = self._calc_output(
place, no_check_set=no_check_set, for_inplace_test=True)
else:
# TODO(zhiqiu): enhance inplace_grad test for ops (sum and activation) using mkldnn
# skip op that use_mkldnn currently
flags_use_mkldnn = fluid.core.globals()["FLAGS_use_mkldnn"]
attrs_use_mkldnn = hasattr(
self,
'attrs') and bool(self.attrs.get('use_mkldnn', False))
if flags_use_mkldnn or attrs_use_mkldnn:
warnings.warn(
"check inplace_grad for ops using mkldnn is not supported"
)
continue
if has_infer_inplace:
fwd_res = res[father_op_desc]
res[op_desc] = self._check_grad_inplace(
place, fwd_res, op_desc, inplace_atol=inplace_atol)
else:
res[op_desc] = self._calc_grad_output(place, fwd_res,
op_desc)
def check_output_with_place(self,
place,
atol=0,
no_check_set=None,
equal_nan=False,
check_dygraph=True,
inplace_atol=None,
check_eager=False):
self.infer_dtype_from_inputs_outputs(self.inputs, self.outputs)
if self.dtype == np.float64 and \
self.op_type not in op_threshold_white_list.NEED_FIX_FP64_CHECK_OUTPUT_THRESHOLD_OP_LIST:
atol = 0
if self.is_bfloat16_op():
if self.is_mkldnn_op():
check_dygraph = False
check_eager = False
if hasattr(self, 'force_fp32_output') and getattr(
self, 'force_fp32_output'):
atol = 1e-2
else:
atol = 2
else:
atol = 1e-2
if no_check_set is not None:
if self.op_type not in no_check_set_white_list.no_check_set_white_list:
raise AssertionError(
"no_check_set of op %s must be set to None." % self.op_type)
if check_dygraph:
dygraph_outs = self._calc_dygraph_output(
place, no_check_set=no_check_set)
if check_eager:
with _test_eager_guard():
eager_dygraph_outs = self._calc_dygraph_output(
place, no_check_set=no_check_set)
outs, fetch_list = self._calc_output(place, no_check_set=no_check_set)
for out_name, out_dup in Operator.get_op_outputs(self.op_type):
if out_name not in self.outputs:
continue
if no_check_set is not None and out_name in no_check_set:
continue
def find_imperative_actual(target_name, dygraph_outs, place):
with fluid.dygraph.base.guard(place=place):
for name in dygraph_outs:
if name == target_name:
return dygraph_outs[name][0]
var_list = dygraph_outs[name]
for i, var in enumerate(var_list):
if var.name == target_name:
return dygraph_outs[name][i]
self.assertTrue(False, "Found failed {} {}".format(
dygraph_outs.keys(), target_name))
def find_actual(target_name, fetch_list):
found = [
i for i, var_name in enumerate(fetch_list)
if var_name == target_name
]
self.assertTrue(
len(found) == 1, "Found {} {}".format(
len(found), target_name))
return found[0]
if out_dup:
sub_out = self.outputs[out_name]
if not isinstance(sub_out, list):
raise AssertionError("sub_out type %s is not list",
type(sub_out))
for item in sub_out:
sub_out_name, expect = item[0], item[1]
if check_dygraph:
imperative_actual = find_imperative_actual(
sub_out_name, dygraph_outs, place)
imperative_actual_t = np.array(imperative_actual.value()
.get_tensor())
if check_eager:
with _test_eager_guard():
eager_imperative_actual = find_imperative_actual(
sub_out_name, eager_dygraph_outs, place)
eager_imperative_actual_t = eager_imperative_actual.numpy(
)
idx = find_actual(sub_out_name, fetch_list)
actual = outs[idx]
actual_t = np.array(actual)
expect_t = expect[0] \
if isinstance(expect, tuple) else expect
self.assertTrue(
np.allclose(
actual_t, expect_t, atol=atol, equal_nan=equal_nan),
"Output (" + sub_out_name + ") has diff at " +
str(place))
if check_dygraph:
self.assertTrue(
np.allclose(
imperative_actual_t,
expect_t,
atol=atol,
equal_nan=equal_nan),
"Output (" + sub_out_name + ") has diff at " +
str(place) + " in dygraph mode")
if check_eager:
with _test_eager_guard():
self.assertTrue(
np.allclose(
eager_imperative_actual_t,
expect_t,
atol=atol,
equal_nan=equal_nan),
"Output (" + sub_out_name + ") has diff at " +
str(place) + " in eager dygraph mode")
if isinstance(expect, tuple):
self.assertListEqual(
actual.recursive_sequence_lengths(), expect[1],
"Output (" + sub_out_name +
") has different lod at " + str(place))
if check_dygraph:
self.assertListEqual(
imperative_actual.value().get_tensor()
.recursive_sequence_lengths(), expect[1],
"Output (" + out_name +
") has different lod at " + str(place) +
" in dygraph mode")
if check_eager:
with _test_eager_guard():
self.assertListEqual(
eager_imperative_actual.value().get_tensor()
.recursive_sequence_lengths(), expect[1],
"Output (" + out_name +
") has different lod at " + str(place) +
" in eager dygraph mode")
else:
if check_dygraph:
imperative_actual = find_imperative_actual(
out_name, dygraph_outs, place)
imperative_actual_t = np.array(imperative_actual.value()
.get_tensor())
if check_eager:
with _test_eager_guard():
eager_imperative_actual = find_imperative_actual(
out_name, eager_dygraph_outs, place)
eager_imperative_actual_t = eager_imperative_actual.numpy(
)
idx = find_actual(out_name, fetch_list)
actual = outs[idx]
actual_t = np.array(actual)
expect = self.outputs[out_name]
expect_t = expect[0] if isinstance(expect, tuple) else expect
# np.uint16 represents bfloat16
if actual_t.dtype == np.uint16 and expect_t.dtype in [
np.float32, np.float64
]:
actual_t = convert_uint16_to_float(actual_t)
rtol = 1.e-2
else:
rtol = 1.e-5
if expect_t.dtype == np.uint16 and actual_t.dtype == np.uint16:
expect_t = convert_uint16_to_float(expect_t)
actual_t = convert_uint16_to_float(actual_t)
atol = max(atol, 0.03)
# NOTE(zhiqiu): np.allclose([], [1.]) returns True
# see details: https://stackoverflow.com/questions/38331703/why-does-numpys-broadcasting-sometimes-allow-comparing-arrays-of-different-leng
if expect_t.size == 0:
self.assertTrue(actual_t.size == 0)
self.assertTrue(
np.allclose(
actual_t,
expect_t,
atol=atol,
rtol=rtol,
equal_nan=equal_nan),
"Output (" + out_name + ") has diff at " + str(place) +
"\nExpect " + str(expect_t) + "\n" + "But Got" +
str(actual_t) + " in class " + self.__class__.__name__)
if check_dygraph:
if self.is_bfloat16_op():
if imperative_actual_t.dtype == np.uint16:
imperative_actual_t = convert_uint16_to_float(
imperative_actual_t)
if expect_t.dtype == np.uint16:
expect_t = convert_uint16_to_float(expect_t)
if six.moves.reduce(
lambda x, y: x * y, imperative_actual_t.shape,
1) == 0 and six.moves.reduce(
lambda x, y: x * y, expect_t.shape, 1) == 0:
pass
else:
self.assertTrue(
np.allclose(
imperative_actual_t,
expect_t,
atol=atol,
rtol=rtol,
equal_nan=equal_nan),
"Output (" + out_name + ") has diff at " +
str(place) + "\nExpect " + str(expect_t) + "\n" +
"But Got" + str(imperative_actual_t) + " in class "
+ self.__class__.__name__)
if check_eager:
with _test_eager_guard():
if self.is_bfloat16_op():
if eager_imperative_actual_t.dtype == np.uint16:
eager_imperative_actual_t = convert_uint16_to_float(
eager_imperative_actual_t)
if expect_t.dtype == np.uint16:
expect_t = convert_uint16_to_float(expect_t)
if six.moves.reduce(lambda x, y: x * y,
eager_imperative_actual_t.shape,
1) == 0 and six.moves.reduce(
lambda x, y: x * y,
expect_t.shape, 1) == 0:
pass
else:
self.assertTrue(
np.allclose(
eager_imperative_actual_t,
expect_t,
atol=atol,
rtol=rtol,
equal_nan=equal_nan),
"Output (" + out_name + ") has diff at " +
str(place) + "\nExpect " + str(expect_t) + "\n"
+ "But Got" + str(eager_imperative_actual_t) +
" in class " + self.__class__.__name__)
if isinstance(expect, tuple):
self.assertListEqual(actual.recursive_sequence_lengths(),
expect[1], "Output (" + out_name +
") has different lod at " + str(place))
if check_dygraph:
self.assertListEqual(
imperative_actual.value().get_tensor()
.recursive_sequence_lengths(), expect[1],
"Output (" + out_name + ") has different lod at " +
str(place) + " in eager dygraph mode")
if check_eager:
with _test_eager_guard():
self.assertListEqual(
eager_imperative_actual.value().get_tensor()
.recursive_sequence_lengths(), expect[1],
"Output (" + out_name +
") has different lod at " + str(place) +
" in eager dygraph mode")
# Note(zhiqiu): inplace_atol should be only set when op doesn't ensure
# computational consistency.
# For example, group_norm uses AtomicAdd on CUDAPlace, which do not ensure
# computation order when multiple threads write the same address. So the
# result of group_norm is non-deterministic when datatype is float.
# When inplace_atol is not None, the inplace check uses numpy.allclose
# to check inplace result instead of numpy.array_equal.
if inplace_atol is not None:
warnings.warn(
"inplace_atol should only be set when op doesn't ensure computational consistency, please check it!"
)
# Check inplace for given op, its grad op, its grad_grad op, etc.
# No effect on original OpTest
# Currently not support ParallelExecutor on XPUPlace.
if not paddle.is_compiled_with_xpu(
) and not paddle.is_compiled_with_npu(
) and not paddle.is_compiled_with_mlu():
self.check_inplace_output_with_place(
place, no_check_set=no_check_set, inplace_atol=inplace_atol)
if check_eager:
return outs, dygraph_outs, eager_dygraph_outs, fetch_list
elif check_dygraph:
return outs, dygraph_outs, fetch_list
else:
return outs, fetch_list
def check_compile_vs_runtime(self, fetch_list, fetch_outs):
def find_fetch_index(target_name, fetch_list):
found = [
i for i, var_name in enumerate(fetch_list)
if var_name == target_name
]
if len(found) == 0:
return -1
else:
self.assertTrue(
len(found) == 1,
"Found {} {}".format(len(found), target_name))
return found[0]
for name in self.op.desc.output_names():
var_names = self.op.desc.output(name)
for var_name in var_names:
i = find_fetch_index(var_name, fetch_list)
if i == -1:
                    # The output is dispensable or intermediate.
break
out = fetch_outs[i]
if isinstance(out, core.LoDTensor):
lod_level_runtime = len(out.lod())
else:
if isinstance(out, core.LoDTensorArray):
warnings.warn(
"The check of LoDTensorArray's lod_level is not implemented now!"
)
lod_level_runtime = 0
var = self.program.global_block().var(var_name)
if var.type == core.VarDesc.VarType.LOD_TENSOR:
lod_level_compile = var.lod_level
else:
lod_level_compile = 0
self.assertEqual(
lod_level_compile, lod_level_runtime,
"The lod_level of Output (" + name +
") is different between compile-time and runtime (" +
str(lod_level_compile) + " vs " + str(lod_level_runtime) +
")")
def _get_places(self):
if self.dtype == np.float16:
if core.is_compiled_with_cuda() and core.op_support_gpu(
self.op_type):
place = core.CUDAPlace(0)
if core.is_float16_supported(place):
return [place]
else:
return []
else:
return []
places = [fluid.CPUPlace()]
cpu_only = self._cpu_only if hasattr(self, '_cpu_only') else False
if core.is_compiled_with_cuda() and core.op_support_gpu(self.op_type)\
and not cpu_only:
places.append(core.CUDAPlace(0))
return places
def check_output(self,
atol=1e-5,
no_check_set=None,
equal_nan=False,
check_dygraph=True,
inplace_atol=None,
check_eager=False):
self.__class__.op_type = self.op_type
if self.is_mkldnn_op():
self.__class__.use_mkldnn = True
if self.is_xpu_op():
self.__class__.use_xpu = True
places = self._get_places()
for place in places:
res = self.check_output_with_place(
place,
atol,
no_check_set,
equal_nan,
check_dygraph,
inplace_atol,
check_eager=check_eager)
if check_eager:
assert check_dygraph == True
outs, dygraph_outs, eager_dygraph_outs, fetch_list = res
elif check_dygraph:
outs, dygraph_outs, fetch_list = res
else:
outs, fetch_list = res
if self.op_type not in compile_vs_runtime_white_list.COMPILE_RUN_OP_WHITE_LIST:
self.check_compile_vs_runtime(fetch_list, outs)
def check_output_customized(self, checker, custom_place=None):
places = self._get_places()
if custom_place:
places.append(custom_place)
for place in places:
outs = self.calc_output(place)
outs = [np.array(out) for out in outs]
outs.sort(key=len)
checker(outs)
def check_output_with_place_customized(self, checker, place):
outs = self.calc_output(place)
outs = [np.array(out) for out in outs]
outs.sort(key=len)
checker(outs)
def _assert_is_close(self, numeric_grads, analytic_grads, names,
max_relative_error, msg_prefix):
for a, b, name in six.moves.zip(numeric_grads, analytic_grads, names):
# It asserts np.abs(a - b) / np.abs(a) < max_relative_error, in which
# max_relative_error is 1e-7. According to the value of np.abs(a), we
# change np.abs(a) to achieve dynamic threshold. For example, if
# the value of np.abs(a) is between 1e-10 and 1e-8, we set np.abs(a)*=1e4.
# Therefore, it asserts np.abs(a - b) / (np.abs(a)*1e4) < max_relative_error,
# which is the same as np.abs(a - b) / np.abs(a) < max_relative_error*1e4.
abs_a = np.abs(a)
if self.dtype == np.float64 and \
self.op_type not in op_threshold_white_list.NEED_FIX_FP64_CHECK_GRAD_THRESHOLD_OP_LIST:
abs_a[abs_a < 1e-10] = 1e-3
abs_a[np.logical_and(abs_a > 1e-10, abs_a <= 1e-8)] *= 1e4
abs_a[np.logical_and(abs_a > 1e-8, abs_a <= 1e-6)] *= 1e2
elif self.is_bfloat16_op():
abs_a[abs_a < 1e-2] = 1
else:
abs_a[abs_a < 1e-3] = 1
diff_mat = np.abs(a - b) / abs_a
max_diff = np.max(diff_mat)
def err_msg():
offset = np.argmax(diff_mat > max_relative_error)
return ("Operator %s error, %s variable %s (shape: %s, dtype: %s) max gradient diff %e over limit %e, "
"the first error element is %d, expected %e, but got %e.") \
% (self.op_type, msg_prefix, name, str(a.shape), self.dtype, max_diff, max_relative_error,
offset, a.flatten()[offset], b.flatten()[offset])
self.assertLessEqual(max_diff, max_relative_error, err_msg())
def _check_grad_helper(self):
self.infer_dtype_from_inputs_outputs(self.inputs, self.outputs)
self.__class__.op_type = self.op_type
self.__class__.exist_check_grad = True
if self.dtype == np.float64:
self.__class__.exist_fp64_check_grad = True
def check_grad(self,
inputs_to_check,
output_names,
no_grad_set=None,
numeric_grad_delta=0.005,
in_place=False,
max_relative_error=0.005,
user_defined_grads=None,
user_defined_grad_outputs=None,
check_dygraph=True,
check_eager=False):
self._check_grad_helper()
places = self._get_places()
for place in places:
self.check_grad_with_place(
place,
inputs_to_check,
output_names,
no_grad_set,
numeric_grad_delta,
in_place,
max_relative_error,
user_defined_grads,
user_defined_grad_outputs,
check_dygraph,
check_eager=check_eager)
def check_grad_with_place(self,
place,
inputs_to_check,
output_names,
no_grad_set=None,
numeric_grad_delta=0.005,
in_place=False,
max_relative_error=0.005,
user_defined_grads=None,
user_defined_grad_outputs=None,
check_dygraph=True,
numeric_place=None,
check_eager=False):
self.scope = core.Scope()
op_inputs = self.inputs if hasattr(self, "inputs") else dict()
op_outputs = self.outputs if hasattr(self, "outputs") else dict()
op_attrs = self.attrs if hasattr(self, "attrs") else dict()
self._check_grad_helper()
if self.is_bfloat16_op() and self.is_mkldnn_op():
check_dygraph = False
check_eager = False
if self.dtype == np.float64 and \
self.op_type not in op_threshold_white_list.NEED_FIX_FP64_CHECK_GRAD_THRESHOLD_OP_LIST:
numeric_grad_delta = 1e-5
max_relative_error = 1e-7
cache_list = None
if hasattr(self, "cache_name_list"):
cache_list = self.cache_name_list
# oneDNN numeric gradient should use CPU kernel
use_onednn = False
if "use_mkldnn" in op_attrs and op_attrs["use_mkldnn"] == True:
op_attrs["use_mkldnn"] = False
use_onednn = True
self.op = create_op(
self.scope,
self.op_type,
op_inputs,
op_outputs,
op_attrs,
cache_list=cache_list)
if use_onednn:
op_attrs["use_mkldnn"] = True
if no_grad_set is None:
no_grad_set = set()
else:
if (self.op_type not in no_grad_set_white_list.NEED_TO_FIX_OP_LIST
) and (
self.op_type not in no_grad_set_white_list.NOT_CHECK_OP_LIST
) and (not self.is_bfloat16_op()):
raise AssertionError("no_grad_set must be None, op_type is " +
self.op_type + " Op.")
for input_to_check in inputs_to_check:
set_input(self.scope, self.op, self.inputs, place)
tensor_to_check = self.scope.find_var(input_to_check).get_tensor()
tensor_size = six.moves.reduce(lambda a, b: a * b,
tensor_to_check.shape(), 1)
if tensor_size < 100:
self.__class__.input_shape_is_large = False
if not type(output_names) is list:
output_names = [output_names]
if numeric_place is None:
numeric_place = place
numeric_grads = user_defined_grads or [
get_numeric_gradient(
numeric_place,
self.scope,
self.op,
self.inputs,
input_to_check,
output_names,
delta=numeric_grad_delta,
in_place=in_place) for input_to_check in inputs_to_check
]
analytic_grads = self._get_gradient(inputs_to_check, place,
output_names, no_grad_set,
user_defined_grad_outputs)
# comparison of bf16 results will happen as fp32
# loop over list of grads and convert bf16 to fp32
fp32_analytic_grads = []
for grad in analytic_grads:
if grad.dtype == np.uint16:
grad = convert_uint16_to_float(grad)
max_relative_error = 0.03 if max_relative_error < 0.03 else max_relative_error
fp32_analytic_grads.append(grad)
analytic_grads = fp32_analytic_grads
fp32_numeric_grads = []
for grad in numeric_grads:
if grad.dtype == np.uint16:
grad = convert_uint16_to_float(grad)
max_relative_error = 0.03 if max_relative_error < 0.03 else max_relative_error
fp32_numeric_grads.append(grad)
numeric_grads = fp32_numeric_grads
self._assert_is_close(numeric_grads, analytic_grads, inputs_to_check,
max_relative_error,
"Gradient Check On %s" % str(place))
if check_dygraph:
dygraph_grad = self._get_dygraph_grad(
inputs_to_check, place, output_names, user_defined_grad_outputs,
no_grad_set)
fp32_grads = []
for grad in dygraph_grad:
if grad.dtype == np.uint16:
grad = convert_uint16_to_float(grad)
max_relative_error = 0.03 if max_relative_error < 0.03 else max_relative_error
fp32_grads.append(grad)
dygraph_grad = fp32_grads
self._assert_is_close(numeric_grads, dygraph_grad, inputs_to_check,
max_relative_error,
"Gradient Check On %s" % str(place))
if check_eager:
with _test_eager_guard():
eager_dygraph_grad = self._get_dygraph_grad(
inputs_to_check, place, output_names,
user_defined_grad_outputs, no_grad_set)
fp32_grads = []
for grad in eager_dygraph_grad:
if grad.dtype == np.uint16:
grad = convert_uint16_to_float(grad)
max_relative_error = 0.03 if max_relative_error < 0.03 else max_relative_error
fp32_grads.append(grad)
eager_dygraph_grad = fp32_grads
self._assert_is_close(numeric_grads, eager_dygraph_grad,
inputs_to_check, max_relative_error,
"Gradient Check On %s" % str(place))
def _find_var_in_dygraph(self, output_vars, name):
if name in output_vars:
return output_vars[name]
else:
for output_vars_index in output_vars:
for output_vars_selected in output_vars[output_vars_index]:
if output_vars_selected.name == name:
return output_vars_selected
def _get_dygraph_grad(self,
inputs_to_check,
place,
output_names,
user_defined_grad_outputs=None,
no_grad_set=None):
with fluid.dygraph.base.guard(place=place):
block = fluid.default_main_program().global_block()
op_proto = OpProtoHolder.instance().get_op_proto(self.op_type)
# prepare input variable
inputs, inputs_grad_dict = self.append_input_output_for_dygraph(
op_proto, self.inputs, True, True, block)
# prepare output variable
outputs = self.append_input_output_for_dygraph(
op_proto, self.outputs, False, False, block)
            # prepare attributes
attrs_outputs = {}
if hasattr(self, "attrs"):
for attrs_name in self.attrs:
if self.attrs[attrs_name] is not None:
attrs_outputs[attrs_name] = self.attrs[attrs_name]
block.append_op(
type=self.op_type,
inputs=inputs,
outputs=outputs,
attrs=attrs_outputs if hasattr(self, "attrs") else None)
if self.dtype == np.uint16:
cast_inputs = self._find_var_in_dygraph(outputs,
output_names[0])
cast_outputs = block.create_var(
dtype="float32", shape=cast_inputs[0].shape)
cast_op = block.append_op(
inputs={"X": cast_inputs},
outputs={"Out": cast_outputs},
type="cast",
attrs={
"in_dtype": core.VarDesc.VarType.BF16,
"out_dtype": core.VarDesc.VarType.FP32
})
outputs = {output_names[0]: cast_outputs}
outputs_valid = {}
for output_name in output_names:
outputs_valid[output_name] = self._find_var_in_dygraph(
outputs, output_name)
if user_defined_grad_outputs is None:
if len(outputs_valid) == 1:
loss = block.create_var(
dtype=self.dtype,
type=core.VarDesc.VarType.LOD_TENSOR,
persistable=False,
stop_gradient=False,
shape=[1])
for outputs_valid_key in outputs_valid:
block.append_op(
type="mean",
inputs={"X": outputs_valid[outputs_valid_key]},
outputs={"Out": [loss]},
attrs=None)
else:
avg_sum = []
for cur_loss in outputs_valid:
cur_avg_loss = block.create_var(
dtype=self.dtype,
type=core.VarDesc.VarType.LOD_TENSOR,
persistable=False,
stop_gradient=False)
block.append_op(
type="mean",
inputs={"X": outputs_valid[cur_loss]},
outputs={"Out": [cur_avg_loss]},
attrs=None)
avg_sum.append(cur_avg_loss)
loss_sum = block.create_var(
dtype=self.dtype,
type=core.VarDesc.VarType.LOD_TENSOR,
persistable=False,
stop_gradient=False,
shape=[1])
block.append_op(
type='sum',
inputs={"X": avg_sum},
outputs={"Out": loss_sum},
attrs=None)
loss = block.create_var(
dtype=self.dtype,
type=core.VarDesc.VarType.LOD_TENSOR,
persistable=False,
stop_gradient=False,
shape=[1])
block.append_op(
type='scale',
inputs={"X": loss_sum},
outputs={"Out": loss},
attrs={'scale': 1.0 / float(len(avg_sum))})
loss.backward()
fetch_list_grad = []
for inputs_to_check_name in inputs_to_check:
a = inputs_grad_dict[inputs_to_check_name].gradient()
fetch_list_grad.append(a)
return fetch_list_grad
else:
# user_defined_grad_outputs here are numpy arrays
if not isinstance(user_defined_grad_outputs, list):
user_defined_grad_outputs = [user_defined_grad_outputs]
grad_outputs = []
for grad_out_value in user_defined_grad_outputs:
grad_outputs.append(paddle.to_tensor(grad_out_value))
                # delete the inputs for which no grad needs to be calculated
for no_grad_val in no_grad_set:
del (inputs[no_grad_val])
grad_inputs = paddle.grad(
outputs=fluid.layers.utils.flatten(outputs),
inputs=fluid.layers.utils.flatten(inputs),
grad_outputs=grad_outputs)
return [grad.numpy() for grad in grad_inputs]
@staticmethod
def _numpy_to_lod_tensor(np_value, lod, place):
tensor = core.LoDTensor()
tensor.set(np_value, place)
if lod is not None:
tensor.set_recursive_sequence_lengths(lod)
return tensor
@staticmethod
def np_dtype_to_fluid_dtype(input):
return input
@staticmethod
def fluid_dtype_to_np_dtype(self, dtype):
return dtype
@staticmethod
def np_value_to_fluid_value(input):
return input
def _get_gradient(self,
input_to_check,
place,
output_names,
no_grad_set,
user_defined_grad_outputs=None,
parallel=False):
prog = Program()
scope = core.Scope()
block = prog.global_block()
self._append_ops(block)
inputs = self._get_inputs(block)
outputs = self._get_outputs(block)
feed_dict = self.feed_var(inputs, place)
if user_defined_grad_outputs is None:
if self.dtype == np.uint16:
cast_inputs = list(map(block.var, output_names))
cast_outputs = block.create_var(
dtype="float32", shape=cast_inputs[0].shape)
cast_op = block.append_op(
inputs={"X": cast_inputs},
outputs={"Out": cast_outputs},
type="cast",
attrs={
"in_dtype": core.VarDesc.VarType.BF16,
"out_dtype": core.VarDesc.VarType.FP32
})
cast_op.desc.infer_var_type(block.desc)
cast_op.desc.infer_shape(block.desc)
output_names = [cast_outputs.name]
loss = append_loss_ops(block, output_names)
param_grad_list = append_backward(
loss=loss,
parameter_list=input_to_check,
no_grad_set=no_grad_set)
fetch_list = [g for p, g in param_grad_list]
else:
assert parallel is False, "unsupported parallel mode when giving custom grad outputs."
# user_defined_grad_outputs here are numpy arrays
if not isinstance(user_defined_grad_outputs, list):
user_defined_grad_outputs = [user_defined_grad_outputs]
grad_outputs = []
for grad_out_value in user_defined_grad_outputs:
                # `persistable` is used to avoid the executor creating a new var in the local scope
var = block.create_var(
shape=grad_out_value.shape,
dtype=grad_out_value.dtype,
persistable=True)
true_var = scope.var(var.name)
tensor = true_var.get_tensor()
tensor.set(grad_out_value, place)
grad_outputs.append(var)
targets = [
outputs[name] for name in outputs if name in output_names
]
inputs = [inputs[name] for name in input_to_check if name in inputs]
grad_inputs = paddle.static.gradients(targets, inputs, grad_outputs,
no_grad_set)
fetch_list = grad_inputs
if parallel:
use_cuda = False
if isinstance(place, fluid.CUDAPlace):
use_cuda = True
compiled_prog = fluid.CompiledProgram(prog).with_data_parallel(
loss_name=loss.name, places=place)
prog = compiled_prog
executor = fluid.Executor(place)
return list(
map(np.array,
executor.run(prog,
feed_dict,
fetch_list,
scope=scope,
return_numpy=False)))
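# A minimal sketch of how OpTest is typically subclassed (the op name and data
# below are illustrative, not taken from this file):
#
#   class TestElementwiseAddOp(OpTest):
#       def setUp(self):
#           self.op_type = "elementwise_add"
#           x = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
#           y = np.random.uniform(0.1, 1, [13, 17]).astype("float64")
#           self.inputs = {'X': x, 'Y': y}
#           self.outputs = {'Out': x + y}
#
#       def test_check_output(self):
#           self.check_output()
#
#       def test_check_grad(self):
#           self.check_grad(['X', 'Y'], 'Out')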
class OpTestTool:
@classmethod
def skip_if(cls, condition: object, reason: str):
return unittest.skipIf(condition, reason)
@classmethod
def skip_if_not_cpu_bf16(cls):
return OpTestTool.skip_if(
not (isinstance(_current_expected_place(), core.CPUPlace) and
core.supports_bfloat16()),
"Place does not support BF16 evaluation")
@classmethod
def skip_if_not_cpu(cls):
return OpTestTool.skip_if(
not isinstance(_current_expected_place(), core.CPUPlace),
"OneDNN supports only CPU for now")
| 44.465746
| 297
| 0.546329
|
46561f3d9b2863db5d1903e9cf0fcf5b52bafd2d
| 163
|
py
|
Python
|
tfsnippet/variational/__init__.py
|
Feng37/tfsnippet
|
70c7dc5c8c8f6314f9d9e44697f90068417db5cd
|
[
"MIT"
] | null | null | null |
tfsnippet/variational/__init__.py
|
Feng37/tfsnippet
|
70c7dc5c8c8f6314f9d9e44697f90068417db5cd
|
[
"MIT"
] | null | null | null |
tfsnippet/variational/__init__.py
|
Feng37/tfsnippet
|
70c7dc5c8c8f6314f9d9e44697f90068417db5cd
|
[
"MIT"
] | 1
|
2020-02-08T15:33:41.000Z
|
2020-02-08T15:33:41.000Z
|
from . import chain, inference
__all__ = sum(
[m.__all__ for m in [
chain, inference,
]],
[]
)
from .chain import *
from .inference import *
| 13.583333
| 30
| 0.588957
|
6caa7f8cacc261c277ebce603fae98affa55d5ba
| 3,196
|
py
|
Python
|
moto/__init__.py
|
jzucker2/moto
|
ba3c9db8a76f1428892e867c68c1e2f4c04c1fa1
|
[
"Apache-2.0"
] | null | null | null |
moto/__init__.py
|
jzucker2/moto
|
ba3c9db8a76f1428892e867c68c1e2f4c04c1fa1
|
[
"Apache-2.0"
] | null | null | null |
moto/__init__.py
|
jzucker2/moto
|
ba3c9db8a76f1428892e867c68c1e2f4c04c1fa1
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
import logging
# logging.getLogger('boto').setLevel(logging.CRITICAL)
__title__ = 'moto'
__version__ = '1.3.2'
from .acm import mock_acm # flake8: noqa
from .apigateway import mock_apigateway, mock_apigateway_deprecated # flake8: noqa
from .autoscaling import mock_autoscaling, mock_autoscaling_deprecated # flake8: noqa
from .awslambda import mock_lambda, mock_lambda_deprecated # flake8: noqa
from .cloudformation import mock_cloudformation, mock_cloudformation_deprecated # flake8: noqa
from .cloudwatch import mock_cloudwatch, mock_cloudwatch_deprecated # flake8: noqa
from .cognitoidentity import mock_cognitoidentity, mock_cognitoidentity_deprecated # flake8: noqa
from .datapipeline import mock_datapipeline, mock_datapipeline_deprecated # flake8: noqa
from .dynamodb import mock_dynamodb, mock_dynamodb_deprecated # flake8: noqa
from .dynamodb2 import mock_dynamodb2, mock_dynamodb2_deprecated # flake8: noqa
from .ec2 import mock_ec2, mock_ec2_deprecated # flake8: noqa
from .ecr import mock_ecr, mock_ecr_deprecated # flake8: noqa
from .ecs import mock_ecs, mock_ecs_deprecated # flake8: noqa
from .elb import mock_elb, mock_elb_deprecated # flake8: noqa
from .elbv2 import mock_elbv2 # flake8: noqa
from .emr import mock_emr, mock_emr_deprecated # flake8: noqa
from .events import mock_events # flake8: noqa
from .glacier import mock_glacier, mock_glacier_deprecated # flake8: noqa
from .iam import mock_iam, mock_iam_deprecated # flake8: noqa
from .kinesis import mock_kinesis, mock_kinesis_deprecated # flake8: noqa
from .kms import mock_kms, mock_kms_deprecated # flake8: noqa
from .opsworks import mock_opsworks, mock_opsworks_deprecated # flake8: noqa
from .polly import mock_polly # flake8: noqa
from .rds import mock_rds, mock_rds_deprecated # flake8: noqa
from .rds2 import mock_rds2, mock_rds2_deprecated # flake8: noqa
from .redshift import mock_redshift, mock_redshift_deprecated # flake8: noqa
from .s3 import mock_s3, mock_s3_deprecated # flake8: noqa
from .ses import mock_ses, mock_ses_deprecated # flake8: noqa
from .sns import mock_sns, mock_sns_deprecated # flake8: noqa
from .sqs import mock_sqs, mock_sqs_deprecated # flake8: noqa
from .sts import mock_sts, mock_sts_deprecated # flake8: noqa
from .ssm import mock_ssm # flake8: noqa
from .route53 import mock_route53, mock_route53_deprecated # flake8: noqa
from .swf import mock_swf, mock_swf_deprecated # flake8: noqa
from .xray import mock_xray, mock_xray_client, XRaySegment # flake8: noqa
from .logs import mock_logs, mock_logs_deprecated # flake8: noqa
from .batch import mock_batch # flake8: noqa
from .resourcegroupstaggingapi import mock_resourcegroupstaggingapi # flake8: noqa
from .iot import mock_iot # flake8: noqa
from .iotdata import mock_iotdata # flake8: noqa
try:
# Need to monkey-patch botocore requests back to underlying urllib3 classes
from botocore.awsrequest import HTTPSConnectionPool, HTTPConnectionPool, HTTPConnection, VerifiedHTTPSConnection
except ImportError:
pass
else:
HTTPSConnectionPool.ConnectionCls = VerifiedHTTPSConnection
HTTPConnectionPool.ConnectionCls = HTTPConnection
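# Usage sketch (illustrative; the bucket name is an assumption): every mock_*
# export can be used as a decorator or context manager around boto3 calls so
# that AWS requests are served in-process instead of hitting real endpoints.
import boto3


@mock_s3
def _example_create_bucket():
    client = boto3.client("s3", region_name="us-east-1")
    client.create_bucket(Bucket="example-bucket")
    return [b["Name"] for b in client.list_buckets()["Buckets"]]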
| 55.103448
| 116
| 0.811952
|
acf2c0aa1f088833625b011a16bc9d7260c0cecb
| 1,046
|
py
|
Python
|
examples/http/hello_world.py
|
viatoriche/microservices
|
3510563edd15dc6131b8a948d6062856cd904ac7
|
[
"MIT"
] | 18
|
2016-04-04T03:01:37.000Z
|
2020-08-18T03:03:40.000Z
|
examples/http/hello_world.py
|
viatoriche/microservices
|
3510563edd15dc6131b8a948d6062856cd904ac7
|
[
"MIT"
] | 7
|
2016-05-06T14:23:16.000Z
|
2019-11-20T11:16:35.000Z
|
examples/http/hello_world.py
|
viatoriche/microservices
|
3510563edd15dc6131b8a948d6062856cd904ac7
|
[
"MIT"
] | 5
|
2016-05-06T08:20:40.000Z
|
2019-07-13T01:34:38.000Z
|
from microservices.http.service import Microservice
from microservices.http.resources import ResourceMarker, ResourceSchema, BrowserResourceSchema
from flask import request
import datetime
app = Microservice(__name__)
app.config['SCHEMA'] = ResourceSchema(
response='result',
response_update=False,
status_code='status',
browser=BrowserResourceSchema(
status=None,
)
)
@app.route(
'/',
resource=ResourceMarker(
update={
'resource_created': datetime.datetime.now().isoformat()
},
),
)
def hello_world():
return {'hello': 'Hello, world'}
@app.route(
'/<string:one>/<string:two>/<string:three>/',
methods=['GET', 'POST'],
resource=ResourceMarker(
url_params={'one': 'one', 'two': 'two', 'three': 'three'},
)
)
def one_two_three(one, two, three):
response = {'one': one, 'two': two, 'three': three}
if request.method == 'POST':
response['data'] = request.data
return response
if __name__ == '__main__':
app.run(debug=True)
| 23.244444
| 94
| 0.644359
|
88786442e40ed64abe2702788507b077a6669033
| 5,748
|
py
|
Python
|
src/ggrc/login/__init__.py
|
MikalaiMikalalai/ggrc-core
|
f0f83b3638574bb64de474f3b70ed27436ca812a
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-01-12T23:46:00.000Z
|
2019-01-12T23:46:00.000Z
|
src/ggrc/login/__init__.py
|
MikalaiMikalalai/ggrc-core
|
f0f83b3638574bb64de474f3b70ed27436ca812a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
src/ggrc/login/__init__.py
|
MikalaiMikalalai/ggrc-core
|
f0f83b3638574bb64de474f3b70ed27436ca812a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2020 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""ggrc.login
Provides basic login and session management using Flask-Login with various
backends
"""
import datetime
import json
import logging
import re
from functools import wraps
from werkzeug.exceptions import Forbidden
import flask_login
from flask import g
from flask import request
from flask import redirect
from ggrc.extensions import get_extension_module_for
from ggrc.rbac import SystemWideRoles
from ggrc import settings
logger = logging.getLogger(__name__)
def get_login_module():
return get_extension_module_for('LOGIN_MANAGER', False)
def user_loader(user_id):
"""Returns the auth user by user_id"""
from ggrc.utils.user_generator import find_user_by_id
user = find_user_by_id(user_id)
if user and settings.DEBUG:
from google.appengine.api import users
try:
ae_user = users.get_current_user()
except AssertionError:
ae_user = None
if ae_user and ae_user.email() != user.email:
return None
return user
def init_app(app):
"""Initialize Flask_Login LoginManager with our app"""
login_module = get_login_module()
if not login_module:
return
login_manager = flask_login.LoginManager()
login_manager.init_app(app)
# login_manager.session_protection = 'strong'
# pylint: disable=unused-variable
@app.login_manager.unauthorized_handler
def unauthorized():
"""Redirects to the login page and generates an error.
Called when the user tries to access an endpoint guarded with
login_required but they are not authorized.
Endpoints like /dashboard, /program/1, etc. redirect the user to the
/login page.
Endpoints like /api /query, /import, etc. resolve with 401 UNAUTHORIZED
and a simple json error object.
"""
if (re.match(r'^(\/api|\/query|\/search)', request.path) or
request.headers.get('X-Requested-By') == 'GGRC'):
return json.dumps({'error': 'unauthorized'}), 401
return redirect(flask_login.login_url('/login', request.url))
app.route('/login')(login_module.login)
app.route('/logout')(login_module.logout)
app.login_manager.user_loader(user_loader)
if hasattr(login_module, 'before_request'):
app.before_request(login_module.before_request)
if hasattr(login_module, 'request_loader'):
app.login_manager.request_loader(login_module.request_loader)
# app.context_processor(login_module.session_context)
def get_current_user(use_external_user=True):
"""Gets current user.
Retrieves the current logged-in user or the external user given
in the X-external-user header based on the provided flag.
Args:
use_external_user: indicates whether the external user should be used.
Returns:
current user.
"""
logged_in_user = _get_current_logged_user()
if use_external_user and is_external_app_user():
try:
from ggrc.utils.user_generator import parse_user_email
external_user_email = parse_user_email(request,
"X-external-user",
mandatory=False)
if external_user_email:
from ggrc.utils.user_generator import find_user
ext_user = find_user(external_user_email, modifier=logged_in_user.id)
if ext_user:
return ext_user
except RuntimeError:
logger.info("Working outside of request context.")
return logged_in_user
def _get_current_logged_user():
"""Gets current logged-in user."""
if hasattr(g, '_current_user'):
return getattr(g, '_current_user')
if get_login_module():
return flask_login.current_user
return None
def get_current_user_id(use_external_user=True):
"""Gets current user id.
Retrieves the current logged-in user id or the external user id
based on the provided flag.
Args:
use_external_user: indicates whether the external user should be used.
Returns:
current user id.
"""
user = get_current_user(use_external_user)
if user and not user.is_anonymous():
return user.id
return None
def login_required(func):
"""Decorator for functions that require users to be logged in."""
if get_login_module():
return flask_login.login_required(func)
return func
def admin_required(func):
"""Admin rights required decorator.
Raises:
Forbidden: if the current user is not an admin.
"""
@wraps(func)
def admin_check(*args, **kwargs):
"""Helper function that performs the admin check"""
user = _get_current_logged_user()
role = getattr(user, 'system_wide_role', None)
if role not in SystemWideRoles.admins:
raise Forbidden()
return func(*args, **kwargs)
return admin_check
def is_creator():
"""Check if the current user has global role Creator."""
current_user = _get_current_logged_user()
return (hasattr(current_user, 'system_wide_role') and
current_user.system_wide_role == SystemWideRoles.CREATOR)
def is_external_app_user():
"""Checks if the current user is an external application.
The account for the external application is defined in settings. External
application requests require special processing and validation.
"""
user = _get_current_logged_user()
if not user or user.is_anonymous():
return False
from ggrc.utils.user_generator import is_app_2_app_user_email
return is_app_2_app_user_email(user.email)
def get_user_date():
"""
Get the current user's timezone from the HTTP request
and compute the user's current date.
Returns:
datetime.date()
"""
user_tz_offset = int(request.headers.get('x-usertimezoneoffset', 0)) / 60
user_date = (datetime.datetime.now() + datetime.timedelta(
hours=user_tz_offset)).date()
return user_date
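# Usage sketch (a minimal, self-contained illustration; the Flask app argument
# and the '/admin/stats' endpoint are assumptions, not real ggrc routes):
# login_required and admin_required compose like ordinary view decorators,
# with admin_required raising Forbidden for non-admin users.
def _example_register_admin_endpoint(app):
  """Register a hypothetical admin-only endpoint on a Flask app."""
  @app.route('/admin/stats')
  @login_required
  @admin_required
  def admin_stats():
    return 'ok'
  return admin_stats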
| 28.455446
| 79
| 0.727731
|
c816b4599286dff28045a91ba0f130043214e147
| 12,796
|
py
|
Python
|
tools/eval.py
|
scoyer/DDMN
|
a604ac8c12627327aacf70ae0cd1c70e083870f1
|
[
"Apache-2.0"
] | 9
|
2020-12-15T10:56:19.000Z
|
2021-11-25T02:54:49.000Z
|
tools/eval.py
|
scoyer/DDMN
|
a604ac8c12627327aacf70ae0cd1c70e083870f1
|
[
"Apache-2.0"
] | 2
|
2021-03-03T02:56:37.000Z
|
2021-05-12T15:22:40.000Z
|
tools/eval.py
|
scoyer/DDMN
|
a604ac8c12627327aacf70ae0cd1c70e083870f1
|
[
"Apache-2.0"
] | 3
|
2021-02-08T16:03:32.000Z
|
2021-05-24T10:54:01.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
File: eval.py
"""
import argparse
import json
import numpy as np
from source.utils.metrics import moses_multi_bleu
from source.utils.metrics import compute_prf, compute_f1
def eval_bleu(eval_fp):
hyps = []
refs = []
with open(eval_fp, 'r') as fr:
for line in fr:
dialog = json.loads(line.strip())
pred_str = dialog["result"]
gold_str = dialog["target"]
hyps.append(pred_str)
refs.append(gold_str)
assert len(hyps) == len(refs)
hyp_arrys = np.array(hyps)
ref_arrys = np.array(refs)
bleu_score = moses_multi_bleu(hyp_arrys, ref_arrys, lowercase=True)
return bleu_score
def cal_resp_acc(gold_str, pred_str):
targets = gold_str.split()
preds = pred_str.split()
max_len = max(len(targets), len(preds))
if len(preds) < max_len:
pads = ['<PAD>'] * (max_len-len(preds))
preds += pads
else:
pads = ['<PAD>'] * (max_len-len(targets))
targets += pads
token_acc_list = [1 if targets[i] == preds[i] else 0 for i in range(max_len)]
resp_acc = np.mean(token_acc_list)
return resp_acc
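# Worked example (illustrative): for gold "the cat sat" vs prediction
# "the dog sat on", the shorter side is padded with '<PAD>' to length 4;
# tokens match at positions 0 and 2, so the accuracy is 2 / 4 = 0.5.
assert cal_resp_acc("the cat sat", "the dog sat on") == 0.5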
def eval_dialog_accuracy(eval_fp):
dialog_acc_dict = dict()
resp_acc_list = []
with open(eval_fp, 'r') as fr:
for line in fr:
dialog = json.loads(line.strip())
dialog_id = dialog["dialog_id"]
pred_str = dialog["result"]
gold_str = dialog["target"]
resp_acc = cal_resp_acc(gold_str, pred_str)
resp_acc_list.append(resp_acc)
if dialog_id not in dialog_acc_dict.keys():
dialog_acc_dict[dialog_id] = []
dialog_acc_dict[dialog_id].append(resp_acc)
resp_acc_score = np.mean(resp_acc_list)
dialog_acc_list = [np.mean(dialog_acc_dict[k]) for k in dialog_acc_dict.keys()]
dialog_acc_score = np.mean(dialog_acc_list)
return resp_acc_score, dialog_acc_score
def eval_entity_f1_kvr(eval_fp, entity_fp, average="micro"):
test_data = []
with open(eval_fp, 'r') as fr:
for line in fr:
ent_idx_sch, ent_idx_wet, ent_idx_nav = [], [], []
dialog = json.loads(line.strip())
if len(dialog["gold_entity"]) > 0:
dialog["gold_entity"] = ' '.join(dialog["gold_entity"]).replace('_', ' ').split()
if dialog["task"] == "schedule":
ent_idx_sch = dialog["gold_entity"]
elif dialog["task"] == "weather":
ent_idx_wet = dialog["gold_entity"]
elif dialog["task"] == "navigate":
ent_idx_nav = dialog["gold_entity"]
ent_index = list(set(ent_idx_sch + ent_idx_wet + ent_idx_nav))
dialog["ent_index"] = ent_index
dialog["ent_idx_sch"] = list(set(ent_idx_sch))
dialog["ent_idx_wet"] = list(set(ent_idx_wet))
dialog["ent_idx_nav"] = list(set(ent_idx_nav))
test_data.append(dialog)
with open(entity_fp, 'r') as fr:
global_entity = json.load(fr)
global_entity_list = []
for key in global_entity.keys():
if key != 'poi':
global_entity_list += [item.lower().replace(' ', '_') for item in global_entity[key]]
else:
for item in global_entity['poi']:
global_entity_list += [item[k].lower().replace(' ', '_') for k in item.keys()]
global_entity_list = list(set(global_entity_list))
F1_pred, F1_sch_pred, F1_nav_pred, F1_wet_pred = 0, 0, 0, 0
F1_count, F1_sch_count, F1_nav_count, F1_wet_count = 0, 0, 0, 0
TP_all, FP_all, FN_all = 0, 0, 0
TP_sch, FP_sch, FN_sch = 0, 0, 0
TP_wet, FP_wet, FN_wet = 0, 0, 0
TP_nav, FP_nav, FN_nav = 0, 0, 0
for dialog in test_data:
pred_tokens = dialog["result"].replace('_', ' ').split()
kb_arrys = dialog["kb"]
gold_ents = dialog["ent_index"]
tp, fp, fn, f1, count = compute_prf(gold_ents, pred_tokens, global_entity_list, kb_arrys)
TP_all += tp
FP_all += fp
FN_all += fn
F1_pred += f1
F1_count += count
gold_sch_ents = dialog["ent_idx_sch"]
tp, fp, fn, f1, count = compute_prf(gold_sch_ents, pred_tokens, global_entity_list, kb_arrys)
TP_sch += tp
FP_sch += fp
FN_sch += fn
F1_sch_pred += f1
F1_sch_count += count
gold_wet_ents = dialog["ent_idx_wet"]
tp, fp, fn, f1, count = compute_prf(gold_wet_ents, pred_tokens, global_entity_list, kb_arrys)
TP_wet += tp
FP_wet += fp
FN_wet += fn
F1_wet_pred += f1
F1_wet_count += count
gold_nav_ents = dialog["ent_idx_nav"]
tp, fp, fn, f1, count = compute_prf(gold_nav_ents, pred_tokens, global_entity_list, kb_arrys)
TP_nav += tp
FP_nav += fp
FN_nav += fn
F1_nav_pred += f1
F1_nav_count += count
if average == "micro":
F1_score = compute_f1(TP_all, FP_all, FN_all)
F1_sch_score = compute_f1(TP_sch, FP_sch, FN_sch)
F1_wet_score = compute_f1(TP_wet, FP_wet, FN_wet)
F1_nav_score = compute_f1(TP_nav, FP_nav, FN_nav)
else:
F1_score = F1_pred / float(F1_count)
F1_sch_score = F1_sch_pred / float(F1_sch_count)
F1_wet_score = F1_wet_pred / float(F1_wet_count)
F1_nav_score = F1_nav_pred / float(F1_nav_count)
return F1_score, F1_sch_score, F1_wet_score, F1_nav_score
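# Micro-averaging sketch (for intuition only; the real implementation lives in
# source.utils.metrics.compute_f1): under the usual definition, precision and
# recall come from the globally pooled TP/FP/FN counts, while the non-micro
# branch above averages per-response F1 values instead.
def _micro_f1_sketch(tp, fp, fn):
    precision = tp / float(tp + fp) if tp + fp > 0 else 0.0
    recall = tp / float(tp + fn) if tp + fn > 0 else 0.0
    if precision + recall == 0:
        return 0.0
    return 2 * precision * recall / (precision + recall)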
def eval_entity_f1_multiwoz(eval_fp, entity_fp, average="micro"):
test_data = []
with open(eval_fp, 'r') as fr:
for line in fr:
ent_idx_res, ent_idx_att, ent_idx_hotel = [], [], []
dialog = json.loads(line.strip())
if len(dialog["gold_entity"]) > 0:
dialog["gold_entity"] = ' '.join(dialog["gold_entity"]).replace('_', ' ').split()
if dialog["task"] == "restaurant":
ent_idx_res = dialog["gold_entity"]
elif dialog["task"] == "attraction":
ent_idx_att = dialog["gold_entity"]
elif dialog["task"] == "hotel":
ent_idx_hotel = dialog["gold_entity"]
ent_index = list(set(ent_idx_res + ent_idx_att + ent_idx_hotel))
dialog["ent_index"] = ent_index
dialog["ent_idx_res"] = list(set(ent_idx_res))
dialog["ent_idx_att"] = list(set(ent_idx_att))
dialog["ent_idx_hotel"] = list(set(ent_idx_hotel))
test_data.append(dialog)
with open(entity_fp, 'r') as fr:
global_entity = json.load(fr)
global_entity_list = []
for key in global_entity.keys():
global_entity_list += [item.lower().replace(' ', '_') for item in global_entity[key]]
global_entity_list = list(set(global_entity_list))
F1_pred, F1_res_pred, F1_att_pred, F1_hotel_pred = 0, 0, 0, 0
F1_count, F1_res_count, F1_att_count, F1_hotel_count = 0, 0, 0, 0
TP_all, FP_all, FN_all = 0, 0, 0
TP_res, FP_res, FN_res = 0, 0, 0
TP_att, FP_att, FN_att = 0, 0, 0
TP_hotel, FP_hotel, FN_hotel = 0, 0, 0
for dialog in test_data:
pred_tokens = dialog["result"].replace('_', ' ').split()
kb_arrys = dialog["kb"]
gold_ents = dialog["ent_index"]
tp, fp, fn, f1, count = compute_prf(gold_ents, pred_tokens, global_entity_list, kb_arrys)
TP_all += tp
FP_all += fp
FN_all += fn
F1_pred += f1
F1_count += count
gold_res_ents = dialog["ent_idx_res"]
tp, fp, fn, f1, count = compute_prf(gold_res_ents, pred_tokens, global_entity_list, kb_arrys)
TP_res += tp
FP_res += fp
FN_res += fn
F1_res_pred += f1
F1_res_count += count
gold_att_ents = dialog["ent_idx_att"]
tp, fp, fn, f1, count = compute_prf(gold_att_ents, pred_tokens, global_entity_list, kb_arrys)
TP_att += tp
FP_att += fp
FN_att += fn
F1_att_pred += f1
F1_att_count += count
gold_hotel_ents = dialog["ent_idx_hotel"]
tp, fp, fn, f1, count = compute_prf(gold_hotel_ents, pred_tokens, global_entity_list, kb_arrys)
TP_hotel += tp
FP_hotel += fp
FN_hotel += fn
F1_hotel_pred += f1
F1_hotel_count += count
if average == "micro":
F1_score = compute_f1(TP_all, FP_all, FN_all)
F1_res_score = compute_f1(TP_res, FP_res, FN_res)
F1_att_score = compute_f1(TP_att, FP_att, FN_att)
F1_hotel_score = compute_f1(TP_hotel, FP_hotel, FN_hotel)
else:
F1_score = F1_pred / float(F1_count)
F1_res_score = F1_res_pred / float(F1_res_count)
F1_att_score = F1_att_pred / float(F1_att_count)
F1_hotel_score = F1_hotel_pred / float(F1_hotel_count)
return F1_score, F1_res_score, F1_att_score, F1_hotel_score
def eval_entity_f1_camrest(eval_fp, entity_fp, average="micro"):
test_data = []
with open(eval_fp, 'r') as fr:
for line in fr:
dialog = json.loads(line.strip())
if len(dialog["gold_entity"]) > 0:
dialog["gold_entity"] = ' '.join(dialog["gold_entity"]).replace('_', ' ').split()
test_data.append(dialog)
with open(entity_fp, 'r') as fr:
global_entity = json.load(fr)
global_entity_list = []
for key in global_entity.keys():
global_entity_list += [item.lower().replace(' ', '_') for item in global_entity[key]]
global_entity_list = list(set(global_entity_list))
F1_pred, F1_count = 0, 0
TP_all, FP_all, FN_all = 0, 0, 0
for dialog in test_data:
pred_tokens = dialog["result"].replace('_', ' ').split()
kb_arrys = dialog["kb"]
gold_ents = dialog["gold_entity"]
tp, fp, fn, f1, count = compute_prf(gold_ents, pred_tokens, global_entity_list, kb_arrys)
F1_pred += f1
TP_all += tp
FP_all += fp
FN_all += fn
F1_count += count
if average == "micro":
F1_score = compute_f1(TP_all, FP_all, FN_all)
else:
F1_score = F1_pred / float(F1_count)
return F1_score
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--data_name", type=str)
parser.add_argument("--data_dir", type=str)
parser.add_argument("--eval_dir", type=str)
args = parser.parse_args()
data_dir = args.data_dir
eval_dir = args.eval_dir
eval_file = "%s/output.txt" % eval_dir
# calculate bleu
bleu = eval_bleu(eval_file)
# calculate acc
resp_acc, dialog_acc = eval_dialog_accuracy(eval_file)
# calculate entity F1
if args.data_name == 'kvr':
entity_file = "%s/kvret_entities.json" % data_dir
f1_score, f1_sch, f1_wet, f1_nav = eval_entity_f1_kvr(eval_file, entity_file, average="micro")
output_str = "BLEU SCORE: %.3f\n" % bleu
output_str += "Per resp. ACC: %.2f%%\n" %(resp_acc * 100)
output_str += "Per dialog ACC: %.2f%%\n" % (dialog_acc * 100)
output_str += "F1 SCORE: %.2f%%\n" % (f1_score * 100)
output_str += "Sch. F1: %.2f%%\n" % (f1_sch * 100)
output_str += "Wet. F1: %.2f%%\n" % (f1_wet * 100)
output_str += "Nav. F1: %.2f%%" % (f1_nav * 100)
print(output_str)
elif args.data_name == 'multiwoz':
entity_file = "%s/global_entities.json" % data_dir
f1_score, f1_res, f1_att, f1_hotel = eval_entity_f1_multiwoz(eval_file, entity_file, average="micro")
output_str = "BLEU SCORE: %.3f\n" % bleu
output_str += "Per resp. ACC: %.2f%%\n" %(resp_acc * 100)
output_str += "Per dialog ACC: %.2f%%\n" % (dialog_acc * 100)
output_str += "F1 SCORE: %.2f%%\n" % (f1_score * 100)
output_str += "Res. F1: %.2f%%\n" % (f1_res * 100)
output_str += "Att. F1: %.2f%%\n" % (f1_att * 100)
output_str += "Hot. F1: %.2f%%" % (f1_hotel * 100)
print(output_str)
elif args.data_name == 'camrest':
entity_file = "%s/camrest676-entities.json" % data_dir
f1_score = eval_entity_f1_camrest(eval_file, entity_file, average="micro")
output_str = "BLEU SCORE: %.3f\n" % bleu
output_str += "Per resp. ACC: %.2f%%\n" % (resp_acc * 100)
output_str += "Per dialog ACC: %.2f%%\n" % (dialog_acc * 100)
output_str += "F1 SCORE: %.2f%%" % (f1_score * 100)
print(output_str)
else:
output_str = "No dataset available!"
print(output_str)
# write evaluation results to file
out_file = "%s/eval.result.txt" % eval_dir
with open(out_file, 'w') as fw:
fw.write(output_str)
print("Saved evaluation results to '{}.'".format(out_file))
| 38.197015
| 109
| 0.598312
|
d63a344d0eacb576c0431df08ddc18bfa27626ae
| 1,038
|
py
|
Python
|
interface/fake_tracking_object.py
|
hywel1994/RosPerception
|
27f3284ad4a587eb7168e4ba9e9f37e18642fe5e
|
[
"MIT"
] | null | null | null |
interface/fake_tracking_object.py
|
hywel1994/RosPerception
|
27f3284ad4a587eb7168e4ba9e9f37e18642fe5e
|
[
"MIT"
] | null | null | null |
interface/fake_tracking_object.py
|
hywel1994/RosPerception
|
27f3284ad4a587eb7168e4ba9e9f37e18642fe5e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# for ros
import rospy
from helper.msg import BaseSensor
from helper.msg import ObjectArray
from helper.msg import Object
import math
def sensorCallback(msg):
d_object = Object()
d_object.semantic_name = 'boat'
d_object.semantic_id = 76
d_object.world_pose.point.x = msg.x_target
d_object.world_pose.point.y = msg.y_target
d_object.world_pose.point.z = 0
d_object.velocity = math.sqrt(pow(msg.vx_target, 2)+pow(msg.vy_target, 2))
d_object.heading = msg.yaw_target
d_object.semantic_confidence = 0.6
d_object.width = 1
d_object.length = 1
d_object.height = 1
d_object.is_new_track = True
d_object_list = ObjectArray()
d_object_list.list.append(d_object)
object_array_pub.publish(d_object_list)
if __name__ == "__main__":
rospy.init_node("fake_tracking", anonymous = True)
object_array_pub = rospy.Publisher('/tracking/objects', ObjectArray, queue_size=5)
rospy.Subscriber("/base/sensor", BaseSensor, sensorCallback)
rospy.spin()
| 29.657143
| 86
| 0.728324
|
965edee3130c58375821db630aa53182ac0489f5
| 2,799
|
py
|
Python
|
tests/test_soft_label_loss.py
|
jiansowa/PaddleSlim
|
a45431c99a775782b7fe5633f313d36ff582e797
|
[
"Apache-2.0"
] | null | null | null |
tests/test_soft_label_loss.py
|
jiansowa/PaddleSlim
|
a45431c99a775782b7fe5633f313d36ff582e797
|
[
"Apache-2.0"
] | 1
|
2020-07-14T09:50:51.000Z
|
2020-07-14T09:50:51.000Z
|
tests/test_soft_label_loss.py
|
jiansowa/PaddleSlim
|
a45431c99a775782b7fe5633f313d36ff582e797
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append("../")
import unittest
import paddle.fluid as fluid
from paddleslim.dist import merge, soft_label_loss
from layers import conv_bn_layer
class TestSoftLabelLoss(unittest.TestCase):
def test_soft_label_loss(self):
student_main = fluid.Program()
student_startup = fluid.Program()
with fluid.program_guard(student_main, student_startup):
input = fluid.data(name="image", shape=[None, 3, 224, 224])
conv1 = conv_bn_layer(input, 8, 3, "conv1")
conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
student_predict = conv1 + conv2
teacher_main = fluid.Program()
teacher_startup = fluid.Program()
with fluid.program_guard(teacher_main, teacher_startup):
input = fluid.data(name="image", shape=[None, 3, 224, 224])
conv1 = conv_bn_layer(input, 8, 3, "conv1")
conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
sum1 = conv1 + conv2
conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
sum2 = conv4 + sum1
conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
teacher_predict = conv_bn_layer(conv5, 8, 3, "conv6")
place = fluid.CPUPlace()
data_name_map = {'image': 'image'}
merge(teacher_main, student_main, data_name_map, place)
merged_ops = []
for block in student_main.blocks:
for op in block.ops:
merged_ops.append(op.type)
with fluid.program_guard(student_main):
distill_loss = soft_label_loss('teacher_conv6_bn_output.tmp_2',
'conv2_bn_output.tmp_2',
student_main)
loss_ops = []
for block in student_main.blocks:
for op in block.ops:
loss_ops.append(op.type)
self.assertTrue(set(merged_ops).difference(set(loss_ops)) == set())
self.assertTrue(
set(loss_ops).difference(set(merged_ops)) ==
{'cross_entropy', 'softmax', 'reduce_mean', 'scale'})
if __name__ == '__main__':
unittest.main()
| 41.161765
| 75
| 0.629511
|
4a372b24b584f8430924d32fc436f92f6eef6206
| 383
|
py
|
Python
|
TWT/wsgi.py
|
avibn/twtcodejam.net
|
938b9a536dc6407dd86651697abde4f167fc52a8
|
[
"MIT"
] | null | null | null |
TWT/wsgi.py
|
avibn/twtcodejam.net
|
938b9a536dc6407dd86651697abde4f167fc52a8
|
[
"MIT"
] | null | null | null |
TWT/wsgi.py
|
avibn/twtcodejam.net
|
938b9a536dc6407dd86651697abde4f167fc52a8
|
[
"MIT"
] | null | null | null |
"""
WSGI config for TWT project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'TWT.settings')
application = get_wsgi_application()
| 22.529412
| 78
| 0.780679
|
13295be4d0016d9a2f022227735a1b0904a9269f
| 2,405
|
py
|
Python
|
print_info.py
|
typ1c/AutoHS
|
b9e610e1e1301d630cca5acf1ae735890148631b
|
[
"MIT"
] | 49
|
2020-11-24T08:54:31.000Z
|
2022-03-28T13:28:19.000Z
|
print_info.py
|
Huangyan0804/AutoHS
|
e5dbdb3b5366fa8d06926c9f205b32d24d5ed778
|
[
"MIT"
] | 14
|
2021-07-06T14:16:01.000Z
|
2022-02-24T05:01:13.000Z
|
print_info.py
|
Huangyan0804/AutoHS
|
e5dbdb3b5366fa8d06926c9f205b32d24d5ed778
|
[
"MIT"
] | 15
|
2021-01-29T02:44:33.000Z
|
2022-03-24T09:46:59.000Z
|
from constants.constants import *
import os
import time
error_file_handle = None
warn_file_handle = None
debug_file_handle = None
sys_file_handle = None
info_file_handle = None
def print_info_init():
global error_file_handle
global warn_file_handle
global debug_file_handle
global sys_file_handle
global info_file_handle
if not os.path.exists("./log/"):
os.mkdir("./log/")
error_file_handle = open("./log/error_log.txt", "w", encoding="utf8")
warn_file_handle = open("./log/warn_log.txt", "w", encoding="utf8")
debug_file_handle = open("./log/debug_log.txt", "w", encoding="utf8")
sys_file_handle = open("./log/sys_log.txt", "w", encoding="utf8")
info_file_handle = open("./log/info_log.txt", "w", encoding="utf8")
def print_info_close():
global error_file_handle
global warn_file_handle
global debug_file_handle
global sys_file_handle
global info_file_handle
error_file_handle.close()
error_file_handle = None
warn_file_handle.close()
warn_file_handle = None
debug_file_handle.close()
debug_file_handle = None
sys_file_handle.close()
sys_file_handle = None
info_file_handle.close()
info_file_handle = None
def current_time():
return time.strftime("%H:%M:%S", time.localtime())
def error_print(error_str):
error_str = f"[{current_time()} ERROR] {error_str}"
if ERROR_PRINT:
print(error_str)
if ERROR_FILE_WRITE and error_file_handle:
error_file_handle.write(error_str + "\n")
def warn_print(warn_str):
warn_str = f"[{current_time()} WARN] {warn_str}"
if WARN_PRINT:
print(warn_str)
if WARN_FILE_WRITE and warn_file_handle:
warn_file_handle.write(warn_str+ "\n")
def debug_print(debug_str=""):
debug_str = f"[{current_time()} DEBUG] {debug_str}"
if DEBUG_PRINT:
print(debug_str)
if DEBUG_FILE_WRITE and debug_file_handle:
debug_file_handle.write(debug_str + "\n")
def sys_print(sys_str):
sys_str = f"[{current_time()} SYS] {sys_str}"
if SYS_PRINT:
print(sys_str)
if SYS_FILE_WRITE and sys_file_handle:
sys_file_handle.write(sys_str + "\n")
def info_print(info_str):
info_str = f"[{current_time()} INFO] {info_str}"
if INFO_PRINT:
print(info_str)
if INFO_FILE_WRITE and info_file_handle:
info_file_handle.write(info_str + "\n")
| 25.860215
| 73
| 0.692308
|
5ba97a28f8525f6f2700e0c64246386b7e557a57
| 365
|
py
|
Python
|
aether/api/urls.py
|
katajakasa/aetherguild4
|
a7e294f0cff11e2508751f1013e6648fdc56bb94
|
[
"MIT"
] | null | null | null |
aether/api/urls.py
|
katajakasa/aetherguild4
|
a7e294f0cff11e2508751f1013e6648fdc56bb94
|
[
"MIT"
] | 1
|
2021-06-10T17:36:11.000Z
|
2021-06-10T17:36:11.000Z
|
aether/api/urls.py
|
katajakasa/aetherguild4
|
a7e294f0cff11e2508751f1013e6648fdc56bb94
|
[
"MIT"
] | null | null | null |
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from .viewsets import ForumPostViewSet
app_name = 'api'
router = DefaultRouter()
router.register(r'posts', ForumPostViewSet, 'forumpost')
urlpatterns = [
path('', include(router.urls)),
path('api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
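# Route sketch (illustrative): with the registration above, DefaultRouter
# generates the usual list/detail routes, roughly:
#   posts/       -> 'api:forumpost-list'   (GET list, POST create)
#   posts/<pk>/  -> 'api:forumpost-detail' (GET retrieve, PUT/PATCH/DELETE)
# plus a browsable API root view at ''.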
| 24.333333
| 81
| 0.753425
|
a400e2c004a9fa928335bd9b83369a17f4076353
| 154
|
py
|
Python
|
website/addons/googledrive/exceptions.py
|
lbanner/osf.io
|
1898ef0ff8bd91713e94c60e7463b5f81ac62caa
|
[
"Apache-2.0"
] | null | null | null |
website/addons/googledrive/exceptions.py
|
lbanner/osf.io
|
1898ef0ff8bd91713e94c60e7463b5f81ac62caa
|
[
"Apache-2.0"
] | 1
|
2019-08-16T13:45:12.000Z
|
2019-08-16T13:45:12.000Z
|
website/addons/googledrive/exceptions.py
|
lbanner/osf.io
|
1898ef0ff8bd91713e94c60e7463b5f81ac62caa
|
[
"Apache-2.0"
] | null | null | null |
from website.addons.base.exceptions import AddonError
class GoogleDriveError(AddonError):
pass
class ExpiredAuthError(GoogleDriveError):
pass
| 15.4
| 53
| 0.798701
|
0fa251d632470886435bd69b33d9b2c2eb61ed88
| 11,173
|
py
|
Python
|
google/cloud/dataproc_v1/services/cluster_controller/transports/base.py
|
medb/python-dataproc
|
6a2a9afb3d0fc81b9abffa70c31746f6ca428aba
|
[
"Apache-2.0"
] | 34
|
2020-06-12T05:25:02.000Z
|
2022-03-13T22:14:12.000Z
|
google/cloud/dataproc_v1/services/cluster_controller/transports/base.py
|
medb/python-dataproc
|
6a2a9afb3d0fc81b9abffa70c31746f6ca428aba
|
[
"Apache-2.0"
] | 104
|
2020-02-05T12:35:54.000Z
|
2022-03-30T23:17:03.000Z
|
google/cloud/dataproc_v1/services/cluster_controller/transports/base.py
|
medb/python-dataproc
|
6a2a9afb3d0fc81b9abffa70c31746f6ca428aba
|
[
"Apache-2.0"
] | 23
|
2020-02-05T23:13:14.000Z
|
2022-03-29T22:56:44.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
import pkg_resources
import google.auth # type: ignore
import google.api_core
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.dataproc_v1.types import clusters
from google.longrunning import operations_pb2 # type: ignore
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-dataproc",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class ClusterControllerTransport(abc.ABC):
"""Abstract transport class for ClusterController."""
AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
DEFAULT_HOST: str = "dataproc.googleapis.com"
def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.create_cluster: gapic_v1.method.wrap_method(
self.create_cluster,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
),
self.update_cluster: gapic_v1.method.wrap_method(
self.update_cluster,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
),
self.stop_cluster: gapic_v1.method.wrap_method(
self.stop_cluster, default_timeout=None, client_info=client_info,
),
self.start_cluster: gapic_v1.method.wrap_method(
self.start_cluster, default_timeout=None, client_info=client_info,
),
self.delete_cluster: gapic_v1.method.wrap_method(
self.delete_cluster,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
),
self.get_cluster: gapic_v1.method.wrap_method(
self.get_cluster,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.InternalServerError,
core_exceptions.ServiceUnavailable,
),
deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
),
self.list_clusters: gapic_v1.method.wrap_method(
self.list_clusters,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.DeadlineExceeded,
core_exceptions.InternalServerError,
core_exceptions.ServiceUnavailable,
),
deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
),
self.diagnose_cluster: gapic_v1.method.wrap_method(
self.diagnose_cluster,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
core_exceptions.ServiceUnavailable,
),
deadline=300.0,
),
default_timeout=300.0,
client_info=client_info,
),
}
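# Backoff sketch (for intuition only): Retry(initial=0.1, maximum=60.0,
# multiplier=1.3) sleeps roughly 0.1 s, 0.13 s, 0.169 s, ... between
# attempts (delay *= 1.3, jitter aside), caps each delay at 60 s, and
# stops retrying once the 300 s deadline is exhausted.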
def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError()
@property
def operations_client(self):
"""Return the client designed to process long-running operations."""
raise NotImplementedError()
@property
def create_cluster(
self,
) -> Callable[
[clusters.CreateClusterRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def update_cluster(
self,
) -> Callable[
[clusters.UpdateClusterRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def stop_cluster(
self,
) -> Callable[
[clusters.StopClusterRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def start_cluster(
self,
) -> Callable[
[clusters.StartClusterRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def delete_cluster(
self,
) -> Callable[
[clusters.DeleteClusterRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
@property
def get_cluster(
self,
) -> Callable[
[clusters.GetClusterRequest],
Union[clusters.Cluster, Awaitable[clusters.Cluster]],
]:
raise NotImplementedError()
@property
def list_clusters(
self,
) -> Callable[
[clusters.ListClustersRequest],
Union[clusters.ListClustersResponse, Awaitable[clusters.ListClustersResponse]],
]:
raise NotImplementedError()
@property
def diagnose_cluster(
self,
) -> Callable[
[clusters.DiagnoseClusterRequest],
Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
]:
raise NotImplementedError()
__all__ = ("ClusterControllerTransport",)
| 36.275974
| 101
| 0.594558
|
147049094514e67bb8cdeca4e38bc758ed5db155
| 7,545
|
py
|
Python
|
linux_utils/fstab.py
|
JAvito-GC/Linux-Utils
|
41d8905063380f0e27475063ffaaf1a9edca6867
|
[
"MIT"
] | 4
|
2018-10-20T15:49:07.000Z
|
2020-12-03T03:44:52.000Z
|
linux_utils/fstab.py
|
JAvito-GC/Linux-Utils
|
41d8905063380f0e27475063ffaaf1a9edca6867
|
[
"MIT"
] | null | null | null |
linux_utils/fstab.py
|
JAvito-GC/Linux-Utils
|
41d8905063380f0e27475063ffaaf1a9edca6867
|
[
"MIT"
] | 4
|
2017-10-18T12:49:42.000Z
|
2022-03-09T16:21:09.000Z
|
# linux-utils: Linux system administration tools for Python.
#
# Author: Peter Odding <peter@peterodding.com>
# Last Change: February 9, 2020
# URL: https://linux-utils.readthedocs.io
"""
Parsing of `/etc/fstab`_ configuration files.
.. _/etc/fstab: https://manpages.debian.org/fstab
"""
# Standard library modules.
import logging
# External dependencies.
from humanfriendly.text import split
from property_manager import lazy_property
# Modules included in our package.
from linux_utils import coerce_device_file
from linux_utils.tabfile import TabFileEntry, parse_tab_file
# Public identifiers that require documentation.
__all__ = (
'FileSystemEntry',
'find_mounted_filesystems',
'logger',
'parse_fstab',
)
# Initialize a logger for this module.
logger = logging.getLogger(__name__)
def find_mounted_filesystems(filename='/proc/mounts', context=None):
"""
Get information about mounted filesystem from ``/proc/mounts``.
:param filename: The absolute pathname of the file to parse (a string,
defaults to ``/proc/mounts``).
:param context: See :func:`.coerce_context()` for details.
:returns: A generator of :class:`FileSystemEntry` objects.
This function is a trivial wrapper for :func:`parse_fstab()` that instructs
it to parse ``/proc/mounts`` instead of `/etc/fstab`_. Here's an example:
>>> from humanfriendly import format_table
>>> from linux_utils.fstab import find_mounted_filesystems
>>> print(format_table(
... data=[
... (entry.mount_point, entry.device_file, entry.vfs_type)
... for entry in find_mounted_filesystems()
... if entry.vfs_type not in (
... # While writing this example I was actually surprised to
... # see how many `virtual filesystems' a modern Linux system
... # has mounted by default (based on Ubuntu 16.04).
... 'autofs', 'cgroup', 'debugfs', 'devpts', 'devtmpfs', 'efivarfs',
... 'fuse.gvfsd-fuse', 'fusectl', 'hugetlbfs', 'mqueue', 'proc',
... 'pstore', 'securityfs', 'sysfs', 'tmpfs',
... )
... ],
... column_names=["Mount point", "Device", "Type"],
... ))
---------------------------------------------------
| Mount point | Device | Type |
---------------------------------------------------
| / | /dev/mapper/internal-root | ext4 |
| /boot | /dev/sda5 | ext4 |
| /boot/efi | /dev/sda1 | vfat |
| /mnt/backups | /dev/mapper/backups | ext4 |
---------------------------------------------------
"""
return parse_fstab(filename=filename, context=context)
def parse_fstab(filename='/etc/fstab', context=None):
"""
Parse the Linux configuration file `/etc/fstab`_.
:param filename: The absolute pathname of the file to parse (a string,
defaults to `/etc/fstab`_).
:param context: See :func:`.coerce_context()` for details.
:returns: A generator of :class:`FileSystemEntry` objects.
Here's an example:
>>> from linux_utils.fstab import parse_fstab
>>> next(e for e in parse_fstab() if e.mount_point == '/')
FileSystemEntry(
check_order=1,
configuration_file='/etc/fstab',
device='UUID=147f7d18-e0c9-499c-8791-401642581b90',
device_file='/dev/disk/by-uuid/147f7d18-e0c9-499c-8791-401642581b90',
dump_frequency=0,
line_number=11,
mount_point='/',
options=['defaults', 'errors=remount-ro', 'discard', 'relatime', 'data=ordered'],
vfs_type='ext4',
)
Note that some miscellaneous :class:`FileSystemEntry` properties
were omitted from the example above to make it more concise.
"""
for entry in parse_tab_file(filename=filename, context=context):
if len(entry.tokens) >= 4:
# Transform the object into our type.
entry.__class__ = FileSystemEntry
yield entry
elif len(entry.tokens) > 0:
logger.warning("Ignoring line %i in %s because I couldn't parse it!",
entry.line_number, entry.configuration_file)
class FileSystemEntry(TabFileEntry):
"""
An entry parsed from `/etc/fstab`_.
Each entry in the fstab file has six fields, these are mapped to the
following properties:
1. :attr:`device`
2. :attr:`mount_point`
3. :attr:`vfs_type`
4. :attr:`options`
5. :attr:`dump_frequency`
6. :attr:`check_order`
Refer to the `fstab man page`_ for more information about the meaning of
each of these fields. The values of the following properties are computed
based on the six fields above:
- :attr:`device_file`
- :attr:`nfs_directory`
- :attr:`nfs_server`
.. _fstab man page: https://manpages.debian.org/fstab
"""
@lazy_property
def check_order(self):
"""The order in which the filesystem should be checked at boot time (an integer number, defaults to 0)."""
try:
return int(self.tokens[5])
except IndexError:
return 0
@property
def device(self):
"""
The block special device or remote filesystem to be mounted (a string).
The value of this property may be a ``UUID=...`` expression.
"""
return self.tokens[0]
@lazy_property
def device_file(self):
"""
The block special device to be mounted (a string).
The value of this property is computed by passing :attr:`device` to
:func:`.coerce_device_file()`.
"""
return coerce_device_file(self.device)
@lazy_property
def dump_frequency(self):
"""The dump frequency for the filesystem (an integer number, defaults to 0)."""
try:
return int(self.tokens[4])
except IndexError:
return 0
@lazy_property
def mount_point(self):
r"""
The mount point for the filesystem (a string).
Each occurrence of the escape sequence ``\040`` is replaced by a space.
"""
return self.tokens[1].replace(r'\040', ' ')
@lazy_property
def nfs_directory(self):
"""
The directory on the NFS server (a string or :data:`None`).
When :attr:`vfs_type` is ``nfs`` or ``nfs4`` and :attr:`device` is of
the form ``<server>:<directory>`` the value of :attr:`nfs_directory`
will be the part *after* the colon (``:``).
"""
if self.vfs_type in ('nfs', 'nfs4'):
server, _, directory = self.device.partition(':')
if server and directory:
return directory
@lazy_property
def nfs_server(self):
"""
The host name or IP address of the NFS server (a string or :data:`None`).
When :attr:`vfs_type` is ``nfs`` or ``nfs4`` and :attr:`device` is of
the form ``<server>:<directory>`` the value of :attr:`nfs_server` will
be the part *before* the colon (``:``).
"""
if self.vfs_type in ('nfs', 'nfs4'):
server, _, directory = self.device.partition(':')
if server and directory:
return server
@lazy_property
def options(self):
"""The mount options for the filesystem (a list of strings)."""
return split(self.tokens[3])
@property
def vfs_type(self):
"""The type of filesystem (a string like 'ext4' or 'xfs')."""
return self.tokens[2]
| 33.986486
| 114
| 0.599867
|
ff695aff415cf94cdec11ee32b5178b9e886efe0
| 1,028
|
py
|
Python
|
theano/compile/__init__.py
|
mrgloom/Theano
|
018c7fa9a292858486b92e03a5d0a36cb3e45e1f
|
[
"BSD-3-Clause"
] | 1
|
2020-12-27T13:50:59.000Z
|
2020-12-27T13:50:59.000Z
|
theano/compile/__init__.py
|
mrgloom/Theano
|
018c7fa9a292858486b92e03a5d0a36cb3e45e1f
|
[
"BSD-3-Clause"
] | null | null | null |
theano/compile/__init__.py
|
mrgloom/Theano
|
018c7fa9a292858486b92e03a5d0a36cb3e45e1f
|
[
"BSD-3-Clause"
] | null | null | null |
from theano.compile.ops import (
DeepCopyOp, deep_copy_op, register_deep_copy_op_c_code,
Shape, shape, register_shape_c_code,
Shape_i, register_shape_i_c_code,
ViewOp, view_op, register_view_op_c_code, FromFunctionOp,
as_op, Rebroadcast, register_rebroadcast_c_code,
SpecifyShape, specify_shape, register_specify_shape_c_code)
from theano.compile.function_module import *
from theano.compile.mode import *
from theano.compile.io import *
from theano.compile.module import *
from theano.compile.debugmode import DebugMode
from theano.compile.monitormode import MonitorMode
from theano.compile.profiling import ProfileStats, ScanProfileStats
from theano.compile.profilemode import ProfileMode
from theano.compile.sharedvalue import (shared, shared_constructor,
SharedVariable)
from theano.compile.pfunc import pfunc, Param, rebuild_collect_shared
from theano.compile.builders import *
from theano.compile.function import function
| 32.125
| 69
| 0.774319
|
93dbe87c3fd25e67aa0d3bbea326d20f9b3882e0
| 23,069
|
py
|
Python
|
rpyc/utils/server.py
|
odarr/rpyc
|
ed0c9090d942e628fb3069feade66bec904f7127
|
[
"MIT"
] | 1
|
2021-07-11T09:10:58.000Z
|
2021-07-11T09:10:58.000Z
|
rpyc/utils/server.py
|
odarr/rpyc
|
ed0c9090d942e628fb3069feade66bec904f7127
|
[
"MIT"
] | null | null | null |
rpyc/utils/server.py
|
odarr/rpyc
|
ed0c9090d942e628fb3069feade66bec904f7127
|
[
"MIT"
] | null | null | null |
"""
rpyc plug-in server (threaded or forking)
"""
import sys
import os
import socket
import time
import threading # noqa: F401
import errno
import logging
from contextlib import closing
try:
import Queue
except ImportError:
import queue as Queue
from rpyc.core import SocketStream, Channel
from rpyc.utils.registry import UDPRegistryClient
from rpyc.utils.authenticators import AuthenticationError
from rpyc.lib import safe_import, spawn, spawn_waitready
from rpyc.lib.compat import poll, get_exc_errno
signal = safe_import("signal")
gevent = safe_import("gevent")
class Server(object):
"""Base server implementation
:param service: the :class:`~rpyc.core.service.Service` to expose
:param hostname: the host to bind to. By default, the 'wildcard address' is used to listen on all interfaces.
If not properly secured, the server can receive traffic from unintended or even malicious sources.
:param ipv6: whether to create an IPv6 or IPv4 socket. The default is IPv4
:param port: the TCP port to bind to
:param backlog: the socket's backlog (passed to ``listen()``)
:param reuse_addr: whether or not to create the socket with the ``SO_REUSEADDR`` option set.
:param authenticator: the :ref:`api-authenticators` to use. If ``None``, no authentication
is performed.
:param registrar: the :class:`~rpyc.utils.registry.RegistryClient` to use.
If ``None``, a default :class:`~rpyc.utils.registry.UDPRegistryClient`
will be used
:param auto_register: whether or not to register using the *registrar*. By default, the
server will attempt to register only if a registrar was explicitly given.
:param protocol_config: the :data:`configuration dictionary <rpyc.core.protocol.DEFAULT_CONFIG>`
that is passed to the RPyC connection
:param logger: the ``logger`` to use (of the built-in ``logging`` module). If ``None``, a
default logger will be created.
:param listener_timeout: the timeout of the listener socket; set to ``None`` to disable (e.g.
on embedded platforms with limited battery)
"""
def __init__(self, service, hostname=None, ipv6=False, port=0,
backlog=socket.SOMAXCONN, reuse_addr=True, authenticator=None, registrar=None,
auto_register=None, protocol_config=None, logger=None, listener_timeout=0.5,
socket_path=None):
self.active = False
self._closed = False
self.service = service
self.authenticator = authenticator
self.backlog = backlog
if auto_register is None:
self.auto_register = bool(registrar)
else:
self.auto_register = auto_register
if protocol_config is None:
protocol_config = {}
self.protocol_config = protocol_config
self.clients = set()
if socket_path is not None:
if hostname is not None or port != 0 or ipv6 is not False:
raise ValueError("socket_path is mutually exclusive with: hostname, port, ipv6")
self.listener = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
self.listener.bind(socket_path)
# set self.port to the path, as it's used for the registry and logging
self.host, self.port = "", socket_path
else:
if ipv6:
family = socket.AF_INET6
else:
family = socket.AF_INET
self.listener = socket.socket(family, socket.SOCK_STREAM)
address = socket.getaddrinfo(hostname, port, family=family, type=socket.SOCK_STREAM, proto=socket.IPPROTO_TCP, flags=socket.AI_PASSIVE)[0][-1]
if reuse_addr and sys.platform != "win32":
# warning: reuseaddr is not what you'd expect on windows!
# it allows you to bind an already bound port, resulting in
# "unexpected behavior" (quoting MSDN)
self.listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.listener.bind(address)
self.listener.settimeout(listener_timeout)
# hack for IPv6 (the tuple can be longer than 2)
sockname = self.listener.getsockname()
self.host, self.port = sockname[0], sockname[1]
if logger is None:
logger = logging.getLogger("%s/%s" % (self.service.get_service_name(), self.port))
self.logger = logger
if "logger" not in self.protocol_config:
self.protocol_config["logger"] = self.logger
if registrar is None:
registrar = UDPRegistryClient(logger=self.logger)
self.registrar = registrar
def close(self):
"""Closes (terminates) the server and all of its clients. If applicable,
also unregisters from the registry server"""
if self._closed:
return
self._closed = True
self.active = False
if self.auto_register:
try:
self.registrar.unregister(self.port)
except Exception:
self.logger.exception("error unregistering services")
try:
self.listener.shutdown(socket.SHUT_RDWR)
except (EnvironmentError, socket.error):
pass
self.listener.close()
self.logger.info("listener closed")
for c in set(self.clients):
try:
c.shutdown(socket.SHUT_RDWR)
except Exception:
pass
c.close()
self.clients.clear()
def fileno(self):
"""returns the listener socket's file descriptor"""
return self.listener.fileno()
def accept(self):
"""accepts an incoming socket connection (blocking)"""
while self.active:
try:
sock, addrinfo = self.listener.accept()
except socket.timeout:
pass
except socket.error:
ex = sys.exc_info()[1]
if get_exc_errno(ex) in (errno.EINTR, errno.EAGAIN):
pass
else:
raise EOFError()
else:
break
if not self.active:
return
sock.setblocking(True)
self.logger.info("accepted %s with fd %s", addrinfo, sock.fileno())
self.clients.add(sock)
self._accept_method(sock)
def _accept_method(self, sock):
"""this method should start a thread, fork a child process, or
anything else in order to serve the client. once the mechanism has
been created, it should invoke _authenticate_and_serve_client with
`sock` as the argument"""
raise NotImplementedError
def _authenticate_and_serve_client(self, sock):
try:
if self.authenticator:
addrinfo = sock.getpeername()
try:
sock2, credentials = self.authenticator(sock)
except AuthenticationError:
self.logger.info("%s failed to authenticate, rejecting connection", addrinfo)
return
else:
self.logger.info("%s authenticated successfully", addrinfo)
else:
credentials = None
sock2 = sock
try:
self._serve_client(sock2, credentials)
except Exception:
self.logger.exception("client connection terminated abruptly")
raise
finally:
try:
sock.shutdown(socket.SHUT_RDWR)
except Exception:
pass
sock.close()
self.clients.discard(sock)
def _serve_client(self, sock, credentials):
addrinfo = sock.getpeername()
if credentials:
self.logger.info("welcome %s (%r)", addrinfo, credentials)
else:
self.logger.info("welcome %s", addrinfo)
try:
config = dict(self.protocol_config, credentials=credentials,
endpoints=(sock.getsockname(), addrinfo), logger=self.logger)
conn = self.service._connect(Channel(SocketStream(sock)), config)
self._handle_connection(conn)
finally:
self.logger.info("goodbye %s", addrinfo)
def _handle_connection(self, conn):
"""This methoed should implement the server's logic."""
conn.serve_all()
def _bg_register(self):
interval = self.registrar.REREGISTER_INTERVAL
self.logger.info("started background auto-register thread "
"(interval = %s)", interval)
tnext = 0
try:
while self.active:
t = time.time()
if t >= tnext:
did_register = False
aliases = self.service.get_service_aliases()
try:
did_register = self.registrar.register(aliases, self.port, interface=self.host)
except Exception:
self.logger.exception("error registering services")
# If registration worked out, retry to register again after
# interval time. Otherwise, try to register soon again.
if did_register:
tnext = t + interval
else:
self.logger.info("registering services did not work - retry")
time.sleep(1)
finally:
if not self._closed:
self.logger.info("background auto-register thread finished")
def _listen(self):
if self.active:
return
self.listener.listen(self.backlog)
# On Jython, if binding to port 0, we can get the correct port only
# once `listen()` was called, see #156:
if not self.port:
# Note that for AF_UNIX the following won't work (but we are safe
# since we already saved the socket_path into self.port):
self.port = self.listener.getsockname()[1]
self.logger.info("server started on [%s]:%s", self.host, self.port)
self.active = True
def _register(self):
if self.auto_register:
spawn(self._bg_register)
def start(self):
"""Starts the server (blocking). Use :meth:`close` to stop"""
self._listen()
self._register()
try:
while self.active:
self.accept()
except EOFError:
pass # server closed by another thread
except KeyboardInterrupt:
print("")
self.logger.warning("keyboard interrupt!")
finally:
self.logger.info("server has terminated")
self.close()
def _start_in_thread(self):
"""
Start the server in a thread; returns once the server is listening and
ready to accept incoming connections.
Used for testing, API could change anytime! Do not use!"""
return spawn_waitready(self._listen, self.start)[0]
class OneShotServer(Server):
"""
A server that handles a single connection (blocking) and terminates afterwards
Parameters: see :class:`Server`
"""
def _accept_method(self, sock):
try:
self._authenticate_and_serve_client(sock)
finally:
self.close()
class ThreadedServer(Server):
"""
A server that spawns a thread for each connection. Works on any platform
that supports threads.
Parameters: see :class:`Server`
"""
def _accept_method(self, sock):
spawn(self._authenticate_and_serve_client, sock)
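# Example (hedged sketch): minimal use of ThreadedServer via rpyc's public API;
# EchoService below is a hypothetical service defined only for illustration:
#
#     import rpyc
#
#     class EchoService(rpyc.Service):
#         def exposed_echo(self, message):
#             return message
#
#     if __name__ == "__main__":
#         ThreadedServer(EchoService, port=18861).start()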
class ThreadPoolServer(Server):
"""This server is threaded like the ThreadedServer but reuses threads so that
recreation is not necessary for each request. The pool of threads has a fixed
size that can be set with the 'nbThreads' argument. The default size is 20.
The server dispatches request to threads by batch, that is a given thread may process
up to request_batch_size requests from the same connection in one go, before it goes to
the next connection with pending requests. By default, self.request_batch_size
is set to 10 and it can be overwritten in the constructor arguments.
Contributed by *@sponce*
Parameters: see :class:`Server`
"""
def __init__(self, *args, **kwargs):
'''Initializes a ThreadPoolServer. In particular, instantiate the thread pool.'''
# get the number of threads in the pool
self.nbthreads = kwargs.pop('nbThreads', 20)
self.request_batch_size = kwargs.pop('requestBatchSize', 10)
# init the parent
Server.__init__(self, *args, **kwargs)
# a queue of connections having something to process
self._active_connection_queue = Queue.Queue()
# a dictionary fd -> connection
self.fd_to_conn = {}
# a polling object to be used be the polling thread
self.poll_object = poll()
def _listen(self):
if self.active:
return
super(ThreadPoolServer, self)._listen()
# setup the thread pool for handling requests
self.workers = []
for i in range(self.nbthreads):
t = spawn(self._serve_clients)
t.setName('Worker%i' % i)
self.workers.append(t)
# setup a thread for polling inactive connections
self.polling_thread = spawn(self._poll_inactive_clients)
self.polling_thread.setName('PollingThread')
def close(self):
'''closes a ThreadPoolServer. In particular, joins the thread pool.'''
# close parent server
Server.close(self)
# stop producer thread
self.polling_thread.join()
# clean up the thread pool: first fill the pool with None fds so that all threads
# exit the blocking get on the queue of active connections, then join the threads
for _ in range(len(self.workers)):
self._active_connection_queue.put(None)
for w in self.workers:
w.join()
def _remove_from_inactive_connection(self, fd):
'''removes a connection from the set of inactive ones'''
# unregister the connection in the polling object
try:
self.poll_object.unregister(fd)
except KeyError:
# the connection has already been unregistered
pass
def _drop_connection(self, fd):
'''removes a connection by closing it and removing it from internal structs'''
conn = None
# cleanup fd_to_conn dictionary
try:
conn = self.fd_to_conn[fd]
del self.fd_to_conn[fd]
except KeyError:
# the active connection has already been removed
pass
# close connection
self.logger.info("Closing connection for fd %d", fd)
if conn:
conn.close()
def _add_inactive_connection(self, fd):
'''adds a connection to the set of inactive ones'''
self.poll_object.register(fd, "reh")
def _handle_poll_result(self, connlist):
'''dispatches poll results: drops connections that errored, queues those with pending data'''
for fd, evt in connlist:
try:
# remove connection from the inactive ones
self._remove_from_inactive_connection(fd)
# Is it an error?
if "e" in evt or "n" in evt or "h" in evt:
# it was an error, connection was closed. Do the same on our side
self._drop_connection(fd)
else:
# connection has data, let's add it to the active queue
self._active_connection_queue.put(fd)
except KeyError:
# the connection has already been dropped. Give up
pass
def _poll_inactive_clients(self):
'''Main method run by the polling thread of the thread pool.
Check whether inactive clients have become active'''
while self.active:
try:
# the actual poll, with a timeout of 0.1s so that we can exit in case
# we're not active anymore
active_clients = self.poll_object.poll(0.1)
# for each client that became active, put them in the active queue
self._handle_poll_result(active_clients)
except Exception:
ex = sys.exc_info()[1]
# "Caught exception in Worker thread" message
self.logger.warning("Failed to poll clients, caught exception : %s", str(ex))
# wait a bit so that we do not loop too fast in case of error
time.sleep(0.2)
def _serve_requests(self, fd):
'''Serves requests from the given connection and puts it back to the appropriate queue'''
# serve at most request_batch_size requests from this connection
for _ in range(self.request_batch_size):
try:
if not self.fd_to_conn[fd].poll(): # note that poll serves the request
# we could not find a request, so we put this connection back to the inactive set
self._add_inactive_connection(fd)
return
except EOFError:
# the connection has been closed by the remote end. Close it on our side and return
self._drop_connection(fd)
return
except Exception:
# put back the connection to active queue in doubt and raise the exception to the upper level
self._active_connection_queue.put(fd)
raise
# we've processed the maximum number of requests. Put back the connection in the active queue
self._active_connection_queue.put(fd)
def _serve_clients(self):
'''Main method run by the processing threads of the thread pool.
Loops forever, handling requests read from the connections present in the active_queue'''
while self.active:
try:
# note that we do not use a timeout here. This is because the implementation of
# the timeout version performs badly. So we block forever, and exit by filling
# the queue with None fds
fd = self._active_connection_queue.get(True)
# fd may be None (case where we want to exit the blocking get to close the service)
if fd:
# serve the requests of this connection
self._serve_requests(fd)
except Queue.Empty:
# we've timed out, let's just retry. We only use the timeout so that this
# thread can stop even if there is nothing in the queue
pass
except Exception:
# "Caught exception in Worker thread" message
self.logger.exception("failed to serve client, caught exception")
# wait a bit so that we do not loop too fast in case of error
time.sleep(0.2)
def _authenticate_and_build_connection(self, sock):
'''Authenticate a client and if it succeeds, wraps the socket in a connection object.
Note that this code is cut and paste from the rpyc internals and may have to be
changed if rpyc evolves'''
# authenticate
if self.authenticator:
sock, credentials = self.authenticator(sock)
else:
credentials = None
# build a connection
addrinfo = sock.getpeername()
config = dict(self.protocol_config, credentials=credentials, connid="{}".format(addrinfo),
endpoints=(sock.getsockname(), addrinfo))
return sock, self.service._connect(Channel(SocketStream(sock)), config)
def _accept_method(self, sock):
'''Implementation of the accept method: only pushes the work to the internal queue.
In case the queue is full, raises an AsyncResultTimeout error'''
try:
addrinfo = None
# authenticate and build connection object
sock, conn = self._authenticate_and_build_connection(sock)
# put the connection in the active queue
addrinfo = sock.getpeername()
fd = conn.fileno()
self.logger.debug("Created connection to %s with fd %d", addrinfo, fd)
self.fd_to_conn[fd] = conn
self._add_inactive_connection(fd)
# connections are tracked in fd_to_conn here, so keep the base class client set empty
self.clients.clear()
except Exception:
err_msg = "Failed to serve client for {}, caught exception".format(addrinfo)
self.logger.exception(err_msg)
sock.close()
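# Example (hedged sketch): tuning the pool via the constructor kwargs consumed
# in __init__ above ('nbThreads', 'requestBatchSize'); MyService and the port
# are illustrative only:
#
#     server = ThreadPoolServer(MyService, port=18861,
#                               nbThreads=8, requestBatchSize=5)
#     server.start()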
class ForkingServer(Server):
"""
A server that forks a child process for each connection. Available on
POSIX compatible systems only.
Parameters: see :class:`Server`
"""
def __init__(self, *args, **kwargs):
if not signal:
raise OSError("ForkingServer not supported on this platform")
Server.__init__(self, *args, **kwargs)
# setup sigchld handler
self._prevhandler = signal.signal(signal.SIGCHLD, self._handle_sigchld)
def close(self):
Server.close(self)
signal.signal(signal.SIGCHLD, self._prevhandler)
@classmethod
def _handle_sigchld(cls, signum, unused):
try:
while True:
pid, dummy = os.waitpid(-1, os.WNOHANG)
if pid <= 0:
break
except OSError:
pass
# re-register signal handler (see man signal(2), under Portability)
signal.signal(signal.SIGCHLD, cls._handle_sigchld)
def _accept_method(self, sock):
pid = os.fork()
if pid == 0:
# child
try:
self.logger.debug("child process created")
signal.signal(signal.SIGCHLD, self._prevhandler)
# 76: call signal.siginterrupt(False) in forked child
signal.siginterrupt(signal.SIGCHLD, False)
self.listener.close()
self.clients.clear()
self._authenticate_and_serve_client(sock)
except Exception:
self.logger.exception("child process terminated abnormally")
else:
self.logger.debug("child process terminated")
finally:
self.logger.debug("child terminated")
os._exit(0)
else:
# parent
sock.close()
self.clients.discard(sock)
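# Example (hedged sketch): ForkingServer requires POSIX signal support; a
# portable fallback pattern, assuming only the classes defined in this module:
#
#     import os
#     server_cls = ForkingServer if hasattr(os, "fork") else ThreadedServer
#     server_cls(MyService, port=18861).start()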
class GeventServer(Server):
"""gevent based Server. Requires using ``gevent.monkey.patch_all()``."""
def _register(self):
if self.auto_register:
gevent.spawn(self._bg_register)
def _accept_method(self, sock):
gevent.spawn(self._authenticate_and_serve_client, sock)
avg_line_length: 40.401051 | max_line_length: 154 | alphanum_fraction: 0.601153

hexsha: 13edad565e957adbe01f25b6814992fa151b1da3 | size: 2,428 | ext: py | lang: Python
path: cloudbaseinit/utils/network.py | repo: noma4i/cloudbase-init | head_hexsha: cf2dec9dc108b574e2b5da954151fbd347adff60 | licenses: ["Apache-2.0"]
stars: 1 (2019-04-16T11:40:36.000Z – 2019-04-16T11:40:36.000Z) | issues: null | forks: null
# Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import urllib2
import urlparse
from cloudbaseinit.openstack.common import log as logging
from cloudbaseinit.osutils import factory as osutils_factory
LOG = logging.getLogger(__name__)
MAX_URL_CHECK_RETRIES = 3
def check_url(url, retries_count=MAX_URL_CHECK_RETRIES):
# honor the retries_count argument instead of always using the module default
for i in range(retries_count):
try:
LOG.debug("Testing url: %s", url)
urllib2.urlopen(url)
return True
except Exception:
pass
return False
def check_metadata_ip_route(metadata_url):
'''
Workaround for: https://bugs.launchpad.net/quantum/+bug/1174657
'''
osutils = osutils_factory.get_os_utils()
if sys.platform == 'win32' and osutils.check_os_version(6, 0):
# 169.254.x.x addresses are not getting routed starting from
# Windows Vista / 2008
metadata_netloc = urlparse.urlparse(metadata_url).netloc
metadata_host = metadata_netloc.split(':')[0]
if metadata_host.startswith("169.254."):
if (not osutils.check_static_route_exists(metadata_host) and
not check_url(metadata_url)):
(interface_index, gateway) = osutils.get_default_gateway()
if gateway:
try:
LOG.debug('Setting gateway for host: %s',
metadata_host)
osutils.add_static_route(metadata_host,
"255.255.255.255",
gateway,
interface_index,
10)
except Exception, ex:
# Ignore it
LOG.exception(ex)
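# Example (hedged sketch): a typical call with the conventional link-local
# OpenStack metadata endpoint; the URL is illustrative:
#
#     check_metadata_ip_route("http://169.254.169.254:80/")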
avg_line_length: 36.787879 | max_line_length: 78 | alphanum_fraction: 0.593493

hexsha: 0e608e72f8ff463b75d345fd24b754bfbe49af12 | size: 1,242 | ext: py | lang: Python
path: __init__.py | repo: nonebot/nonebot-alarm | head_hexsha: 11cc74b90a0a0dd954bb529bec208b43a1a55035 | licenses: ["MIT"]
stars: 4 (2020-04-20T13:21:23.000Z – 2021-12-08T12:47:01.000Z) | issues: 1 (2022-02-18T05:31:55.000Z – 2022-02-18T05:31:55.000Z) | forks: 2 (2020-08-16T11:26:58.000Z – 2022-02-23T15:05:53.000Z)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
@Author : yanyongyu
@Date : 2020-04-14 20:45:51
@LastEditors: yanyongyu
@LastEditTime: 2020-04-14 21:40:51
@Description : None
@GitHub : https://github.com/yanyongyu
"""
__author__ = "yanyongyu"
from nonebot import get_bot
__plugin_name__ = "alarm"
def __plugin_usage__(target: str = None, *args, **kwargs):
if target == "name":
return "⏰ 闹钟提醒"  # plugin name: "alarm reminder"
else:
return "⏰ 闹钟提醒\n用自然语言说出你需要提醒的事项即可~"  # usage: state the reminder in natural language
bot = get_bot()
nickname = getattr(bot.config, "NICKNAME", "我")
nickname = nickname if isinstance(nickname, str) else nickname[0]
EXPR_COULD_NOT = (f"哎鸭,{nickname}没有时光机,这个时间没办法提醒你。",
f"你这是要穿越吗?这个时间{nickname}没办法提醒你。")
EXPR_TOO_LONG = ("很抱歉,现在暂时不能设置超过一个月的提醒呢。",
f"……时间这么久的话,{nickname}可能也记不住。还是换个时间吧。")
EXPR_OK = ("遵命!我会在{time}叫你{action}!\n", "好!我会在{time}提醒你{action}!\n",
"没问题!我一定会在{time}通知你{action}。\n", "好鸭~ 我会准时在{time}提醒你{action}。\n",
"嗯嗯!我会在{time}准时叫你{action}哒\n", "好哦!我会在{time}准时叫你{action}~\n")
EXPR_REMIND = ("提醒通知:\n提醒时间到啦!该{action}了!", "提醒通知:\n你设置的提醒时间已经到了~ 赶快{action}!",
"提醒通知:\n你应该没有忘记{action}吧?", "提醒通知:\n你定下的提醒时间已经到啦!快{action}吧!")
from . import commands, nlp
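# Example (hedged sketch): how the reply templates above are meant to be
# filled; the time and action values passed to .format() are illustrative:
#
#     import random
#     reply = random.choice(EXPR_OK).format(time="8:00", action="起床")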
avg_line_length: 28.883721 | max_line_length: 79 | alphanum_fraction: 0.626409