hexsha stringlengths 40 40 | size int64 1 1.03M | ext stringclasses 10 values | lang stringclasses 1 value | max_stars_repo_path stringlengths 3 239 | max_stars_repo_name stringlengths 5 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 239 | max_issues_repo_name stringlengths 5 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 239 | max_forks_repo_name stringlengths 5 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 1 1.03M | avg_line_length float64 1 958k | max_line_length int64 1 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
acf4b9d7b13225e7f38693ef046abb55374a3641 | 861 | py | Python | winrm/exceptions.py | Synerty/pywinrm | 6e1fd8928d23cb9fe9ef56a9c2f2958fbc81cc98 | [
"MIT"
] | null | null | null | winrm/exceptions.py | Synerty/pywinrm | 6e1fd8928d23cb9fe9ef56a9c2f2958fbc81cc98 | [
"MIT"
] | 1 | 2019-01-30T15:18:57.000Z | 2019-01-30T15:18:57.000Z | winrm/exceptions.py | Synerty/pywinrm | 6e1fd8928d23cb9fe9ef56a9c2f2958fbc81cc98 | [
"MIT"
] | null | null | null | from __future__ import unicode_literals
class WinRMError(Exception):
""""Generic WinRM error"""
code = 500
class WinRMTransportError(Exception):
"""WinRM errors specific to transport-level problems (unexpcted HTTP error codes, etc)"""
code = 500
class WinRMOperationTimeoutError(Exception):
"""
Raised when a WinRM-level operation timeout (not a connection-level timeout) has occurred. This is
considered a normal error that should be retried transparently by the client when waiting for output from
a long-running process.
"""
code = 500
class AuthenticationError(WinRMError):
"""Authorization Error"""
code = 401
class BasicAuthDisabledError(AuthenticationError):
message = 'WinRM/HTTP Basic authentication is not enabled on remote host'
class InvalidCredentialsError(AuthenticationError):
pass | 28.7 | 109 | 0.746806 |
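# --- Editor's note: a minimal usage sketch, not part of the pywinrm source.
# It shows how the hierarchy above is meant to be consumed: the docstring says
# WinRMOperationTimeoutError is a "normal" error the client should retry
# transparently. `receive_output` is a hypothetical stand-in for the real
# client call, not a pywinrm API.
def poll_until_done(receive_output):
    while True:
        try:
            return receive_output()
        except WinRMOperationTimeoutError:
            # the long-running command simply has no output yet: keep polling
            continue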
acf4baa42be0a8b370a59f50ec488aa38196a832 | 8,533 | py | Python | SOSAT/constraints/DITF_constraint.py | pnnl/SOSAT | 610f99e0bb80f2f5e7836e7e3b6b816e029838bb | [
"BSD-3-Clause"
] | null | null | null | SOSAT/constraints/DITF_constraint.py | pnnl/SOSAT | 610f99e0bb80f2f5e7836e7e3b6b816e029838bb | [
"BSD-3-Clause"
] | 1 | 2021-03-22T18:59:05.000Z | 2021-03-22T18:59:05.000Z | SOSAT/constraints/DITF_constraint.py | pnnl/SOSAT | 610f99e0bb80f2f5e7836e7e3b6b816e029838bb | [
"BSD-3-Clause"
] | null | null | null | from logging import log
import numpy as np
from numpy import ma
import pint
from .constraint_base import StressConstraint
units = pint.UnitRegistry()
class DITFConstraint(StressConstraint):
"""
    A class used to constrain the stress state by the existence or
    non-existence of drilling-induced tensile fractures (DITFs) at the
location being analyzed. Depending on the mud and formation
temperatures, mud weights, and rock strength and whether or
    not significant mud losses were observed, if DITFs exist, it
generally indicates that the maximum horizontal stress is much
larger than the minimum principal stress.
Attributes
----------
No public attributes
Parameters
----------
DITF_exists : bool
        Indication of whether or not DITFs exist
mud_pressure_dist : subclass of `scipy.stats.rv_continuous`
The probability distribution for the maximum mud pressure
experienced by the relevant section of borehole from the time
that the well was drilled until the log used to identify the
presence or absence of breakouts was run; mud pressure should
        be specified in the same pressure unit as is used for the
        tensile strength and Young's modulus, but this can be any unit as specified through
the optional `pressure_unit` parameter, which defaults to 'Pa';
conversion from mud weight must be performed by the user of
this class
mud_temperature_dist : subclass of `scipy.stats.rv_continuous`
The probability distribution for the minimum mud temperature;
the minimum value is of interest rather than the average value
since the formation of a DITF is governed by the minimum
value only
tensile_strength_dist : subclass of `scipy.stats.rv_continuous`
        The probability distribution for the minimum tensile
strength in the zone being analyzed. DITFs will form at the
weakest portion of the well for a given stress state, so
whether they form or not is dependent on the minimum tensile
strength rather than an average representative value
formation_temperature : float
Formation temperature, which is taken as deterministic since
it is usually not highly uncertain
YM : float
Formation Young's Modulus, which is taken as deterministic
since the formation of DITF is only weakly dependent
on this parameter; should be specified in the same pressure
        unit as is used for mud pressure and the tensile strength, but
        this can be any unit as specified through the optional
`pressure_unit` parameter, which defaults to 'Pa'
PR : float
Formation Poisson's Ratio, which is taken as deterministic
since the formation of DITFs is only weakly dependent
on this parameter
CTE : float
Formation coefficient of thermal expansion, which is taken
as deterministic since the formation of DITF is only
weakly dependent on this parameter
pressure_unit : str, optional
        The unit used for mud pressure, tensile strength, and Young's modulus; should be a unit
recognized by `pint.UnitRegistry`; defaults to 'Pa'
    temperature_unit : str, optional
The unit used to specify the mud temperature distribution and
the formation temperature; should be a unit recognized by
`pint.UnitRegistry`; defaults to degrees C ('degC')
Notes
-----
While this class allows users to use any probability distribution
that derives from the `scipy.stats.rv_continuous` class for the mud
temperature, pressure, and formation tensile strength, users are
cautioned against using any distribution that has finite
probability density for negative parameter values, since negative
strength values are not physically meaningful. Therefore, lognormal
distributions are more appropriate than a normal distribution, for
example.
"""
def __init__(self,
DITF_exists,
mud_pressure_dist,
mud_temperature_dist,
tensile_strength_dist,
formation_temperature,
YM,
PR,
CTE,
pressure_unit='Pa'):
"""
Constructor method
"""
self._DITF_exists = DITF_exists
self._mud_pressure_dist = mud_pressure_dist
self._mud_temperature_dist = mud_temperature_dist
self._tensile_strength_dist = tensile_strength_dist
self._formation_temperature = formation_temperature
self._YM = YM * units(pressure_unit)
self._PR = PR
self._CTE = CTE
self._pressure_unit = pressure_unit
def loglikelihood(self, ss):
"""
Computes the likelihood of each stress state given the presence
or absence of DITFs, formation and mud properties specified.
Parameters
----------
ss: `SOSAT.StressState` object
StressState object containing the stress states
over which the likelihood is to be evaluated
Returns
-------
Numpy MaskedArray
The returned object is a Numpy MaskedArray containing the
likelihood for each stress `ss`. The returned array is
masked identically to `ss.shmin_grid`
"""
# compute stress with balanced mud and no temperature difference
sig_nominal = 3.0 * ss.shmin_grid - ss.shmax_grid \
- 2.0 * ss.pore_pressure
# compute thermoelastic factor
TEF = self._CTE * self._YM / (1.0 - self._PR)
# since all temperature-based quantities in the class are
# assumed to be consistent, we do not include pint temperature
# units explicitly the way we do for pressure/stress. This means
# that TEF will only have pressure units. We convert it to
# ss.stress_units here to avoid repeated conversions inside the
# Monte Carlo loop
TEF = TEF.to(ss.stress_unit).magnitude
# use a Monte Carlo sampling scheme to evaluate the probability
# of a DITF forming
NDITF = ma.zeros(np.shape(ss.shmin_grid), dtype=np.int32)
PDITF_new = ma.zeros(np.shape(ss.shmin_grid), dtype=np.float64)
Ntotal = 0
converged = False
iter = 0
while not converged:
# perform 500 iterations at a time and then see if the
            # probability has changed meaningfully
for i in range(0, 500):
mud_pressure_i = self._mud_pressure_dist.rvs() \
* units(self._pressure_unit)
# convert to the stress unit of ss
mud_pressure_i = mud_pressure_i \
.to(ss.stress_unit).magnitude
# no unit conversion is needed since all members of
                # this class should have consistent temperature units
mud_temperature_i = self._mud_temperature_dist.rvs()
TS_i = self._tensile_strength_dist.rvs() \
* units(self._pressure_unit)
# convert to stress unit of ss
TS_i = TS_i.to(ss.stress_unit).magnitude
deltaP = mud_pressure_i - ss.pore_pressure
deltaT = mud_temperature_i - self._formation_temperature
DITF = sig_nominal - deltaP - TEF * deltaT + TS_i
NDITF[DITF < 0.0] += 1
iter += 1
Ntotal += 500
if iter > 2:
PDITF_old = PDITF_new
PDITF_new = NDITF / Ntotal
                err = ma.MaskedArray.max(ma.absolute(PDITF_new - PDITF_old))
if err < 0.01:
converged = True
print("DITF Monte Carlo iteration converged after ",
iter,
" iterations")
# return the most updated estimate for the likelihood of
# DITF formation at each stress state
if self._DITF_exists:
with np.errstate(divide='ignore'):
loglikelihood = np.log(PDITF_new)
return loglikelihood
else:
            # np.log1p(-PDITF_new) evaluates log(1 - PDITF_new)
            # accurately when PDITF_new is small; the divide warning
            # below covers the PDITF_new == 1.0 edge case
with np.errstate(divide='ignore'):
loglikelihood = np.log1p(- PDITF_new)
return loglikelihood
| 42.665 | 72 | 0.639752 |
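# --- Editor's note: a hedged construction sketch, not from the SOSAT test
# suite. The lognormal distributions follow the class docstring's advice to
# avoid probability mass on negative values; every numeric parameter below is
# an illustrative assumption, and the `SOSAT.StressState` argument `ss` is
# only referenced, not built, here.
from scipy.stats import lognorm

mud_pressure = lognorm(s=0.2, scale=30.0e6)     # Pa, assumed
mud_temperature = lognorm(s=0.1, scale=60.0)    # degC, assumed
tensile_strength = lognorm(s=0.5, scale=2.0e6)  # Pa, assumed

constraint = DITFConstraint(
    DITF_exists=True,
    mud_pressure_dist=mud_pressure,
    mud_temperature_dist=mud_temperature,
    tensile_strength_dist=tensile_strength,
    formation_temperature=90.0,  # degC, assumed
    YM=30.0e9,                   # Pa, assumed
    PR=0.25,
    CTE=2.4e-5,                  # 1/degC, assumed
    pressure_unit='Pa')
# loglike = constraint.loglikelihood(ss)  # ss: a SOSAT.StressState instance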
acf4bb4801b9000163461ab21eceb3272dd9fc24 | 3,643 | py | Python | lib/smisk/autoreload.py | rsms/smisk | f12a5606dfff49a15fa91448ff36652d60add4c0 | [
"MIT"
] | 4 | 2015-11-05T11:51:12.000Z | 2020-12-30T18:55:58.000Z | lib/smisk/autoreload.py | rsms/smisk | f12a5606dfff49a15fa91448ff36652d60add4c0 | [
"MIT"
] | 5 | 2021-11-16T17:21:51.000Z | 2021-11-16T17:22:09.000Z | lib/smisk/autoreload.py | rsms/smisk | f12a5606dfff49a15fa91448ff36652d60add4c0 | [
"MIT"
] | null | null | null | # encoding: utf-8
'''Automatically reload processes when components are updated.
'''
import sys, os, logging, re
from smisk.util.threads import Monitor
from smisk.config import config
log = logging.getLogger(__name__)
class Autoreloader(Monitor):
'''Reloads application when files change'''
frequency = 1
match = None
def __init__(self, frequency=1, match=None):
'''
:param frequency: How often to perform file modification checks
:type frequency: int
:param match: Only check modules matching this regular expression.
Matches anything if None.
:type match: re.RegExp
'''
self.config_files = set()
self.mtimes = {}
self.log = None # in runner thread -- should not be set manually
self.match = match
Monitor.__init__(self, self.run, self.setup, frequency)
def start(self):
'''Start our own perpetual timer thread for self.run.'''
if self.thread is None:
self.mtimes = {}
self._update_config_files_list()
Monitor.start(self)
start.priority = 70
def _update_config_files_list(self):
config_files = set()
if config.get('smisk.autoreload.config', config.get('smisk.autoreload')):
for path,conf in config.sources:
if path[0] != '<':
config_files.add(path)
self.config_files = config_files
def setup(self):
self.log = logging.getLogger('%s.%s' % (__name__, self.__class__.__name__))
def on_module_modified(self, path):
# The file has been deleted or modified.
self.log.info("%s was modified", path)
self.thread.cancel()
self.log.debug("Stopped autoreload monitor (thread %r)", self.thread.getName())
import smisk.core
smisk.core.app.exit()
def on_config_modified(self, path):
config.reload()
self._update_config_files_list()
def run(self):
'''Reload the process if registered files have been modified.'''
sysfiles = set()
if config.get('smisk.autoreload.modules', config.get('smisk.autoreload')):
for k, m in sys.modules.items():
if self.match is None or self.match.match(k):
          if hasattr(m, '__loader__') and hasattr(m.__loader__, 'archive'):
            # module loaded from a zip archive: watch the archive file itself
            k = m.__loader__.archive
          else:
            k = getattr(m, '__file__', None)
sysfiles.add(k)
for path in sysfiles | self.config_files:
if path:
if path.endswith('.pyc') or path.endswith('.pyo'):
path = path[:-1]
oldtime = self.mtimes.get(path, 0)
if oldtime is None:
# Module with no .py file. Skip it.
continue
#self.log.info('Checking %r' % sysfiles)
try:
mtime = os.stat(path).st_mtime
except OSError:
# Either a module with no .py file, or it's been deleted.
mtime = None
if path not in self.mtimes:
# If a module has no .py file, this will be None.
self.mtimes[path] = mtime
else:
#self.log.info("checking %s", path)
if mtime is None or mtime > oldtime:
if path.endswith(config.filename_ext) and path in [k for k,d in config.sources]:
self.on_config_modified(path)
self.mtimes[path] = mtime
else:
self.on_module_modified(path)
return
if __name__ == '__main__':
logging.basicConfig(
level=logging.DEBUG,
format = '%(levelname)-8s %(name)-20s %(message)s',
datefmt = '%d %b %H:%M:%S'
)
import time, smisk.core
ar = Autoreloader()
ar.start()
time.sleep(4)
print 'Stopping'
ar.stop()
| 30.107438 | 92 | 0.609937 |
acf4bbbc4b992cc4b3921aa4d11352b68942cb7b | 1,430 | py | Python | testPrograms/thoughtProbes.py | iculus/brainDisplay | 04f26c86fde2776c7de0c41e77b27a4cd8922445 | [
"MIT"
] | null | null | null | testPrograms/thoughtProbes.py | iculus/brainDisplay | 04f26c86fde2776c7de0c41e77b27a4cd8922445 | [
"MIT"
] | null | null | null | testPrograms/thoughtProbes.py | iculus/brainDisplay | 04f26c86fde2776c7de0c41e77b27a4cd8922445 | [
"MIT"
] | null | null | null | import pygame
pygame.init()
BLACK = (0,0,0)
WHITE = (255,255,255)
infoObject = pygame.display.Info()
#size = (infoObject.current_w, infoObject.current_h)
# screenWidth = infoObject.current_w
# screenHeight = infoObject.current_h
size =(700,500)
screenWidth = size[0]
screenHeight = size[1]
#screen = pygame.display.set_mode(size,pygame.FULLSCREEN)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("SART")
fontSize = 40
myfont = pygame.font.SysFont("monospace", fontSize)
CarryOn = True
onTask = "Were yout thinking about something other than what you were doing?"
stimulusDependent = "where you thinking about your surroundings?"
freelyMoving = "Was your mind moving about freely?"
margin = 0
border = 200
numBoxes = 7
sWidth = size[0]-border
sHeight = size[1]-border
calculatedBoxHeight = sHeight//numBoxes - margin  # floor division: range() below needs integers
calculatedBoxWidth = calculatedBoxHeight
calculatedMarginWidth = sWidth-sHeight
for column in range(0+margin, sHeight, calculatedBoxWidth+margin):
for row in range(0+margin, sHeight, calculatedBoxHeight+margin):
pygame.draw.rect(screen, WHITE, [column+(calculatedMarginWidth/2)+(border/2),row+(border/2),calculatedBoxWidth,calculatedBoxHeight])
pygame.display.flip()
running = True
while running:
for event in pygame.event.get():
if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
running = False | 26.981132 | 141 | 0.732168 |
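# --- Editor's note: an editorial sketch, not part of the original script.
# The loop above exits only on the space bar; a common pygame pattern also
# honours the window-close event so the program cannot get stuck:
def run_event_loop():
    running = True
    while running:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                # window close button
                running = False
            elif event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                running = False
    pygame.quit()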
acf4bdae1ad7ef82fad85294095fd9e7edde68a7 | 1,656 | py | Python | cores/PTX/output_parser.py | tinochinamora/iw_imdb | 89964024bee7f8eaaa25530a8d40155251345be0 | [
"MIT"
] | 3 | 2021-09-10T08:14:45.000Z | 2022-02-25T04:53:12.000Z | cores/PTX/output_parser.py | PrincetonUniversity/ILA-Modeling-Verification | 88964aad8c465c9da82f1ec66425da9f16fc8d29 | [
"MIT"
] | 1 | 2018-06-25T08:49:22.000Z | 2018-06-25T08:49:22.000Z | cores/PTX/output_parser.py | PrincetonUniversity/ILA-Modeling-Verification | 88964aad8c465c9da82f1ec66425da9f16fc8d29 | [
"MIT"
] | 3 | 2018-06-26T11:31:40.000Z | 2021-12-01T20:16:21.000Z | import re
import pickle
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-b', '--bmc_input', type = 'string', action = 'store', dest='bmc_file')
parser.add_option('-i', '--input', type = 'string', action = 'store', dest = 'input_file')
parser.add_option('-o', '--output', type = 'string', action = 'store', dest = 'output_file')
(options, args) = parser.parse_args()
bmc_obj = open(options.bmc_file, 'r')
bmc = bmc_obj.readlines()
bmc = bmc[:-1]
bmc_obj.close()
frame_list = []
i = 0
for bmc_line in bmc:
if bmc_line[0:6] == '<Frame':
frame_list.append(i)
i += 1
frame_list.append(len(bmc))
frame_text = []
for i in range(len(frame_list) - 1):
frame_text.append(bmc[frame_list[i] : frame_list[i + 1]])
rule_obj = open(options.input_file, 'r')
rule_list = rule_obj.readlines()
rule_obj.close()
rule = ''
for rule_text in rule_list:
rule += (rule_text + '|')
rule = rule[:-1]
output_text = []
output_text.append(frame_text[0])
for i in range(1, len(frame_text)):
current_frame = frame_text[i]
last_frame = frame_text[i - 1]
new_frame = []
new_frame.append(current_frame[0])
print len(current_frame)
print len(last_frame)
for j in range(1, len(current_frame)):
if re.match(rule, current_frame[j]):
new_frame.append(current_frame[j])
else:
if (len(last_frame) > j):
if current_frame[j] != last_frame[j]:
new_frame.append(current_frame[j])
output_text.append(new_frame)
output_obj = open(options.output_file, 'w')
for frame in output_text:
output_obj.writelines(frame)
output_obj.close()
| 27.6 | 92 | 0.653986 |
acf4be66e0943bc2302db328f3149c41c8d34f03 | 23,498 | py | Python | full_config/full_centerpoint_01voxel_second_secfpn_4x8_cyclic_20e_nus.py | fjczx/mmdetection3d | c6493386a82fcb30871a2afe315b2998b0291279 | [
"Apache-2.0"
] | null | null | null | full_config/full_centerpoint_01voxel_second_secfpn_4x8_cyclic_20e_nus.py | fjczx/mmdetection3d | c6493386a82fcb30871a2afe315b2998b0291279 | [
"Apache-2.0"
] | null | null | null | full_config/full_centerpoint_01voxel_second_secfpn_4x8_cyclic_20e_nus.py | fjczx/mmdetection3d | c6493386a82fcb30871a2afe315b2998b0291279 | [
"Apache-2.0"
] | null | null | null | point_cloud_range = [-51.2, -51.2, -5.0, 51.2, 51.2, 3.0]
class_names = [
'car', 'truck', 'construction_vehicle', 'bus', 'trailer', 'barrier',
'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
]
dataset_type = 'NuScenesDataset'
data_root = 'data/nuscenes/'
input_modality = dict(
use_lidar=True,
use_camera=False,
use_radar=False,
use_map=False,
use_external=False)
file_client_args = dict(backend='disk')
train_pipeline = [
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=9,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk'),
pad_empty_sweeps=True,
remove_close=True),
dict(type='LoadAnnotations3D', with_bbox_3d=True, with_label_3d=True),
dict(
type='ObjectSample',
db_sampler=dict(
data_root='data/nuscenes/',
info_path='data/nuscenes/nuscenes_dbinfos_train.pkl',
rate=1.0,
prepare=dict(
filter_by_difficulty=[-1],
filter_by_min_points=dict(
car=5,
truck=5,
bus=5,
trailer=5,
construction_vehicle=5,
traffic_cone=5,
barrier=5,
motorcycle=5,
bicycle=5,
pedestrian=5)),
classes=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian',
'traffic_cone'
],
sample_groups=dict(
car=2,
truck=3,
construction_vehicle=7,
bus=4,
trailer=6,
barrier=2,
motorcycle=6,
bicycle=6,
pedestrian=2,
traffic_cone=2),
points_loader=dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk')))),
dict(
type='GlobalRotScaleTrans',
rot_range=[-0.3925, 0.3925],
scale_ratio_range=[0.95, 1.05],
translation_std=[0, 0, 0]),
dict(
type='RandomFlip3D',
sync_2d=False,
flip_ratio_bev_horizontal=0.5,
flip_ratio_bev_vertical=0.5),
dict(
type='PointsRangeFilter',
point_cloud_range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0]),
dict(
type='ObjectRangeFilter',
point_cloud_range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0]),
dict(
type='ObjectNameFilter',
classes=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
]),
dict(type='PointShuffle'),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
]),
dict(type='Collect3D', keys=['points', 'gt_bboxes_3d', 'gt_labels_3d'])
]
test_pipeline = [
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=9,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk'),
pad_empty_sweeps=True,
remove_close=True),
dict(
type='MultiScaleFlipAug3D',
img_scale=(1333, 800),
pts_scale_ratio=1,
flip=False,
transforms=[
dict(
type='GlobalRotScaleTrans',
rot_range=[0, 0],
scale_ratio_range=[1.0, 1.0],
translation_std=[0, 0, 0]),
dict(type='RandomFlip3D'),
dict(
type='PointsRangeFilter',
point_cloud_range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0]),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian',
'traffic_cone'
],
with_label=False),
dict(type='Collect3D', keys=['points'])
])
]
eval_pipeline = [
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=9,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk'),
pad_empty_sweeps=True,
remove_close=True),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
],
with_label=False),
dict(type='Collect3D', keys=['points'])
]
data = dict(
samples_per_gpu=4,
workers_per_gpu=4,
train=dict(
type='CBGSDataset',
data_root='data/nuscenes/',
ann_file='data/nuscenes/nuscenes_infos_train.pkl',
pipeline=[
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=10,
file_client_args=dict(backend='disk')),
dict(
type='LoadAnnotations3D',
with_bbox_3d=True,
with_label_3d=True),
dict(
type='GlobalRotScaleTrans',
rot_range=[-0.3925, 0.3925],
scale_ratio_range=[0.95, 1.05],
translation_std=[0, 0, 0]),
dict(type='RandomFlip3D', flip_ratio_bev_horizontal=0.5),
dict(
type='PointsRangeFilter',
point_cloud_range=[-50, -50, -5, 50, 50, 3]),
dict(
type='ObjectRangeFilter',
point_cloud_range=[-50, -50, -5, 50, 50, 3]),
dict(
type='ObjectNameFilter',
classes=[
'car', 'truck', 'trailer', 'bus', 'construction_vehicle',
'bicycle', 'motorcycle', 'pedestrian', 'traffic_cone',
'barrier'
]),
dict(type='PointShuffle'),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'trailer', 'bus', 'construction_vehicle',
'bicycle', 'motorcycle', 'pedestrian', 'traffic_cone',
'barrier'
]),
dict(
type='Collect3D',
keys=['points', 'gt_bboxes_3d', 'gt_labels_3d'])
],
classes=[
'car', 'truck', 'trailer', 'bus', 'construction_vehicle',
'bicycle', 'motorcycle', 'pedestrian', 'traffic_cone', 'barrier'
],
modality=dict(
use_lidar=True,
use_camera=False,
use_radar=False,
use_map=False,
use_external=False),
test_mode=False,
box_type_3d='LiDAR',
dataset=dict(
type='NuScenesDataset',
data_root='data/nuscenes/',
ann_file='data/nuscenes/nuscenes_infos_train.pkl',
pipeline=[
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=9,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk'),
pad_empty_sweeps=True,
remove_close=True),
dict(
type='LoadAnnotations3D',
with_bbox_3d=True,
with_label_3d=True),
dict(
type='ObjectSample',
db_sampler=dict(
data_root='data/nuscenes/',
info_path='data/nuscenes/nuscenes_dbinfos_train.pkl',
rate=1.0,
prepare=dict(
filter_by_difficulty=[-1],
filter_by_min_points=dict(
car=5,
truck=5,
bus=5,
trailer=5,
construction_vehicle=5,
traffic_cone=5,
barrier=5,
motorcycle=5,
bicycle=5,
pedestrian=5)),
classes=[
'car', 'truck', 'construction_vehicle', 'bus',
'trailer', 'barrier', 'motorcycle', 'bicycle',
'pedestrian', 'traffic_cone'
],
sample_groups=dict(
car=2,
truck=3,
construction_vehicle=7,
bus=4,
trailer=6,
barrier=2,
motorcycle=6,
bicycle=6,
pedestrian=2,
traffic_cone=2),
points_loader=dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk')))),
dict(
type='GlobalRotScaleTrans',
rot_range=[-0.3925, 0.3925],
scale_ratio_range=[0.95, 1.05],
translation_std=[0, 0, 0]),
dict(
type='RandomFlip3D',
sync_2d=False,
flip_ratio_bev_horizontal=0.5,
flip_ratio_bev_vertical=0.5),
dict(
type='PointsRangeFilter',
point_cloud_range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0]),
dict(
type='ObjectRangeFilter',
point_cloud_range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0]),
dict(
type='ObjectNameFilter',
classes=[
'car', 'truck', 'construction_vehicle', 'bus',
'trailer', 'barrier', 'motorcycle', 'bicycle',
'pedestrian', 'traffic_cone'
]),
dict(type='PointShuffle'),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'construction_vehicle', 'bus',
'trailer', 'barrier', 'motorcycle', 'bicycle',
'pedestrian', 'traffic_cone'
]),
dict(
type='Collect3D',
keys=['points', 'gt_bboxes_3d', 'gt_labels_3d'])
],
classes=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian',
'traffic_cone'
],
test_mode=False,
use_valid_flag=True,
box_type_3d='LiDAR')),
val=dict(
type='NuScenesDataset',
data_root='data/nuscenes/',
ann_file='data/nuscenes/nuscenes_infos_val.pkl',
pipeline=[
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=9,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk'),
pad_empty_sweeps=True,
remove_close=True),
dict(
type='MultiScaleFlipAug3D',
img_scale=(1333, 800),
pts_scale_ratio=1,
flip=False,
transforms=[
dict(
type='GlobalRotScaleTrans',
rot_range=[0, 0],
scale_ratio_range=[1.0, 1.0],
translation_std=[0, 0, 0]),
dict(type='RandomFlip3D'),
dict(
type='PointsRangeFilter',
point_cloud_range=[
-51.2, -51.2, -5.0, 51.2, 51.2, 3.0
]),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'construction_vehicle', 'bus',
'trailer', 'barrier', 'motorcycle', 'bicycle',
'pedestrian', 'traffic_cone'
],
with_label=False),
dict(type='Collect3D', keys=['points'])
])
],
classes=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
],
modality=dict(
use_lidar=True,
use_camera=False,
use_radar=False,
use_map=False,
use_external=False),
test_mode=True,
box_type_3d='LiDAR'),
test=dict(
type='NuScenesDataset',
data_root='data/nuscenes/',
ann_file='data/nuscenes/nuscenes_infos_val.pkl',
pipeline=[
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=9,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk'),
pad_empty_sweeps=True,
remove_close=True),
dict(
type='MultiScaleFlipAug3D',
img_scale=(1333, 800),
pts_scale_ratio=1,
flip=False,
transforms=[
dict(
type='GlobalRotScaleTrans',
rot_range=[0, 0],
scale_ratio_range=[1.0, 1.0],
translation_std=[0, 0, 0]),
dict(type='RandomFlip3D'),
dict(
type='PointsRangeFilter',
point_cloud_range=[
-51.2, -51.2, -5.0, 51.2, 51.2, 3.0
]),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'construction_vehicle', 'bus',
'trailer', 'barrier', 'motorcycle', 'bicycle',
'pedestrian', 'traffic_cone'
],
with_label=False),
dict(type='Collect3D', keys=['points'])
])
],
classes=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
],
modality=dict(
use_lidar=True,
use_camera=False,
use_radar=False,
use_map=False,
use_external=False),
test_mode=True,
box_type_3d='LiDAR'))
evaluation = dict(
interval=20,
pipeline=[
dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=5,
file_client_args=dict(backend='disk')),
dict(
type='LoadPointsFromMultiSweeps',
sweeps_num=9,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk'),
pad_empty_sweeps=True,
remove_close=True),
dict(
type='DefaultFormatBundle3D',
class_names=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer',
'barrier', 'motorcycle', 'bicycle', 'pedestrian',
'traffic_cone'
],
with_label=False),
dict(type='Collect3D', keys=['points'])
])
voxel_size = [0.1, 0.1, 0.2]
model = dict(
type='CenterPoint',
# pts: List[torch.Tensor([N, 5])]
    # N is the number of points in the cloud.
    # For each keyframe, the voxelization layer below groups the points into voxels.
pts_voxel_layer=dict(
max_num_points=10,
voxel_size=[0.1, 0.1, 0.2],
max_voxels=(90000, 120000),
point_cloud_range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0]),
    # voxels: torch.Tensor([num_voxels, 10, 5])
    # 10 is the max number of points in each voxel,
    # 5 is the dimension of each point,
    # num_voxels is the number of non-empty voxels.
pts_voxel_encoder=dict(type='HardSimpleVFE', num_features=5),
    # voxel_features: torch.Tensor([num_voxels, 5])
    # In CenterPoint, the HardSimpleVFE encodes each voxel by simply
    # averaging the points it contains.
pts_middle_encoder=dict(
type='SparseEncoder',
in_channels=5,
sparse_shape=[41, 1024, 1024],
output_channels=128,
order=('conv', 'norm', 'act'),
encoder_channels=((16, 16, 32), (32, 32, 64), (64, 64, 128), (128,
128)),
encoder_paddings=((0, 0, 1), (0, 0, 1), (0, 0, [0, 1, 1]), (0, 0)),
block_type='basicblock'),
# x: torch.Tensor([1, 256, 128, 128])
    # This is the SparseEncoder output; in CenterPoint, the SparseEncoder encodes the voxel features.
pts_backbone=dict(
type='SECOND',
in_channels=256,
out_channels=[128, 256],
layer_nums=[5, 5],
layer_strides=[1, 2],
norm_cfg=dict(type='BN', eps=0.001, momentum=0.01),
conv_cfg=dict(type='Conv2d', bias=False)),
pts_neck=dict(
type='SECONDFPN',
in_channels=[128, 256],
out_channels=[256, 256],
upsample_strides=[1, 2],
norm_cfg=dict(type='BN', eps=0.001, momentum=0.01),
upsample_cfg=dict(type='deconv', bias=False),
use_conv_for_no_stride=True),
# x: List[torch.Tensor([1, 512, 128, 128])]
# current x is the output of the SECONDFPN.
pts_bbox_head=dict(
type='CenterHead',
in_channels=512,
tasks=[
dict(num_class=1, class_names=['car']),
dict(num_class=2, class_names=['truck', 'construction_vehicle']),
dict(num_class=2, class_names=['bus', 'trailer']),
dict(num_class=1, class_names=['barrier']),
dict(num_class=2, class_names=['motorcycle', 'bicycle']),
dict(num_class=2, class_names=['pedestrian', 'traffic_cone'])
],
common_heads=dict(
reg=(2, 2), height=(1, 2), dim=(3, 2), rot=(2, 2), vel=(2, 2)),
share_conv_channel=64,
bbox_coder=dict(
type='CenterPointBBoxCoder',
post_center_range=[-61.2, -61.2, -10.0, 61.2, 61.2, 10.0],
max_num=500,
score_threshold=0.1,
out_size_factor=8,
voxel_size=[0.1, 0.1],
code_size=9,
pc_range=[-51.2, -51.2]),
separate_head=dict(
type='SeparateHead', init_bias=-2.19, final_kernel=3),
loss_cls=dict(type='GaussianFocalLoss', reduction='mean'),
loss_bbox=dict(type='L1Loss', reduction='mean', loss_weight=0.25),
norm_bbox=True),
train_cfg=dict(
pts=dict(
grid_size=[1024, 1024, 40],
voxel_size=[0.1, 0.1, 0.2],
out_size_factor=8,
dense_reg=1,
gaussian_overlap=0.1,
max_objs=500,
min_radius=2,
code_weights=[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.2, 0.2],
point_cloud_range=[-51.2, -51.2, -5.0, 51.2, 51.2, 3.0])),
test_cfg=dict(
pts=dict(
post_center_limit_range=[-61.2, -61.2, -10.0, 61.2, 61.2, 10.0],
max_per_img=500,
max_pool_nms=False,
min_radius=[4, 12, 10, 1, 0.85, 0.175],
score_threshold=0.1,
out_size_factor=8,
voxel_size=[0.1, 0.1],
nms_type='rotate',
pre_max_size=1000,
post_max_size=83,
nms_thr=0.2,
pc_range=[-51.2, -51.2])))
optimizer = dict(type='AdamW', lr=0.0001, weight_decay=0.01)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
lr_config = dict(
policy='cyclic',
target_ratio=(10, 0.0001),
cyclic_times=1,
step_ratio_up=0.4)
momentum_config = dict(
policy='cyclic',
target_ratio=(0.8947368421052632, 1),
cyclic_times=1,
step_ratio_up=0.4)
runner = dict(type='EpochBasedRunner', max_epochs=20)
checkpoint_config = dict(interval=1)
log_config = dict(
interval=50,
hooks=[dict(type='TextLoggerHook'),
dict(type='TensorboardLoggerHook')])
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = None
load_from = None
resume_from = None
workflow = [('train', 1)]
db_sampler = dict(
data_root='data/nuscenes/',
info_path='data/nuscenes/nuscenes_dbinfos_train.pkl',
rate=1.0,
prepare=dict(
filter_by_difficulty=[-1],
filter_by_min_points=dict(
car=5,
truck=5,
bus=5,
trailer=5,
construction_vehicle=5,
traffic_cone=5,
barrier=5,
motorcycle=5,
bicycle=5,
pedestrian=5)),
classes=[
'car', 'truck', 'construction_vehicle', 'bus', 'trailer', 'barrier',
'motorcycle', 'bicycle', 'pedestrian', 'traffic_cone'
],
sample_groups=dict(
car=2,
truck=3,
construction_vehicle=7,
bus=4,
trailer=6,
barrier=2,
motorcycle=6,
bicycle=6,
pedestrian=2,
traffic_cone=2),
points_loader=dict(
type='LoadPointsFromFile',
coord_type='LIDAR',
load_dim=5,
use_dim=[0, 1, 2, 3, 4],
file_client_args=dict(backend='disk')))
| 36.544323 | 98 | 0.47387 |
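# --- Editor's note: a hedged sketch, not part of the config file itself, of
# how a dumped config like this is typically consumed. `Config.fromfile` and
# `build_model` are the mmcv / mmdetection3d 0.x-era entry points this repo
# appears to target; exact import paths vary across mmdet3d versions, so
# treat this as an assumption rather than the project's documented workflow.
from mmcv import Config
from mmdet3d.models import build_model

cfg = Config.fromfile(
    'full_config/full_centerpoint_01voxel_second_secfpn_4x8_cyclic_20e_nus.py')
# train_cfg/test_cfg are already embedded inside cfg.model in this dump
model = build_model(cfg.model)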
acf4c0238811756d0d1c5f538eb9002ee0b49edc | 1,279 | py | Python | PYH2L/DataCollector.py | GwynWilson/H2L | 201557dea156c8340108fecaa130ba3d3bac33df | [
"Apache-2.0"
] | null | null | null | PYH2L/DataCollector.py | GwynWilson/H2L | 201557dea156c8340108fecaa130ba3d3bac33df | [
"Apache-2.0"
] | null | null | null | PYH2L/DataCollector.py | GwynWilson/H2L | 201557dea156c8340108fecaa130ba3d3bac33df | [
"Apache-2.0"
] | null | null | null | import os
import cv2
import numpy as np
import pandas as pd
'''
This code is to take each of the individual image-coordinate pairs and bundle them together to give a single file.
'''
local_repo_path = os.getcwd()
test3 = {}
def append_data(filename):
global test3
os.chdir("Data/TestImages")
img = cv2.imread("{}.png".format(filename))
os.chdir(local_repo_path + '\Data\TestCoords')
coords = pd.read_csv("{}.csv".format(filename), header=0, engine='c')
# Splitting the colour channels of the original image into separate arrays.
b, g, r = cv2.split(img)
intermediate_b = pd.DataFrame(data=b)
test = pd.concat([np.transpose(coords), intermediate_b], axis=1, keys=['Coords', 'Img'], join='outer', sort=True)
test = test.transpose()
test3[filename] = test
os.chdir(local_repo_path)
for name in os.listdir(local_repo_path + '\Data\TestCoords'):
append_data(name.split('.')[0])
test4 = pd.concat(test3.values(), axis=0, keys=test3.keys(), sort=True)
test4 = test4.apply(lambda x: pd.Series(x.dropna().values), 1)
test4.columns = test4.columns[:len(test4.columns)]
test4 = test4.reindex(test4.columns, axis=1)  # reindex_axis() was removed from modern pandas
test4 = test4.dropna(axis=1, how='all')
print(test4)
test4.to_csv(local_repo_path + '\Data\MergeDat\Ting.csv')
| 29.068182 | 117 | 0.698202 |
acf4c090fa25e784b861edf5b0431117816bb413 | 27 | py | Python | login.py | yhzy541990531/python10 | 9708c199d24bee00101f1b10edceefde7574db08 | [
"MIT"
] | null | null | null | login.py | yhzy541990531/python10 | 9708c199d24bee00101f1b10edceefde7574db08 | [
"MIT"
] | null | null | null | login.py | yhzy541990531/python10 | 9708c199d24bee00101f1b10edceefde7574db08 | [
"MIT"
] | null | null | null | num=1
num =2
num =manage
| 4.5 | 11 | 0.62963 |
acf4c134825cbcc54461b413a44614b4d6d64b10 | 14,019 | py | Python | cutadapt/report.py | jgibson2/cutadapt-vbim | 5e1693e553d53c0aea0bfef33e53b90c44d7911b | [
"MIT"
] | 1 | 2018-12-12T10:31:11.000Z | 2018-12-12T10:31:11.000Z | cutadapt/report.py | jgibson2/cutadapt-vbim | 5e1693e553d53c0aea0bfef33e53b90c44d7911b | [
"MIT"
] | null | null | null | cutadapt/report.py | jgibson2/cutadapt-vbim | 5e1693e553d53c0aea0bfef33e53b90c44d7911b | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Routines for printing a report.
"""
from __future__ import print_function, division, absolute_import
import sys
from contextlib import contextmanager
import textwrap
from .adapters import BACK, FRONT, PREFIX, SUFFIX, ANYWHERE, LINKED
from .modifiers import QualityTrimmer, AdapterCutter
from .filters import (NoFilter, PairedNoFilter, TooShortReadFilter, TooLongReadFilter,
DiscardTrimmedFilter, DiscardUntrimmedFilter, Demultiplexer, NContentFilter)
def safe_divide(numerator, denominator):
if numerator is None or not denominator:
return 0.0
else:
return numerator / denominator
class Statistics:
def __init__(self):
"""
"""
self.n = 0
self.total_bp = 0
self.total_bp1 = 0
self.total_bp2 = 0
self.paired = False
self.time = 0.01 # CPU time in seconds
self.too_short = None
self.too_long = None
self.written = 0
self.written_bp = [0, 0]
self.too_many_n = None
self.with_adapters = [0, 0]
self.quality_trimmed_bp = [0, 0]
self.did_quality_trimming = False
self.quality_trimmed = 0
self.adapter_stats = [None, None]
self.adapter_lists = [[], []]
# These attributes are derived from the ones above
self.total_written_bp = 0
self.too_short_fraction = 0
self.too_long_fraction = 0
self.total_written_bp_fraction = 0
self.with_adapters_fraction = []
self.written_fraction = 0
self.quality_trimmed_fraction = 0
self.too_many_n_fraction = None
def collect(self, n, total_bp1, total_bp2, time, modifiers, modifiers2, writers):
"""
n -- total number of reads
total_bp1 -- number of bases in first reads
total_bp2 -- number of bases in second reads. None for single-end data.
time -- CPU time
"""
self.n = n
self.total_bp = total_bp1
self.total_bp1 = total_bp1
if total_bp2 is None:
self.paired = False
else:
self.paired = True
self.total_bp2 = total_bp2
self.total_bp += total_bp2
self.time = max(time, 0.01)
# Collect statistics from writers/filters
for w in writers:
if isinstance(w, (NoFilter, PairedNoFilter, Demultiplexer)) or \
isinstance(w.filter, (DiscardTrimmedFilter, DiscardUntrimmedFilter)):
self.written += w.written
self.written_bp[0] += w.written_bp[0]
self.written_bp[1] += w.written_bp[1]
elif isinstance(w.filter, TooShortReadFilter):
self.too_short = w.filtered
elif isinstance(w.filter, TooLongReadFilter):
self.too_long = w.filtered
elif isinstance(w.filter, NContentFilter):
self.too_many_n = w.filtered
assert self.written is not None
# Collect statistics from modifiers
for i, modifiers_list in [(0, modifiers), (1, modifiers2)]:
for modifier in modifiers_list:
if isinstance(modifier, QualityTrimmer):
self.quality_trimmed_bp[i] = modifier.trimmed_bases
self.did_quality_trimming = True
elif isinstance(modifier, AdapterCutter):
self.with_adapters[i] += modifier.with_adapters
self.adapter_stats[i] = modifier.adapter_statistics
self.adapter_lists[i] = modifier.adapters
# Set the attributes that are derived from the other ones
self.quality_trimmed = sum(self.quality_trimmed_bp)
self.total_written_bp = sum(self.written_bp)
self.written_fraction = safe_divide(self.written, self.n)
self.with_adapters_fraction = [safe_divide(v, self.n) for v in self.with_adapters]
self.quality_trimmed_fraction = safe_divide(self.quality_trimmed, self.total_bp)
self.total_written_bp_fraction = safe_divide(self.total_written_bp, self.total_bp)
self.too_short_fraction = safe_divide(self.too_short, self.n)
self.too_long_fraction = safe_divide(self.too_long, self.n)
self.too_many_n_fraction = safe_divide(self.too_many_n, self.n)
ADAPTER_TYPES = {
BACK: "regular 3'",
FRONT: "regular 5'",
PREFIX: "anchored 5'",
SUFFIX: "anchored 3'",
ANYWHERE: "variable 5'/3'",
LINKED: "linked",
}
def print_error_ranges(adapter_length, error_rate):
print("No. of allowed errors:")
prev = 0
for errors in range(1, int(error_rate * adapter_length) + 1):
r = int(errors / error_rate)
print("{0}-{1} bp: {2};".format(prev, r - 1, errors - 1), end=' ')
prev = r
if prev == adapter_length:
print("{0} bp: {1}".format(adapter_length, int(error_rate * adapter_length)))
else:
print("{0}-{1} bp: {2}".format(prev, adapter_length, int(error_rate * adapter_length)))
print()
def print_histogram(adapter_statistics, where, adapter, n, gc_content):
"""
Print a histogram. Also, print the no. of reads expected to be
trimmed by chance (assuming a uniform distribution of nucleotides in the reads).
adapter_statistics -- AdapterStatistics object
where -- 'front' or 'back'
	adapter -- the Adapter object whose removal statistics are printed
	n -- total no. of reads.
	gc_content -- assumed GC content used for the random-match model.
"""
if where not in ('front', 'back'):
		raise ValueError('where must be "front" or "back"')
if where == 'front':
d = adapter_statistics.lengths_front
errors = adapter_statistics.errors_front
else:
d = adapter_statistics.lengths_back
errors = adapter_statistics.errors_back
match_probabilities = adapter.random_match_probabilities(gc_content=gc_content)
print("length", "count", "expect", "max.err", "error counts", sep="\t")
for length in sorted(d):
# when length surpasses adapter_length, the
# probability does not increase anymore
expect = n * match_probabilities[min(len(adapter), length)]
count = d[length]
max_errors = max(errors[length].keys())
errs = ' '.join(str(errors[length][e]) for e in range(max_errors + 1))
print(
length,
count,
"{0:.1F}".format(expect),
int(adapter.max_error_rate * min(length, len(adapter))),
errs,
sep="\t")
print()
def print_adjacent_bases(bases):
"""
Print a summary of the bases preceding removed adapter sequences.
Print a warning if one of the bases is overrepresented and there are
at least 20 preceding bases available.
Return whether a warning was printed.
"""
total = sum(bases.values())
if total == 0:
return False
print('Bases preceding removed adapters:')
warnbase = None
for base in ['A', 'C', 'G', 'T', '']:
b = base if base != '' else 'none/other'
fraction = 1.0 * bases[base] / total
print(' {0}: {1:.1%}'.format(b, fraction))
if fraction > 0.8 and base != '':
warnbase = b
if total >= 20 and warnbase is not None:
print('WARNING:')
print(' The adapter is preceded by "{0}" extremely often.'.format(warnbase))
print(' The provided adapter sequence may be incomplete.')
print(' To fix the problem, add "{0}" to the beginning of the adapter sequence.'.format(warnbase))
print()
return True
print()
return False
@contextmanager
def redirect_standard_output(file):
if file is None:
yield
return
old_stdout = sys.stdout
sys.stdout = file
yield
sys.stdout = old_stdout
def print_report(stats, gc_content):
"""Print report to standard output."""
if stats.n == 0:
print("No reads processed! Either your input file is empty or you used the wrong -f/--format parameter.")
return
print("Finished in {0:.2F} s ({1:.0F} us/read; {2:.2F} M reads/minute).".format(
stats.time, 1E6 * stats.time / stats.n, stats.n / stats.time * 60 / 1E6))
report = "\n=== Summary ===\n\n"
if stats.paired:
report += textwrap.dedent("""\
Total read pairs processed: {n:13,d}
Read 1 with adapter: {with_adapters[0]:13,d} ({with_adapters_fraction[0]:.1%})
Read 2 with adapter: {with_adapters[1]:13,d} ({with_adapters_fraction[1]:.1%})
""")
else:
report += textwrap.dedent("""\
Total reads processed: {n:13,d}
Reads with adapters: {with_adapters[0]:13,d} ({with_adapters_fraction[0]:.1%})
""")
if stats.too_short is not None:
report += "{pairs_or_reads} that were too short: {too_short:13,d} ({too_short_fraction:.1%})\n"
if stats.too_long is not None:
report += "{pairs_or_reads} that were too long: {too_long:13,d} ({too_long_fraction:.1%})\n"
if stats.too_many_n is not None:
report += "{pairs_or_reads} with too many N: {too_many_n:13,d} ({too_many_n_fraction:.1%})\n"
report += textwrap.dedent("""\
{pairs_or_reads} written (passing filters): {written:13,d} ({written_fraction:.1%})
Total basepairs processed: {total_bp:13,d} bp
""")
if stats.paired:
report += " Read 1: {total_bp1:13,d} bp\n"
report += " Read 2: {total_bp2:13,d} bp\n"
if stats.did_quality_trimming:
report += "Quality-trimmed: {quality_trimmed:13,d} bp ({quality_trimmed_fraction:.1%})\n"
if stats.paired:
report += " Read 1: {quality_trimmed_bp[0]:13,d} bp\n"
report += " Read 2: {quality_trimmed_bp[1]:13,d} bp\n"
report += "Total written (filtered): {total_written_bp:13,d} bp ({total_written_bp_fraction:.1%})\n"
if stats.paired:
report += " Read 1: {written_bp[0]:13,d} bp\n"
report += " Read 2: {written_bp[1]:13,d} bp\n"
v = vars(stats)
v['pairs_or_reads'] = "Pairs" if stats.paired else "Reads"
try:
report = report.format(**v)
except ValueError:
# Python 2.6 does not support the comma format specifier (PEP 378)
report = report.replace(",d}", "d}").format(**v)
print(report)
warning = False
for which_in_pair in (0, 1):
for adapter in stats.adapter_lists[which_in_pair]:
adapter_statistics = stats.adapter_stats[which_in_pair][adapter]
total_front = sum(adapter_statistics.lengths_front.values())
total_back = sum(adapter_statistics.lengths_back.values())
total = total_front + total_back
where = adapter.where
assert where in (ANYWHERE, LINKED) or (where in (BACK, SUFFIX) and total_front == 0) or (
where in (FRONT, PREFIX) and total_back == 0)
if stats.paired:
extra = 'First read: ' if which_in_pair == 0 else 'Second read: '
else:
extra = ''
print("=" * 3, extra + "Adapter", adapter.name, "=" * 3)
print()
if where == LINKED:
print("Sequence: {0}...{1}; Type: linked; Length: {2}+{3}; "
"5' trimmed: {4} times; 3' trimmed: {5} times".format(
adapter.front_adapter.sequence,
adapter.back_adapter.sequence,
len(adapter.front_adapter.sequence),
len(adapter.back_adapter.sequence),
total_front, total_back))
else:
print("Sequence: {0}; Type: {1}; Length: {2}; Trimmed: {3} times.".
format(adapter.sequence, ADAPTER_TYPES[adapter.where],
len(adapter.sequence), total))
if total == 0:
print()
continue
if where == ANYWHERE:
print(total_front, "times, it overlapped the 5' end of a read")
print(total_back, "times, it overlapped the 3' end or was within the read")
print()
print_error_ranges(len(adapter), adapter.max_error_rate)
print("Overview of removed sequences (5')")
print_histogram(adapter_statistics, 'front', adapter, stats.n, gc_content)
print()
print("Overview of removed sequences (3' or within)")
print_histogram(adapter_statistics, 'back', adapter, stats.n, gc_content)
elif where == LINKED:
print()
print_error_ranges(len(adapter.front_adapter), adapter.front_adapter.max_error_rate)
print_error_ranges(len(adapter.back_adapter), adapter.back_adapter.max_error_rate)
print("Overview of removed sequences at 5' end")
print_histogram(adapter_statistics, 'front',
adapter.front_adapter, stats.n, gc_content)
print()
print("Overview of removed sequences at 3' end")
print_histogram(adapter_statistics, 'back',
adapter.back_adapter, stats.n, gc_content)
elif where in (FRONT, PREFIX):
print()
print_error_ranges(len(adapter), adapter.max_error_rate)
print("Overview of removed sequences")
print_histogram(adapter_statistics, 'front', adapter, stats.n, gc_content)
else:
assert where in (BACK, SUFFIX)
print()
print_error_ranges(len(adapter), adapter.max_error_rate)
warning = warning or print_adjacent_bases(adapter_statistics.adjacent_bases)
print("Overview of removed sequences")
print_histogram(adapter_statistics, 'back', adapter, stats.n, gc_content)
if warning:
print('WARNING:')
print(' One or more of your adapter sequences may be incomplete.')
print(' Please see the detailed output above.')
| 41.111437 | 113 | 0.60097 |
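# --- Editor's note: a small worked sketch, not part of cutadapt, of the
# "expect" column printed by print_histogram above. Under an i.i.d. base
# model, the chance that a random read matches the first `l` adapter bases is
# the product of the per-base probabilities, and the expected count is n
# times that. This zero-error simplification ignores the error tolerance that
# the real Adapter.random_match_probabilities accounts for.
def random_match_probability(prefix, gc_content=0.5):
    p = 1.0
    for base in prefix:
        p *= gc_content / 2 if base in 'GC' else (1 - gc_content) / 2
    return p

n_reads = 1000000
for length in range(1, 6):
    # with gc_content=0.5 every base contributes a factor of 0.25
    print(length, n_reads * random_match_probability('ACGTA'[:length]))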
acf4c13ed5bf4fedc387496da4abf6f9e94dc8f7 | 117 | py | Python | __main__.py | DevilJamJar/PinBoard | 17a1943469833fdbae1b748f4c15b5c1f475a991 | [
"Unlicense"
] | 2 | 2020-09-27T14:34:30.000Z | 2020-09-27T15:26:15.000Z | __main__.py | DevilJamJar/PinBoard | 17a1943469833fdbae1b748f4c15b5c1f475a991 | [
"Unlicense"
] | null | null | null | __main__.py | DevilJamJar/PinBoard | 17a1943469833fdbae1b748f4c15b5c1f475a991 | [
"Unlicense"
] | 2 | 2020-11-21T16:58:20.000Z | 2020-12-10T00:23:03.000Z | import asyncio
from bot import Bot
event_loop = asyncio.get_event_loop()
bot = Bot(event_loop=event_loop)
bot.run()
| 16.714286 | 37 | 0.786325 |
acf4c1553e9a94c396d13dd415c88db0302b44bc | 1,083 | py | Python | migrations/versions/8815a44de839_.py | RichardMM/testhrku | d01f86d554ddbd98f2ed733607f734f2770bf735 | [
"BSD-2-Clause"
] | null | null | null | migrations/versions/8815a44de839_.py | RichardMM/testhrku | d01f86d554ddbd98f2ed733607f734f2770bf735 | [
"BSD-2-Clause"
] | null | null | null | migrations/versions/8815a44de839_.py | RichardMM/testhrku | d01f86d554ddbd98f2ed733607f734f2770bf735 | [
"BSD-2-Clause"
] | null | null | null | """empty message
Revision ID: 8815a44de839
Revises: 60b908b83f9d
Create Date: 2018-04-25 14:38:01.038669
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = '8815a44de839'
down_revision = '60b908b83f9d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('projects', 'proj_approval',
existing_type=mysql.TINYINT(display_width=1),
type_=sa.Boolean(),
existing_nullable=True)
op.add_column('users', sa.Column('user_approver_rights', sa.Boolean(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'user_approver_rights')
op.alter_column('projects', 'proj_approval',
existing_type=sa.Boolean(),
type_=mysql.TINYINT(display_width=1),
existing_nullable=True)
# ### end Alembic commands ###
| 29.27027 | 90 | 0.671283 |
acf4c196d70029b35a88b2b1406464490dd41055 | 389 | py | Python | GUI/11 - Text Area.py | PedroHAlvesS/Exercicios-Python-Curso-em-video | 7ab187d65ddada252450b048608dc4898e5c67c8 | [
"MIT"
] | null | null | null | GUI/11 - Text Area.py | PedroHAlvesS/Exercicios-Python-Curso-em-video | 7ab187d65ddada252450b048608dc4898e5c67c8 | [
"MIT"
] | null | null | null | GUI/11 - Text Area.py | PedroHAlvesS/Exercicios-Python-Curso-em-video | 7ab187d65ddada252450b048608dc4898e5c67c8 | [
"MIT"
] | null | null | null | from tkinter import *
def submit():
dado = text.get("1.0",END)
print(dado)
janela = Tk()
text = Text(janela,
bg="light yellow",
font=("Ink Free",30),
height=8,
width=20,
padx=20,
pady=20,
)
text.pack()
botao = Button(janela, text="Confirmar", command=submit)
botao.pack()
janela.mainloop()
| 18.52381 | 56 | 0.511568 |
acf4c308fac0dc8134e638b5ad52830e8171753b | 11,157 | py | Python | data_loader.py | oucxlw/auxiva-ipa | bc07bef753446d3fdb790e317f5ad96179f12fde | [
"MIT"
] | 28 | 2020-08-25T01:59:24.000Z | 2021-12-13T13:28:21.000Z | data_loader.py | fakufaku/auxiva-ipa | ec9d17cf017a5f87c1e8290e6ea9c17479ce7e1c | [
"MIT"
] | 1 | 2021-12-23T04:10:48.000Z | 2021-12-23T04:10:48.000Z | data_loader.py | oucxlw/auxiva-ipa | bc07bef753446d3fdb790e317f5ad96179f12fde | [
"MIT"
] | 8 | 2020-10-29T01:31:19.000Z | 2021-12-21T02:55:20.000Z | # This file loads the data from the simulation result file and creates a
# pandas data frame for further processing.
#
# Copyright 2020 Robin Scheibler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import json
import os
from pathlib import Path
import numpy as np
import pandas as pd
threshold_convergence = 1e-4
threshold_convergence_one_step = 1e-0
# This table maps some of the labels we used in the
# simulation to labels we would like to use in the figures
# of the paper
substitutions = {
"Algorithm": {
"five_laplace": "FIVE",
"overiva_ip_laplace": "OverIVA-IP",
"overiva_ip2_laplace": "OverIVA-IP2",
"overiva_ip_block_laplace": "OverIVA-IP-NP",
"overiva_ip2_block_laplace": "OverIVA-IP2-NP",
"overiva_demix_bg_laplace": "OverIVA-DX/BG",
"ogive_laplace": "OGIVEs",
"auxiva_laplace": "AuxIVA-IP (PCA)",
"auxiva_laplace_nopca": "AuxIVA-IP",
"auxiva_iss_laplace": "AuxIVA-ISS (PCA)",
"auxiva_iss_laplace_nopca": "AuxIVA-ISS",
"auxiva2_laplace": "AuxIVA-IP2 (PCA)",
"auxiva2_laplace_nopca": "AuxIVA-IP2",
"auxiva_pca": "PCA+AuxIVA-IP",
"auxiva_demix_steer_nopca": "AuxIVA-IPA",
"auxiva_demix_steer_pca": "AuxIVA-IPA (PCA)",
"auxiva_ipa_nopca": "AuxIVA-IPA",
"auxiva_ipa_pca": "AuxIVA-IPA (PCA)",
"auxiva_ipa2_nopca": "AuxIVA-IPA2",
"auxiva_ipa2_pca": "AuxIVA-IPA2 (PCA)",
"auxiva_ipa2": "AuxIVA-IPA2 (PCA)",
"overiva_demix_steer": "OverIVA-IPA",
"fastiva": "FastIVA (PCA)",
"fastiva_nopca": "FastIVA",
"iva_ng_0.3": "IVA-NG (PCA)",
"iva_ng_0.3_nopca": "IVA-NG",
"pca": "PCA",
}
}
def load_data(dirs, pickle=False):
parameters = dict()
algorithms = dict()
df = None
data_files = []
parameters = None
for i, data_dir in enumerate(dirs):
print("Reading in", data_dir)
# add the data file from this directory
data_file = os.path.join(data_dir, "data.json")
if os.path.exists(data_file):
data_files.append(data_file)
else:
raise ValueError("File {} doesn" "t exist".format(data_file))
# get the simulation config
with open(os.path.join(data_dir, "parameters.json"), "r") as f:
new_parameters = json.load(f)
if parameters is None:
parameters = new_parameters
else:
parameters["algorithm_kwargs"].update(new_parameters["algorithm_kwargs"])
# algorithms to take in the plot
algos = algorithms.keys()
# check if a pickle file exists for these files
pickle_file = f".{parameters['name']}.pickle"
rt60_file = ".rt60.pickle"
convergence_file = ".convergence.pickle"
final_value_file = ".final_value.pickle"
if os.path.isfile(pickle_file) and pickle:
print("Reading existing pickle file...")
# read the pickle file
df = pd.read_pickle(pickle_file)
rt60 = pd.read_pickle(rt60_file)
conv_tbl = pd.read_pickle(convergence_file)
final_value_tbl = pd.read_pickle(final_value_file)
else:
# reading all data files in the directory
records = []
rt60_list = []
for file in data_files:
with open(file, "r") as f:
content = json.load(f)
for seg in content:
records += seg
# build the data table line by line
print("Building table")
columns = [
"Algorithm",
"Sources",
"Interferers",
"Mics",
"RT60",
"Distance",
"SINR",
"seed",
"Iteration",
"Iteration_Index",
"Runtime [s]",
"SI-SDR [dB]",
"SI-SIR [dB]",
"\u0394SI-SDR [dB]",
"\u0394SI-SIR [dB]",
"Success",
]
table = []
convergence_table = []
final_value_table = []
num_sources = set()
copy_fields = [
"algorithm",
"n_targets",
"n_interferers",
"n_mics",
"rt60",
"dist_ratio",
"sinr",
"seed",
]
number_failed_records = 0
failed_algorithms = {}
for record in records:
algo_kwargs = parameters["algorithm_kwargs"][record["algorithm"]]["kwargs"]
if "callback_checkpoints" in algo_kwargs:
checkpoints = algo_kwargs["callback_checkpoints"].copy()
checkpoints.insert(0, 0)
algo_n_iter = algo_kwargs["n_iter"]
else:
checkpoints = list(range(len(record["sdr"])))
algo_n_iter = 1
rt60_list.append(record["rt60"])
try:
fs = parameters["room_params"]["fs"]
except KeyError:
fs = parameters["room"]["room_kwargs"]["fs"]
# runtime per iteration, per second of audio
runtime = record["runtime"] / record["n_samples"] * fs / algo_n_iter
evaltime = record["eval_time"] / record["n_samples"] * fs / algo_n_iter
if np.any(np.isnan(record["sdr"][-1])):
number_failed_records += 1
if record["algorithm"] not in failed_algorithms:
failed_algorithms[record["algorithm"]] = 1
else:
failed_algorithms[record["algorithm"]] += 1
continue
# fill the convergence table
checkpoints_interp = np.arange(checkpoints[-1] + 1)
cost_interp = np.interp(
np.arange(checkpoints[-1] + 1), checkpoints, record["cost"]
)
cost_init = record["cost"][0]
converged_iter = None
for i, (n_iter, cost) in enumerate(zip(checkpoints_interp, cost_interp)):
if i == 0:
continue
progress_one_step = cost - cost_interp[-1]
progress_total = cost - cost_init
"""
if (
np.abs(progress_one_step) / np.abs(progress_total)
< threshold_convergence
):
converged_iter = n_iter
break
"""
if (
progress_one_step
< record["n_mics"] * threshold_convergence_one_step
):
converged_iter = n_iter
break
entry = [record[field] for field in copy_fields]
if converged_iter is None:
convergence_table.append(entry + [np.nan, np.nan])
else:
convergence_table.append(
entry + [converged_iter, converged_iter * runtime]
)
# fill the SDR/SIR table
sdr_i = np.array(record["sdr"][0]) # Initial SDR
sir_i = np.array(record["sir"][0]) # Initial SDR
loop_len = min(len(checkpoints), len(record["sdr"]), len(record["sir"]))
for i, (n_iter, sdr, sir) in enumerate(
zip(checkpoints, record["sdr"], record["sir"])
):
entry = [record[field] for field in copy_fields]
entry.append(n_iter)
entry.append(i)
# seconds processing / second of audio
entry.append(runtime * n_iter)
try:
sdr_f = np.array(sdr) # Final SDR
sir_f = np.array(sir) # Final SIR
new_entry = entry + [
np.mean(sdr_f),
np.mean(sir_f),
np.mean(sdr_f - sdr_i),
np.mean(sir_f - sir_i),
float(np.mean(sir_f > 0.0)),
]
table.append(new_entry)
# record the final value only in a separate table too
if i == loop_len - 1:
final_value_table.append(new_entry)
except Exception:
continue
# create a pandas frame
print("Making PANDAS frame...")
df = pd.DataFrame(table, columns=columns)
rt60 = pd.DataFrame(rt60_list, columns=["RT60"])
conv_tbl = pd.DataFrame(
convergence_table, columns=columns[:8] + ["Iterations", "Runtime"]
)
final_value_tbl = pd.DataFrame(final_value_table, columns=columns)
df.to_pickle(pickle_file)
rt60.to_pickle(rt60_file)
conv_tbl.to_pickle(convergence_file)
final_value_tbl.to_pickle(final_value_file)
if number_failed_records > 0:
import warnings
def _warning(message, *args, **kwargs):
print(message)
warnings.showwarning = _warning
warnings.warn(f"Number of failed record: {number_failed_records}")
print(f"Summary of {number_failed_records} failures:")
for algo, n_fail in failed_algorithms.items():
print(f"{algo}: {n_fail}")
    # apply the substitutions
df = df.replace(substitutions)
conv_tbl = conv_tbl.replace(substitutions)
final_value_tbl = final_value_tbl.replace(substitutions)
return df, final_value_tbl, conv_tbl, rt60, parameters
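# Editor's illustration (not part of the original script): the returned frames
# share the column layout built above, so callers can slice them directly, e.g.
#   df, final_tbl, conv_tbl, rt60, params = load_data([Path("sim_out")])
#   subset = final_tbl[final_tbl["Algorithm"] == "some_algo"]
# where "sim_out" and "some_algo" are hypothetical placeholder values.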
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Load simulation data into a pandas frame"
)
parser.add_argument(
"-p",
"--pickle",
action="store_true",
help="Read the aggregated data table from a pickle cache",
)
parser.add_argument(
"dirs",
type=Path,
nargs="+",
metavar="DIR",
help="The directory containing the simulation output files.",
)
args = parser.parse_args()
dirs = args.dirs
pickle = args.pickle
df, final_value_tbl, conv_tbl, rt60, parameters = load_data(
args.dirs, pickle=pickle
)
| 33.605422 | 87 | 0.564489 |
acf4c39812b5e8d5849e4228d0e07170a486bb9e | 1,299 | py | Python | MEETINGS/tests/test_urls.py | andresbeltran98/Student_Organizational_System | 71928bcce21b06f5b8cc6f284493a102e3b10d05 | [
"MIT"
] | 1 | 2019-03-18T00:55:16.000Z | 2019-03-18T00:55:16.000Z | MEETINGS/tests/test_urls.py | andresbeltran98/Student_Organizational_System | 71928bcce21b06f5b8cc6f284493a102e3b10d05 | [
"MIT"
] | 14 | 2019-03-30T05:46:19.000Z | 2019-04-29T20:57:06.000Z | MEETINGS/tests/test_urls.py | andresbeltran98/Student_Organizational_System | 71928bcce21b06f5b8cc6f284493a102e3b10d05 | [
"MIT"
] | null | null | null | from django.test import SimpleTestCase
from django.urls import reverse, resolve
from ..views import (MeetingListView, MeetingDetailView, MeetingCreateView, MeetingUpdateView, MeetingDeleteView,
SearchListView)
class TestUrls(SimpleTestCase):
"""
    These test cases check the URLs of the Meetings module
"""
def test_meeting_list_url(self):
url = reverse('meetings-list')
self.assertEquals(resolve(url).func.view_class, MeetingListView)
def test_meeting_create_url(self):
url = reverse('meeting-create')
self.assertEquals(resolve(url).func.view_class, MeetingCreateView)
def test_meeting_detail_url(self):
url = reverse('meeting-detail', args=['1'])
self.assertEquals(resolve(url).func.view_class, MeetingDetailView)
def test_meeting_update_url(self):
url = reverse('meeting-update', args=['1'])
self.assertEquals(resolve(url).func.view_class, MeetingUpdateView)
def test_meeting_delete_url(self):
url = reverse('meeting-delete', args=['1'])
self.assertEquals(resolve(url).func.view_class, MeetingDeleteView)
def test_meeting_search_url(self):
url = reverse('meeting-search')
self.assertEquals(resolve(url).func.view_class, SearchListView)
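# Editor's note (not part of the original tests): reverse() builds a URL from
# its route name and resolve() maps a path back to the view that would handle
# it, so asserting on resolve(url).func.view_class checks the URLconf wiring
# without executing any view logic or touching the database.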
| 36.083333 | 113 | 0.707467 |
acf4c39922e4ab1dfba564df0c3bd2c33ad5b2c1 | 3,651 | py | Python | project/experiments/exp_800_mile_stone/src/old/29.3.0.read_results.py | liusida/thesis-bodies | dceb8a36efd2cefc611f6749a52b56b9d3572f7a | [
"MIT"
] | null | null | null | project/experiments/exp_800_mile_stone/src/old/29.3.0.read_results.py | liusida/thesis-bodies | dceb8a36efd2cefc611f6749a52b56b9d3572f7a | [
"MIT"
] | null | null | null | project/experiments/exp_800_mile_stone/src/old/29.3.0.read_results.py | liusida/thesis-bodies | dceb8a36efd2cefc611f6749a52b56b9d3572f7a | [
"MIT"
] | null | null | null | import pickle,os,glob
import numpy as np
import pandas as pd
from common import tflogs2pandas
from common import common
args = common.args
with open("output_data/jobs_vanilla4_revi.pickle", "rb") as f:
all_jobs = pickle.load(f)
cache_path = f"output_data/tmp/vanilla_revisit_cache"
def load_tb(force=0):
try:
if force:
raise FileNotFoundError
df = pd.read_pickle(cache_path)
except FileNotFoundError:
dfs = []
for idx, job in all_jobs.items():
tb_path = f"output_data/tensorboard_vanilla4/model-399-499-599-699-CustomAlignWrapper-md{job['str_md5']}-sd{job['run_seed']}/PPO_1"
# paths = glob.glob(tb_path)
# for tb_path in paths:
# _tmp = tb_path.split("sd")[-1]
# vacc_run_seed = _tmp.split("/")[0] # need to read the run seed from vacc..
print(f"Loading {tb_path}")
if not os.path.exists(tb_path):
continue
df = tflogs2pandas.tflog2pandas(tb_path)
df = df[df["metric"].str.startswith("eval/")]
# df["num_mutate"] = job["num_mutate"]
df["alignment_id"] = job["seed"]
df["custom_alignment"] = job["custom_alignment"]
df["str_md5"] = job["str_md5"]
df["vacc_run_seed"] = job['run_seed']
dfs.append(df)
df = pd.concat(dfs)
# print(df)
df.to_pickle(cache_path)
return df
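# Editor's note (not in the original script): load_tb() implements a simple
# pickle cache. A truthy `force` argument -- wired here to args.force_read from
# common.args, whose exact CLI flag is defined elsewhere -- skips the cached
# frame and re-reads the TensorBoard event files, e.g. load_tb(force=1).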
df = load_tb(args.force_read)
# print(df)
# print(all_jobs)
import seaborn as sns
import matplotlib.pyplot as plt
def check_finished():
sns.countplot(data=df, x="str_md5") # check every run is here
plt.show()
plt.close()
# check_finished()
def label_body(metric):
body = ""
if "399" in metric:
body = "Walker2D"
elif "499" in metric:
body = "HalfCheetah"
elif "599" in metric:
body = "Ant"
elif "699" in metric:
body = "Hopper"
return body
df["body"] = df.apply(lambda row: label_body(row['metric']), axis=1)
def label_best_worst(str_md5, best_str_md5 = ""):
if str_md5==best_str_md5:
return "Best"
else:
return "Worst"
def plot_best_vs_worst(evaluate_at_step = 5005312.0, dry_run=False):
_df = df[(df["step"]==evaluate_at_step)]
mean_final_values = _df.groupby(['str_md5'], sort=False)['value'].mean().sort_values()
# print(mean_final_values)
# print(mean_final_values.index[0], mean_final_values.index[-1])
worst_str_md5 = mean_final_values.index[0]
best_str_md5 = mean_final_values.index[-1]
print(df[df["str_md5"]==worst_str_md5].head(n=10))
if not dry_run:
_df = df[(df["str_md5"]==worst_str_md5)|(df["str_md5"]==best_str_md5)]
_df['label'] = _df.apply(lambda row: label_best_worst(row['str_md5'], best_str_md5), axis=1)
g = sns.FacetGrid(data=_df, col="body", hue="label")
g.map(sns.lineplot, "step", "value")
g.fig.suptitle("Random generate 40 alignments and pick the best and worst.")
g.add_legend()
plt.tight_layout()
plt.savefig(f"output_data/tmp/best_vs_worst_vanilla4.png")
plt.close()
print("")
print(f"best_alignment:")
_df = df[(df["str_md5"]==best_str_md5)]
print(_df.iloc[0]["custom_alignment"])
print(_df.iloc[0]["str_md5"])
print(f"worst_alignment:")
_df = df[(df["str_md5"]==worst_str_md5)]
print(_df.iloc[0]["custom_alignment"])
print(_df.iloc[0]["str_md5"])
plot_best_vs_worst(dry_run=False)
def plot_all():
g = sns.FacetGrid(data=df, col="metric", hue="str_md5")
g.map(sns.lineplot, "step", "value")
plt.show()
# plot_all() | 32.891892 | 143 | 0.62476 |
acf4c4e50634272069a6733bd7f5d5d35769e1bb | 38,014 | py | Python | tests/handlers/v2/test_requests.py | jaydhulia/consoleme | 18d7c7603bd2ea454bfa8e3de24d963e91bcce47 | [
"Apache-2.0"
] | 2,835 | 2020-12-09T19:07:24.000Z | 2022-03-31T06:38:44.000Z | tests/handlers/v2/test_requests.py | jaydhulia/consoleme | 18d7c7603bd2ea454bfa8e3de24d963e91bcce47 | [
"Apache-2.0"
] | 179 | 2020-12-10T01:51:25.000Z | 2022-03-31T02:06:06.000Z | tests/handlers/v2/test_requests.py | jaydhulia/consoleme | 18d7c7603bd2ea454bfa8e3de24d963e91bcce47 | [
"Apache-2.0"
] | 219 | 2020-12-09T21:30:56.000Z | 2022-03-31T05:57:36.000Z | from unittest.mock import mock_open, patch
import ujson as json
from deepdiff import DeepDiff
from tornado.testing import AsyncHTTPTestCase
class TestRequestsHandler(AsyncHTTPTestCase):
def get_app(self):
from consoleme.config import config
self.config = config
from consoleme.routes import make_app
return make_app(jwt_validator=lambda x: {})
def test_get(self):
# Method not allowed
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
response = self.fetch("/api/v2/requests", method="GET", headers=headers)
self.assertEqual(response.code, 405)
def test_requestshandler_post(self):
mock_request_data = [
{
"request_id": 12345,
"username": "user@example.com",
"request_time": 22345,
},
{
"request_id": 12346,
"username": "userb@example.com",
"request_time": 12345,
},
]
expected_response = {
"totalCount": 2,
"filteredCount": 2,
"data": mock_request_data,
}
from consoleme.lib.redis import RedisHandler
# Mocked by fakeredis
red = RedisHandler().redis_sync()
red.set(
self.config.get("cache_policy_requests.redis_key", "ALL_POLICY_REQUESTS"),
json.dumps(mock_request_data),
)
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
response = self.fetch(
"/api/v2/requests", method="POST", headers=headers, body="{}"
)
self.assertEqual(response.code, 200)
diff = DeepDiff(json.loads(response.body), expected_response)
self.assertFalse(diff)
def test_post_request(self):
mock_request_data = {
"justification": "test asdf",
"admin_auto_approve": False,
"changes": {
"changes": [
{
"principal": {
"principal_arn": "arn:aws:iam::123456789012:role/TestInstanceProfile",
"principal_type": "AwsResource",
},
"change_type": "inline_policy",
"action": "attach",
"policy": {
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": ["sqs:SetQueueAttributes"],
"Effect": "Allow",
"Resource": [
"arn:aws:sqs:us-east-1:223456789012:queue"
],
}
],
}
},
}
]
},
}
from consoleme.lib.redis import RedisHandler
# Mocked by fakeredis
red = RedisHandler().redis_sync()
red.set(
self.config.get("cache_policy_requests.redis_key", "ALL_POLICY_REQUESTS"),
json.dumps(mock_request_data),
)
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
response = self.fetch(
"/api/v2/request",
method="POST",
headers=headers,
body=json.dumps(mock_request_data),
)
self.assertEqual(response.code, 200)
response_d = json.loads(response.body)
self.assertEqual(response_d["errors"], 0)
self.assertEqual(response_d["request_created"], True)
self.assertIn("/policies/request/", response_d["request_url"])
def test_post_request_admin_auto_approve(self):
mock_request_data = {
"justification": "test asdf",
"admin_auto_approve": True,
"changes": {
"changes": [
{
"principal": {
"principal_arn": "arn:aws:iam::123456789012:role/TestInstanceProfile",
"principal_type": "AwsResource",
},
"change_type": "inline_policy",
"action": "attach",
"policy": {
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": ["sqs:SetQueueAttributes"],
"Effect": "Allow",
"Resource": [
"arn:aws:sqs:us-east-1:223456789012:queue"
],
}
],
}
},
}
]
},
}
from consoleme.lib.redis import RedisHandler
# Mocked by fakeredis
red = RedisHandler().redis_sync()
red.set(
self.config.get("cache_policy_requests.redis_key", "ALL_POLICY_REQUESTS"),
json.dumps(mock_request_data),
)
headers = {
self.config.get("auth.user_header_name"): "consoleme_admins@example.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
response = self.fetch(
"/api/v2/request",
method="POST",
headers=headers,
body=json.dumps(mock_request_data),
)
self.assertEqual(response.code, 200)
response_d = json.loads(response.body)
self.assertEqual(response_d["errors"], 0)
self.assertEqual(response_d["request_created"], True)
self.assertIn("/policies/request/", response_d["request_url"])
self.assertIn(
{"status": "success", "message": "Successfully updated request status"},
response_d["action_results"],
)
self.assertIn(
{"status": "success", "message": "Successfully updated change in dynamo"},
response_d["action_results"],
)
def test_post_limit(self):
mock_request_data = [
{"request_id": 12345, "username": "user@example.com"},
{"request_id": 12346, "username": "userb@example.com"},
]
from consoleme.lib.redis import RedisHandler
# Mocked by fakeredis
red = RedisHandler().redis_sync()
red.set(
self.config.get("cache_policy_requests.redis_key", "ALL_POLICY_REQUESTS"),
json.dumps(mock_request_data),
)
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
response = self.fetch(
"/api/v2/requests",
method="POST",
headers=headers,
body=json.dumps({"limit": 1}),
)
self.assertEqual(response.code, 200)
self.assertEqual(len(json.loads(response.body)), 3)
self.assertEqual(len(json.loads(response.body)["data"]), 1)
def test_post_filter(self):
mock_request_data = [
{"request_id": 12345, "username": "user@example.com"},
{"request_id": 12346, "username": "userb@example.com"},
]
from consoleme.lib.redis import RedisHandler
# Mocked by fakeredis
red = RedisHandler().redis_sync()
red.set(
self.config.get("cache_policy_requests.redis_key", "ALL_POLICY_REQUESTS"),
json.dumps(mock_request_data),
)
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
response = self.fetch(
"/api/v2/requests",
method="POST",
headers=headers,
body=json.dumps({"filters": {"request_id": "12346"}}),
)
self.assertEqual(response.code, 200)
res = json.loads(response.body)
self.assertEqual(len(json.loads(response.body)), 3)
self.assertEqual(len(json.loads(response.body)["data"]), 1)
self.assertEqual(res["data"][0], mock_request_data[1])
def test_post_new_managed_policy_resource_request(self):
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
input_body = {
"admin_auto_approve": False,
"changes": {
"changes": [
{
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:policy/testpolicy",
},
"change_type": "managed_policy_resource",
"new": True,
"action": "update",
"policy": {
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:GetObjectVersionTagging",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:GetObject",
"s3:GetObjectVersionAcl",
"s3:GetObjectTagging",
"s3:GetObjectVersion",
"s3:ListBucketVersions",
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::12345",
"arn:aws:s3:::12345/*",
],
}
],
}
},
}
]
},
}
response = self.fetch(
"/api/v2/request",
method="POST",
headers=headers,
body=json.dumps(input_body),
)
result = json.loads(response.body)
result.pop("request_id")
result.pop("request_url")
result["extended_request"].pop("id")
result["extended_request"].pop("timestamp")
result["extended_request"]["changes"]["changes"][0].pop("id")
self.assertEqual(
result,
{
"errors": 0,
"request_created": True,
"action_results": [],
"extended_request": {
"request_url": None,
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:policy/testpolicy",
},
"justification": None,
"requester_email": "user@github.com",
"approvers": [],
"request_status": "pending",
"cross_account": False,
"arn_url": "/policies/edit/123456789012/managed_policy/testpolicy",
"admin_auto_approve": False,
"changes": {
"changes": [
{
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:policy/testpolicy",
},
"change_type": "managed_policy_resource",
"resources": [],
"version": "3.0",
"status": "not_applied",
"autogenerated": False,
"updated_by": None,
"new": True,
"policy": {
"version": None,
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:GetObjectVersionTagging",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:GetObject",
"s3:GetObjectVersionAcl",
"s3:GetObjectTagging",
"s3:GetObjectVersion",
"s3:ListBucketVersions",
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::12345",
"arn:aws:s3:::12345/*",
],
}
],
},
"policy_sha256": None,
},
"old_policy": None,
}
]
},
"requester_info": {
"email": "user@github.com",
"extended_info": {
"domain": "",
"userName": "user@github.com",
"name": {"givenName": "", "familyName": "", "fullName": ""},
"primaryEmail": "user@github.com",
},
"details_url": None,
"photo_url": "https://www.gravatar.com/avatar/1496f7f4fd086e2d0a0460220331e9ec?d=mp",
},
"reviewer": None,
"comments": [],
},
},
)
def test_post_new_managed_policy_resource_request_autoapprove(self):
headers = {
self.config.get("auth.user_header_name"): "consoleme_admins@example.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
input_body = {
"admin_auto_approve": True,
"changes": {
"changes": [
{
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:policy/randompath/extra/testpolicy",
},
"change_type": "managed_policy_resource",
"new": True,
"action": "update",
"policy": {
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:GetObjectVersionTagging",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:GetObject",
"s3:GetObjectVersionAcl",
"s3:GetObjectTagging",
"s3:GetObjectVersion",
"s3:ListBucketVersions",
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::12345",
"arn:aws:s3:::12345/*",
],
}
],
}
},
}
]
},
}
response = self.fetch(
"/api/v2/request",
method="POST",
headers=headers,
body=json.dumps(input_body),
)
result = json.loads(response.body)
result.pop("request_id")
result.pop("request_url")
result["extended_request"].pop("id")
result["extended_request"].pop("timestamp")
result["extended_request"]["changes"]["changes"][0].pop("id")
result["extended_request"].pop("comments")
self.assertEqual(
result,
{
"errors": 0,
"request_created": True,
"action_results": [
{
"status": "success",
"message": "Successfully created managed policy arn:aws:iam::123456789012:policy/randompath/extra/testpolicy",
},
{
"status": "success",
"message": "Successfully updated change in dynamo",
},
{
"status": "success",
"message": "Successfully updated request status",
},
],
"extended_request": {
"request_url": None,
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:policy/randompath/extra/testpolicy",
},
"justification": None,
"requester_email": "consoleme_admins@example.com",
"approvers": [],
"request_status": "approved",
"cross_account": False,
"arn_url": "/policies/edit/123456789012/managed_policy/randompath/extra/testpolicy",
"admin_auto_approve": True,
"changes": {
"changes": [
{
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:policy/randompath/extra/testpolicy",
},
"change_type": "managed_policy_resource",
"resources": [],
"version": "3.0",
"status": "applied",
"autogenerated": False,
"updated_by": "consoleme_admins@example.com",
"new": True,
"policy": {
"version": None,
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:GetObjectVersionTagging",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:GetObject",
"s3:GetObjectVersionAcl",
"s3:GetObjectTagging",
"s3:GetObjectVersion",
"s3:ListBucketVersions",
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::12345",
"arn:aws:s3:::12345/*",
],
}
],
},
"policy_sha256": None,
},
"old_policy": None,
}
]
},
"requester_info": {
"email": "consoleme_admins@example.com",
"extended_info": {
"domain": "",
"userName": "consoleme_admins@example.com",
"name": {"givenName": "", "familyName": "", "fullName": ""},
"primaryEmail": "consoleme_admins@example.com",
},
"details_url": None,
"photo_url": "https://www.gravatar.com/avatar/ec2ee26a6397f686011678e50aeb4e81?d=mp",
},
"reviewer": "consoleme_admins@example.com",
},
},
)
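    # Editor's note: the next test exercises the dry-run path. As its
    # assertions show, "dry_run": True only validates and echoes back the
    # extended request: request_created is False and request_id, request_url
    # and action_results all remain None, so nothing is persisted.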
def test_post_iam_role_request_dry_run(self):
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
input_body = {
"dry_run": True,
"changes": {
"changes": [
{
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:role/RoleNumber1",
},
"change_type": "inline_policy",
"action": "attach",
"policy": {
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": ["sqs:*"],
"Effect": "Allow",
"Resource": ["*"],
}
],
}
},
}
]
},
}
response = self.fetch(
"/api/v2/request",
method="POST",
headers=headers,
body=json.dumps(input_body),
)
result = json.loads(response.body)
result["extended_request"].pop("id")
result["extended_request"].pop("timestamp")
result["extended_request"]["changes"]["changes"][0].pop("id")
result["extended_request"]["changes"]["changes"][0].pop("policy_name")
self.assertEqual(
result,
{
"errors": 0,
"request_created": False,
"request_id": None,
"request_url": None,
"action_results": None,
"extended_request": {
"request_url": None,
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:role/RoleNumber1",
},
"justification": None,
"requester_email": "user@github.com",
"approvers": [],
"request_status": "pending",
"cross_account": False,
"arn_url": "/policies/edit/123456789012/iamrole/RoleNumber1",
"admin_auto_approve": False,
"changes": {
"changes": [
{
"principal": {
"principal_type": "AwsResource",
"principal_arn": "arn:aws:iam::123456789012:role/RoleNumber1",
},
"change_type": "inline_policy",
"resources": [],
"version": "3.0",
"status": "not_applied",
"autogenerated": False,
"updated_by": None,
"new": True,
"action": "attach",
"policy": {
"version": None,
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": ["sqs:*"],
"Effect": "Allow",
"Resource": ["*"],
}
],
},
"policy_sha256": None,
},
"old_policy": None,
}
]
},
"requester_info": {
"email": "user@github.com",
"extended_info": {
"domain": "",
"userName": "user@github.com",
"name": {"givenName": "", "familyName": "", "fullName": ""},
"primaryEmail": "user@github.com",
},
"details_url": None,
"photo_url": "https://www.gravatar.com/avatar/1496f7f4fd086e2d0a0460220331e9ec?d=mp",
},
"reviewer": None,
"comments": [],
},
},
)
@patch("git.Repo")
@patch("git.Git")
def test_post_honeybee_request_dry_run(self, mock_git, mock_repo):
headers = {
self.config.get("auth.user_header_name"): "user@github.com",
self.config.get("auth.groups_header_name"): "groupa,groupb,groupc",
}
input_body = {
"dry_run": True,
"changes": {
"changes": [
{
"principal": {
"principal_type": "HoneybeeAwsResourceTemplate",
"repository_name": "honeybee_templates",
"resource_identifier": "iamrole/abc.yaml",
"resource_url": "http://example.com/fake_repo/path/to/file.yaml",
},
"change_type": "inline_policy",
"action": "attach",
"policy": {
"policy_document": {
"Version": "2012-10-17",
"Statement": [
{
"Action": [
"s3:GetObjectVersionTagging",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:GetObject",
"s3:GetObjectVersionAcl",
"s3:GetObjectTagging",
"s3:GetObjectVersion",
"s3:ListBucketVersions",
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::bucketa",
"arn:aws:s3:::bucketa/*",
],
"Sid": "cmccastrapel1623864565xhwu",
}
],
}
},
}
]
},
}
template_data = """
Policies:
- IncludeAccounts:
- account_a
- account_b
- account_c
PolicyName: policy_a
Statement:
- Action:
- '*'
Effect: Allow
Resource:
- '*'
Sid: admin"""
with patch("builtins.open", mock_open(read_data=template_data)):
response = self.fetch(
"/api/v2/request",
method="POST",
headers=headers,
body=json.dumps(input_body),
)
result = json.loads(response.body)
result["extended_request"].pop("timestamp")
result["extended_request"].pop("id")
yaml_policy = result["extended_request"]["changes"]["changes"][0].pop(
"policy"
)
from consoleme.lib.yaml import yaml
# Get this in a standard dictionary format
generated_policy = json.loads(json.dumps(yaml.load(yaml_policy)))
generated_policy["Policies"][1]["Statement"][0].pop("Sid")
self.assertEqual(
generated_policy,
{
"Policies": [
{
"IncludeAccounts": ["account_a", "account_b", "account_c"],
"PolicyName": "policy_a",
"Statement": [
{
"Action": ["*"],
"Effect": "Allow",
"Resource": ["*"],
"Sid": "admin",
}
],
},
{
"PolicyName": "self_service_generated",
"Statement": [
{
"Action": [
"s3:GetObjectVersionTagging",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:GetObject",
"s3:GetObjectVersionAcl",
"s3:GetObjectTagging",
"s3:GetObjectVersion",
"s3:ListBucketVersions",
],
"Effect": "Allow",
"Resource": [
"arn:aws:s3:::bucketa",
"arn:aws:s3:::bucketa/*",
],
}
],
},
]
},
)
self.assertEqual(
result,
{
"errors": 0,
"request_created": False,
"request_id": None,
"request_url": None,
"action_results": None,
"extended_request": {
"request_url": "",
"principal": {
"principal_type": "HoneybeeAwsResourceTemplate",
"repository_name": "honeybee_templates",
"resource_identifier": "iamrole/abc.yaml",
"resource_url": "http://example.com/fake_repo/path/to/file.yaml",
},
"justification": None,
"requester_email": "user@github.com",
"approvers": [],
"request_status": "pending",
"cross_account": False,
"arn_url": None,
"admin_auto_approve": False,
"changes": {
"changes": [
{
"principal": {
"principal_type": "HoneybeeAwsResourceTemplate",
"repository_name": "honeybee_templates",
"resource_identifier": "iamrole/abc.yaml",
"resource_url": "http://example.com/fake_repo/path/to/file.yaml",
},
"change_type": "generic_file",
"resources": [],
"version": 3.0,
"status": "not_applied",
"id": None,
"autogenerated": False,
"updated_by": None,
"action": "attach",
"old_policy": "Policies:\n - IncludeAccounts:\n - account_a\n - account_b\n - account_c\n PolicyName: policy_a\n Statement:\n - Action:\n - '*'\n Effect: Allow\n Resource:\n - '*'\n Sid: admin\n",
"encoding": "yaml",
}
]
},
"requester_info": {
"email": "user@github.com",
"extended_info": {
"domain": "",
"userName": "user@github.com",
"name": {
"givenName": "",
"familyName": "",
"fullName": "",
},
"primaryEmail": "user@github.com",
},
"details_url": None,
"photo_url": "https://www.gravatar.com/avatar/1496f7f4fd086e2d0a0460220331e9ec?d=mp",
},
"reviewer": None,
"comments": [],
},
},
)
class TestRequestDetailHandler(AsyncHTTPTestCase):
def get_app(self):
from consoleme.routes import make_app
return make_app(jwt_validator=lambda x: {})
def test_get(self):
# expected = {
# "status": 501,
# "title": "Not Implemented",
# "message": "Get request details",
# }
# headers = {
# config.get("auth.user_header_name"): "user@github.com",
# config.get("auth.groups_header_name"): "groupa,groupb,groupc",
# }
# response = self.fetch(
# "/api/v2/requests/16fd2706-8baf-433b-82eb-8c7fada847da",
# method="GET",
# headers=headers,
# )
# TODO: add unit tests
pass
# self.assertEqual(response.code, 501)
# self.assertDictEqual(json.loads(response.body), expected)
def test_put(self):
# expected = {
# "status": 501,
# "title": "Not Implemented",
# "message": "Update request details",
# }
# headers = {
# config.get("auth.user_header_name"): "user@github.com",
# config.get("auth.groups_header_name"): "groupa,groupb,groupc",
# }
# response = self.fetch(
# "/api/v2/requests/16fd2706-8baf-433b-82eb-8c7fada847da",
# method="PUT",
# headers=headers,
# body="{}",
# )
# self.assertEqual(response.code, 501)
# self.assertDictEqual(json.loads(response.body), expected)
# TODO: add unit tests
pass
| 42.760405 | 298 | 0.360446 |
acf4c640830ed5a145344f083c8e6c91c9522d38 | 2,277 | py | Python | torch/legacy/nn/SpatialAveragePooling.py | UmaTaru/run | be29e4d41a4de3dee27cd6796801bfe51382d294 | [
"MIT"
] | 51 | 2020-01-26T23:32:57.000Z | 2022-03-20T14:49:57.000Z | torch/legacy/nn/SpatialAveragePooling.py | UmaTaru/run | be29e4d41a4de3dee27cd6796801bfe51382d294 | [
"MIT"
] | 2 | 2020-12-19T20:00:28.000Z | 2021-03-03T20:22:45.000Z | torch/legacy/nn/SpatialAveragePooling.py | UmaTaru/run | be29e4d41a4de3dee27cd6796801bfe51382d294 | [
"MIT"
] | 33 | 2020-02-18T16:15:48.000Z | 2022-03-24T15:12:05.000Z | import torch
from .Module import Module
class SpatialAveragePooling(Module):
def __init__(self, kW, kH, dW=1, dH=1, padW=0, padH=0):
super(SpatialAveragePooling, self).__init__()
self.kW = kW
self.kH = kH
self.dW = dW
self.dH = dH
self.padW = padW
self.padH = padH
self.ceil_mode = False
self.count_include_pad = True
self.divide = True
def ceil(self):
self.ceil_mode = True
return self
def floor(self):
self.ceil_mode = False
return self
def setCountIncludePad(self):
self.count_include_pad = True
return self
def setCountExcludePad(self):
self.count_include_pad = False
return self
def updateOutput(self, input):
self._backend.SpatialAveragePooling_updateOutput(
self._backend.library_state,
input,
self.output,
self.kW, self.kH,
self.dW, self.dH,
self.padW, self.padH,
self.ceil_mode,
self.count_include_pad
)
# for backward compatibility with saved models
# which are not supposed to have "divide" field
if not self.divide:
self.output.mul_(self.kW * self.kH)
return self.output
def updateGradInput(self, input, gradOutput):
if self.gradInput is not None:
self._backend.SpatialAveragePooling_updateGradInput(
self._backend.library_state,
input,
gradOutput,
self.gradInput,
self.kW, self.kH,
self.dW, self.dH,
self.padW, self.padH,
self.ceil_mode,
self.count_include_pad
)
# for backward compatibility
if not self.divide:
self.gradInput.mul_(self.kW * self.kH)
return self.gradInput
def __repr__(self):
s = super(SpatialAveragePooling, self).__repr__()
s += '({}x{}, {}, {}'.format(self.kW, self.kH, self.dW, self.dH)
if (self.padW or self.padH) and (self.padW != 0 or self.padH != 0):
s += ', {}, {}'.format(self.padW, self.padH)
s += ')'
return s
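# Usage sketch (editor's illustration, not part of the original module): the
# legacy nn modules are driven imperatively rather than via __call__, e.g.
#   pool = SpatialAveragePooling(2, 2, 2, 2)
#   out = pool.updateOutput(torch.randn(1, 3, 8, 8))  # -> 1x3x4x4
# Setting pool.divide = False multiplies the averaged outputs back by kW * kH,
# which turns the module into a sum pool.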
| 28.4625 | 75 | 0.548968 |
acf4c77608f204fd9132b180d8e30beb432debe8 | 4,744 | py | Python | flow/scheduling/base.py | Charlottez112/signac-flow | a8bdd35b5244887a26b8146ede2f718865f02062 | [
"BSD-3-Clause"
] | 49 | 2019-01-31T01:37:15.000Z | 2022-03-28T17:09:08.000Z | flow/scheduling/base.py | Charlottez112/signac-flow | a8bdd35b5244887a26b8146ede2f718865f02062 | [
"BSD-3-Clause"
] | 528 | 2019-02-03T23:02:17.000Z | 2022-03-30T08:52:32.000Z | flow/scheduling/base.py | Charlottez112/signac-flow | a8bdd35b5244887a26b8146ede2f718865f02062 | [
"BSD-3-Clause"
] | 42 | 2019-02-13T21:07:43.000Z | 2022-03-13T12:29:45.000Z | # Copyright (c) 2018 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
"""Definition of base classes for the scheduling system."""
import enum
import subprocess
import tempfile
import time
from abc import ABC, abstractmethod
from ..errors import SubmitError
class JobStatus(enum.IntEnum):
"""Classifies the job's execution status."""
unknown = 1
"""Unknown cluster job status."""
registered = 2
"""The cluster job is registered with the scheduler, but no other status is known."""
inactive = 3
"""The cluster job is inactive.
This includes states like completed, cancelled, or timed out.
"""
submitted = 4
"""The cluster job has been submitted.
Note that this state is never returned by a scheduler, but is an assumed
state immediately after a cluster job is submitted.
"""
held = 5
"""The cluster job is held."""
queued = 6
"""The cluster job is queued."""
active = 7
"""The cluster job is actively running."""
error = 8
"""The cluster job is in an error or failed state."""
placeholder = 127
"""A placeholder state that is used for status rendering when no operations are eligible."""
user = 128
"""All user-defined states must be >=128 in value."""
class ClusterJob:
"""Class representing a cluster job."""
def __init__(self, job_id, status=None):
self._job_id = job_id
self._status = status
def _id(self):
return self._job_id
def __str__(self):
"""Return job ID string."""
return str(self._id())
def name(self):
"""Return the name of the cluster job."""
return self._id()
def status(self):
"""Return the status of the cluster job."""
return self._status
class Scheduler(ABC):
"""Abstract base class for schedulers."""
# The UNIX time stamp of the last scheduler query.
_last_query = None
# The amount of time in seconds to wait between scheduler queries.
# Repeated scheduler queries might risk a denial-of-service attack.
_dos_timeout = 10
@classmethod
def _prevent_dos(cls):
"""Prevent denial of service by enforcing a back-off period.
This method should *always* be called before querying the scheduler.
This method will raise an exception if it is called more than
once within a time window defined by the value of ``_dos_timeout``.
This is to prevent an (accidental) denial-of-service attack on the
scheduling system.
"""
if cls._last_query is not None:
if time.time() - cls._last_query < cls._dos_timeout:
raise RuntimeError("Too many scheduler requests within a short time!")
cls._last_query = time.time()
@abstractmethod
def jobs(self):
"""Yield all cluster jobs.
Yields
------
:class:`.ClusterJob`
Cluster job.
"""
raise NotImplementedError()
@abstractmethod
def submit(self, script, **kwargs):
"""Submit a job script to the scheduler for execution."""
raise NotImplementedError()
@classmethod
@abstractmethod
def is_present(cls):
"""Return True if the scheduler is detected."""
raise NotImplementedError()
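# A minimal concrete scheduler, sketched for illustration only; it is not part
# of signac-flow, and the "echo" submit command is an assumption chosen to keep
# the example self-contained.
class _ExampleScheduler(Scheduler):
    """Toy scheduler that records submissions and reports them as inactive."""
    _submitted = []
    @classmethod
    def is_present(cls):
        # A real implementation would probe for the scheduler's binaries.
        return True
    def jobs(self):
        # Yield one ClusterJob per recorded submission.
        for job_id in self._submitted:
            yield ClusterJob(job_id, status=JobStatus.inactive)
    def submit(self, script, pretend=False, **kwargs):
        # Reuses the module-level _call_submit helper defined below.
        self._submitted.append("job-{}".format(len(self._submitted)))
        return _call_submit(["echo"], script, pretend)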
def _call_submit(submit_cmd, script, pretend):
"""Call submit command with a temporary script file.
Parameters
----------
submit_cmd : list[str]
List of strings composing the submission command and any flags.
script : str
Script as a string.
pretend : bool
If True, the script will be printed to screen instead of submitted.
Returns
-------
bool
True if the submission command succeeds (or in pretend mode).
Raises
------
:class:`~flow.errors.SubmitError`
If the submission command fails.
"""
submit_cmd_string = " ".join(submit_cmd)
if pretend:
print(f"# Submit command: {submit_cmd_string}")
print(script)
print()
else:
with tempfile.NamedTemporaryFile() as tmp_submit_script:
tmp_submit_script.write(str(script).encode("utf-8"))
tmp_submit_script.flush()
submit_cmd.append(tmp_submit_script.name)
try:
subprocess.check_output(
submit_cmd, stderr=subprocess.STDOUT, universal_newlines=True
)
except subprocess.CalledProcessError as error:
raise SubmitError(
f"Error when calling submission command {submit_cmd_string}:\n{error.output}"
)
return True
| 27.74269 | 97 | 0.631956 |
acf4c7af7981697a0181d74b80be63f9f387337c | 21,949 | py | Python | turbulenz_tools/tools/obj2json.py | turbulenz/turbulenz_tools | 36e4a15d4fd9cdc0adf0ea365e2e09013565d6fa | [
"MIT"
] | 12 | 2015-01-26T16:15:28.000Z | 2021-12-11T08:51:41.000Z | turbulenz_tools/tools/obj2json.py | turbulenz/turbulenz_tools | 36e4a15d4fd9cdc0adf0ea365e2e09013565d6fa | [
"MIT"
] | null | null | null | turbulenz_tools/tools/obj2json.py | turbulenz/turbulenz_tools | 36e4a15d4fd9cdc0adf0ea365e2e09013565d6fa | [
"MIT"
] | 6 | 2015-02-23T13:39:54.000Z | 2021-09-30T10:04:22.000Z | #!/usr/bin/python
# Copyright (c) 2009-2013 Turbulenz Limited
"""
Convert LightWave (.obj) OBJ2 files into a Turbulenz JSON asset.
Supports generating NBTs.
"""
import logging
LOG = logging.getLogger('asset')
# pylint: disable=W0403
from stdtool import standard_main, standard_json_out, standard_include
from asset2json import JsonAsset
from mesh import Mesh
from node import NodeName
from os.path import basename
# pylint: enable=W0403
__version__ = '1.2.2'
__dependencies__ = ['asset2json', 'mesh', 'node', 'vmath']
DEFAULT_EFFECT_NAME = 'lambert'
# Note:
# * Does not have support for .mtl files yet, but expects any relevant materials
#   to be declared in a .material file and included as a dependency in deps.yaml
# * This script sets the default node name of the asset parsed to be the file name (without the path),
# unless anything else is supplied. This could lead to clashes with other nodes with the same name.
# * Each surface is assumed to only have a single material. A new surface will be made upon requiring a new material.
#######################################################################################################################
def _increment_name(name):
"""Returns a name similar to the inputted name
If the inputted name ends with a hyphen and then a number,
then the outputted name has that number incremented.
Otherwise, the outputted name has a hyphen and the number '1' appended to it.
"""
index = name.rfind('-')
if index == -1:
# The is not already followed by a number, so just append -1
return name + '-1'
else:
# The number is already followed by a number, so increment the number
value = int(name[index+1:])
return name[:index+1] + str(value+1)
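# Illustrative examples (editor's addition):
#   _increment_name('surface')   -> 'surface-1'
#   _increment_name('surface-1') -> 'surface-2'
# Note that the token after the last hyphen must parse as an int, so a name
# like 'my-surface' would raise ValueError here.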
def purge_empty(dictionary, recurseOnce = False):
"""Removes all elements of a dictionary which return true for elem.is_empty().
If recurseOnce is True, then it will call element.purge_empty()
for any remaining elements in the dictionary. This will not recurse further."""
names_empty = []
for name, elem in dictionary.iteritems():
if elem.is_empty():
names_empty.append(name)
# Need to compile the list of names of empty elements in a separate loop above,
# to avoid changing dictionary during iteration over it directly.
for name in names_empty:
del dictionary[name]
if recurseOnce:
for elem in dictionary.itervalues():
# Note that it is here assumed that the element has a method purge_empty
elem.purge_empty()
#######################################################################################################################
class Surface(object):
"""Represents a surface (a.k.a. group) as parsed from the .obj file into .json format
Contains a material, and indices into the Obj2json.primitives list
to identify which polygons belong to this surface."""
def __init__(self, first, material):
# first refers to initial index of triangles parsed
# count refers to number of triangles parsed
self.first = first
self.count = 0
self.material_name = material
def is_empty(self):
return self.count == 0
class Shape(object):
"""Represents a shape (a.k.a. object) as parsed from the .obj file into .json format
Contains a dictionary of names -> surfaces."""
    # NB: Do not pass in None for the surfaces parameter, all shapes are assumed to
# contain a dictionary with at least one surface, even if it is a default one.
def __init__(self, surfaces):
# Dictionary of names -> surfaces
self.surfaces = surfaces
def is_empty(self):
empty = True
for surface in self.surfaces.itervalues():
# If a single surface is non-empty, empty will be false
empty = empty and surface.is_empty()
return empty
def purge_empty(self):
purge_empty(self.surfaces)
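# Editor's note: a Surface does not own its triangles; first/count simply
# bracket a run of entries in the mesh's primitive list, which is later sliced
# as primitives[first:first + count] when surfaces are attached to the JSON
# asset (see attach_surface below).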
#######################################################################################################################
# pylint: disable=R0904
class Obj2json(Mesh):
"""Parse a OBJ file and generate a Turbulenz JSON geometry asset."""
# TODO: Probably some more information should be moved from the Obj2json class to the Shape class.
def __init__(self, obj_file_name):
self.default_shape_name = obj_file_name or 'initial_shape'
self.default_surface_name = 'initial_surface'
self.default_material_name = 'default'
# Name of the shape/surface/material from the most recently parsed 'o'/'g'/'usemtl' line respectively
self.curr_shape_name = self.default_shape_name
self.curr_surf_name = self.default_surface_name
self.curr_mtl_name = self.default_material_name
# To keep of track of number of polygons parsed so far
self.next_polygon_index = 0
# A tuple of indices per vertex element
self.indices = [ ]
# Shortcut to be able to access the current surface in fewer characters and lookups
self.curr_surf = Surface(0, self.default_material_name)
# Dictionary of names -> shapes. Initialise to a default shape with a default surface
self.shapes = { self.default_shape_name : Shape({ self.default_surface_name : self.curr_surf }) }
Mesh.__init__(self)
def __read_object_name(self, data):
"""Parse the 'o' line. This line contains the mesh name."""
LOG.debug("object name:%s", data)
# Remove any shapes with no surfaces (e.g. the default shape if a named one is given)
purge_empty(self.shapes)
# If a shape with this name has already been declared, do not change it,
        # append the following faces to the current shape instead
if data not in self.shapes:
self.curr_shape_name = data
self.curr_surf = Surface(self.next_polygon_index, self.curr_mtl_name)
self.shapes[data] = Shape({self.curr_surf_name : self.curr_surf})
def __read_group_name(self, data):
"""Parse the 'g' line. This indicates the start of a group (surface)."""
# Remove leading/trailing whitespace
data = data.strip()
LOG.debug("group name:%s", data)
# Note: Don't purge empty shapes/surfaces here, you might remove a new surface
# created by a preceding 'usemtl' line. Purging happens after parsing.
self.curr_surf_name = data
# Use most recently specified material (unless overridden later)
self.curr_surf = Surface(self.next_polygon_index, self.curr_mtl_name)
self.shapes[self.curr_shape_name].surfaces[data] = self.curr_surf
def __read_material(self, data):
"""Parse the 'usemtl' line. This references a material."""
data = data.strip()
LOG.debug("material name:%s", data)
self.curr_mtl_name = data
if self.curr_surf.is_empty():
# No polygons (yet) in the current surface, so just set its material
self.curr_surf.material_name = data
elif self.curr_surf.material_name != data:
# Current surface already has a number of faces of a different material
# so create a new surface for this new material
self.curr_surf = Surface(self.next_polygon_index, data)
self.curr_surf_name = _increment_name(self.curr_surf_name)
self.shapes[self.curr_shape_name].surfaces[self.curr_surf_name] = self.curr_surf
def __read_vertex_position(self, data):
"""Parse the 'v' line. This line contains the vertex position."""
sv = data.split(' ')
position = (float(sv[0]), float(sv[1]), float(sv[2]))
self.positions.append(position)
# Do not calculate the bounding box here, as some unused vertices may later be removed
def __read_vertex_uvs(self, data):
"""Parse the 'vt' line. This line contains the vertex uvs."""
# Texture coordinates
sv = data.split(' ')
if len(sv) == 2:
uvs = (float(sv[0]), float(sv[1]))
else:
uvs = (float(sv[0]), float(sv[1]), float(sv[2]))
self.uvs[0].append(uvs)
def __read_vertex_normal(self, data):
"""Parse the 'vn' line. This line contains the vertex normals."""
(sv0, sv1, sv2) = data.split(' ')
normal = (float(sv0), float(sv1), -float(sv2))
self.normals.append(normal)
def __read_face(self, data):
"""Parse the 'f' line. This line contains a face.
        Constructs a tri-fan if the face has more than 3 vertices."""
def __extract_indices(si):
"""Add a tuple of indices."""
# Vertex index / Texture index / Normal index
# Subtract 1 to count indices from 0, not from 1.
s = si.split('/')
if len(s) == 1:
return [int(s[0]) - 1]
if len(s) == 2:
return (int(s[0]) - 1, int(s[1]) - 1)
else:
if len(s[1]) == 0:
return (int(s[0]) - 1, int(s[2]) - 1)
else:
return (int(s[0]) - 1, int(s[1]) - 1, int(s[2]) - 1)
# Split string into list of vertices
si = data.split()
indices = self.indices
# Construct a tri-fan of all the vertices supplied (no support for quadrilaterals or polygons)
# Origin vertex of fan
i0 = __extract_indices(si[0])
prevInd = __extract_indices(si[1])
for i in xrange(2, len(si)):
currInd = __extract_indices(si[i])
indices.append(i0)
indices.append(prevInd)
indices.append(currInd)
prevInd = currInd
num_triangles = len(si) - 2
self.next_polygon_index += num_triangles
self.curr_surf.count += num_triangles
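    # Editor's illustration of the tri-fan above: a quad face such as
    #   f 1/1/1 2/2/2 3/3/3 4/4/4
    # is emitted as two triangles (v0, v1, v2) and (v0, v2, v3), i.e. the
    # zero-based index tuples (0, 1, 2) and (0, 2, 3), adding 2 to both
    # next_polygon_index and the current surface's count.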
def __ignore_comments(self, data):
"""Ignore comments."""
#######################################################################################################################
def parse(self, f, prefix = ""):
"""Parse an OBJ file stream."""
chunks_with_data = { 'v': Obj2json.__read_vertex_position,
'vt': Obj2json.__read_vertex_uvs,
'vn': Obj2json.__read_vertex_normal,
'f': Obj2json.__read_face,
'o': Obj2json.__read_object_name,
'g': Obj2json.__read_group_name,
'usemtl': Obj2json.__read_material,
'#': Obj2json.__ignore_comments}
for lineNumber, line in enumerate(f):
# The middle of the tuple is just whitespace
(command, _, data) = line.partition(' ')
if len(data) > 0:
data = data[:-1]
while len(data) > 0 and data[0] == ' ':
data = data[1:]
if len(data) > 0:
# After stripping away excess whitespace
address_string = "(%d) %s%s" % (lineNumber, prefix, command)
if command in chunks_with_data:
LOG.debug(address_string)
# Parse data depending on its type
chunks_with_data[command](self, data)
else:
LOG.warning(address_string + " *unsupported*")
def unpack_vertices(self):
"""Unpack the vertices."""
# Consecutive list of nodes making up faces (specifically, triangles)
indices = []
num_components = 1
if 0 < len(self.uvs[0]):
num_components += 1
if 0 < len(self.normals):
num_components += 1
# A node of a face definition consists of a vertex index, and optional
# texture coord index and an optional normal vector index
# Thus, the length of an element of self.indices can be 1, 2 or 3.
if num_components == 1:
# No texture coordinates (uv) or normal vector specified.
indices = [x[0] for x in self.indices]
else:
old_positions = self.positions
old_uvs = self.uvs[0]
old_normals = self.normals
positions = [] # Vertex position
uvs = [] # Texture coordinate
normals = []
mapping = {}
if num_components == 2:
for indx in self.indices:
i0 = indx[0]
if len(indx) >= 2:
i1 = indx[1]
else:
i1 = 0
hash_string = "%x:%x" % (i0, i1)
if hash_string in mapping:
indices.append(mapping[hash_string])
else:
newindx = len(positions)
mapping[hash_string] = newindx
indices.append(newindx)
positions.append(old_positions[i0])
# Figure out whether 2nd value is uv or normal
if len(old_uvs) != 0:
uvs.append(old_uvs[i1])
else:
normals.append(old_normals[i1])
else:
for indx in self.indices:
i0 = indx[0]
if len(indx) >= 2:
i1 = indx[1]
else:
i1 = 0
if len(indx) >= 3:
i2 = indx[2]
else:
i2 = 0
hash_string = "%x:%x:%x" % (i0, i1, i2)
if hash_string in mapping:
indices.append(mapping[hash_string])
else:
newindx = len(positions)
mapping[hash_string] = newindx
indices.append(newindx)
positions.append(old_positions[i0])
uvs.append(old_uvs[i1])
normals.append(old_normals[i2])
# Reassign the vertex positions, texture coordinates and normals, so
# that they coincide with the indices defining the triangles.
self.positions = positions
self.uvs[0] = uvs
self.normals = normals
self.generate_primitives(indices)
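    # Editor's note on the mapping above: each distinct position/uv/normal
    # index combination (hashed as "i0:i1" or "i0:i1:i2") becomes exactly one
    # output vertex, so OBJ face nodes shared between polygons are
    # de-duplicated, while a position reused with different uvs or normals is
    # split into separate vertices.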
def extract_nbt_options(self, definitions_asset):
"""Returns whether normals and tangents/binormals are needed, and whether they should be generated.
Loops over each material and checks their meta attributes to extract this information."""
# Record the whether normals/tangents need to be generated, and which shapes require these options
generate_normals = False
generate_tangents = False
need_normals = set()
need_tangents = set()
for shape_name in self.shapes.iterkeys():
for surface_name in self.shapes[shape_name].surfaces.iterkeys():
material_name = self.shapes[shape_name].surfaces[surface_name].material_name
material = definitions_asset.retrieve_material(material_name, default = True)
effect = definitions_asset.retrieve_effect(material['effect'])
# Rules used: Generating tangents implies needing tangents
# Needing tangents implies needing normals
# Needing tangents/normals implies generating them if they aren't present
if material.meta('generate_tangents') or effect is not None and effect.meta('generate_tangents'):
generate_tangents = True
need_tangents.add(shape_name)
elif material.meta('tangents') or effect is not None and effect.meta('tangents'):
need_tangents.add(shape_name)
# Generate tangents if any material needs tangents and you haven't parsed any,
# or if any materials ask you to generate tangents
generate_tangents = generate_tangents or not len(self.tangents) or not len(self.binormals)
if material.meta('generate_normals') or effect is not None and effect.meta('generate_normals'):
generate_normals = True
need_normals.add(shape_name)
elif material.meta('normals') or effect is not None and effect.meta('normals'):
need_normals.add(shape_name)
# Same reasoning as with generating tangents
generate_normals = generate_normals or not len(self.normals)
if generate_tangents and 0 == len(self.uvs[0]):
LOG.debug("Can't generate nbts without uvs:%i", len(self.uvs[0]))
generate_tangents = False
need_tangents = set()
return (need_normals, generate_normals, need_tangents, generate_tangents)
# pylint: enable=R0904
#######################################################################################################################
def parse(input_filename="default.obj", output_filename="default.json", asset_url="", asset_root=".",
infiles=None, options=None):
"""Utility function to convert an OBJ file into a JSON file."""
definitions_asset = standard_include(infiles)
with open(input_filename, 'r') as source:
asset = Obj2json(basename(input_filename))
asset.parse(source)
# Remove any and all unused (e.g. default) shapes and surfaces
purge_empty(asset.shapes, recurseOnce = True)
# Generate primitives
asset.unpack_vertices()
# Remove any degenerate primitives unless they're requested to be kept
        keep_degenerates = False  # flipped to True below if any material requests it
for shape in asset.shapes:
for _, surface in asset.shapes[shape].surfaces.iteritems():
material = definitions_asset.retrieve_material(surface.material_name)
if material.meta('keep_degenerates'):
keep_degenerates = True
if not keep_degenerates:
asset.remove_degenerate_primitives()
# Remove any unused vertices and calculate a bounding box
asset.remove_redundant_vertexes()
asset.generate_bbox()
# Generate normals/tangents if required
(need_normals, generate_normals,
need_tangents, generate_tangents) = asset.extract_nbt_options(definitions_asset)
if generate_tangents:
if generate_normals:
asset.generate_normals()
asset.generate_smooth_nbts()
asset.invert_v_texture_map()
elif generate_normals:
asset.generate_normals()
asset.smooth_normals()
json_asset = JsonAsset()
for shape_name in asset.shapes.iterkeys():
json_asset.attach_shape(shape_name)
node_name = NodeName("node-%s" % shape_name)
json_asset.attach_node(node_name)
#TODO: Should the following be divided into separate shapes?
json_asset.attach_positions(asset.positions, shape_name)
# Attach texture map, normals and tangents/binormals if required
if len(asset.uvs[0]) != 0:
json_asset.attach_uvs(asset.uvs[0], shape_name)
if shape_name in need_tangents:
if len(asset.tangents):
# Needing tangents implies needing normals and binormals
json_asset.attach_nbts(asset.normals, asset.tangents, asset.binormals, shape_name)
else:
LOG.error('tangents requested for shape %s, but no tangents or uvs available!', shape_name)
elif shape_name in need_normals:
json_asset.attach_normals(asset.normals, shape_name)
for surface_name, surface in asset.shapes[shape_name].surfaces.iteritems():
material = definitions_asset.retrieve_material(surface.material_name)
effect = material.get('effect', DEFAULT_EFFECT_NAME)
effect_name = "effect-%s" % shape_name
material_name = "material-%s" % surface.material_name
instance_name = "instance-%s-%s" % (shape_name, surface_name)
json_asset.attach_effect(effect_name, effect)
mat_params = material.get('parameters', None)
json_asset.attach_material(material_name, effect=effect, parameters=mat_params)
def textures(mat_params):
for k, v in mat_params.iteritems():
                        # If a parameter of a material has a string value, it is assumed to be a texture definition
if isinstance(v, basestring):
# Return the type of the texture (e.g. 'diffuse')
yield k
for t_type in textures(mat_params):
json_asset.attach_texture(material_name, t_type, mat_params[t_type])
first = surface.first
last = first + surface.count
json_asset.attach_surface(asset.primitives[first:last], JsonAsset.SurfaceTriangles,
shape_name, name=surface_name)
json_asset.attach_node_shape_instance(node_name, instance_name, shape_name,
material_name, surface=surface_name)
json_asset.attach_bbox(asset.bbox)
standard_json_out(json_asset, output_filename, options)
return json_asset
if __name__ == "__main__":
standard_main(parse, __version__,
"Convert LightWave (.obj) OBJ2 files into a Turbulenz JSON asset. Supports generating NBTs.",
__dependencies__)
| 47.715217 | 119 | 0.58112 |
acf4c7d2d9161eee5bf5930634ce169a033b1675 | 1,917 | py | Python | DQM/HLXMonitor/test/hlx_dqm_sourceclient_vme22_cfg.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 3 | 2018-08-24T19:10:26.000Z | 2019-02-19T11:45:32.000Z | DQM/HLXMonitor/test/hlx_dqm_sourceclient_vme22_cfg.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 7 | 2016-07-17T02:34:54.000Z | 2019-08-13T07:58:37.000Z | DQM/HLXMonitor/test/hlx_dqm_sourceclient_vme22_cfg.py | NTrevisani/cmssw | a212a27526f34eb9507cf8b875c93896e6544781 | [
"Apache-2.0"
] | 5 | 2018-08-21T16:37:52.000Z | 2020-01-09T13:33:17.000Z | import FWCore.ParameterSet.Config as cms
process = cms.Process("hlxdqmlive")
process.load("DQMServices.Components.MessageLogger_cfi")
process.load("DQM.HLXMonitor.hlx_dqm_sourceclient_vme22_cfi")
## For private server vme22 use an empty source
process.source = cms.Source("EmptySource")
## For testing dqmEnv ... for online
##process.load("DQM.Integration.test.inputsource_cfi")
##process.EventStreamHttpReader.consumerName = 'HLX DQM Consumer'
##process.EventStreamHttpReader.sourceURL = cms.string('http://srv-c2d05-05:50082/urn:xdaq-application:lid=29')
process.load("DQM.Integration.test.environment_cfi")
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(-1)
)
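# Editor's note: input = -1 is the standard CMSSW convention for "no event
# limit", i.e. the job runs until the source is exhausted.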
process.hlxQualityTester = cms.EDAnalyzer("QualityTester",
# default is 1
prescaleFactor = cms.untracked.int32(10000),
# use eventloop for testing only ! default is false
# untracked bool testInEventloop = false
# use qtest on endRun or endJob
# untracked bool qtestOnEndRun = false
# untracked bool qtestOnEndJob = false
qtList = cms.untracked.FileInPath('DQM/HLXMonitor/test/HLXQualityTests.xml')
)
##process.p = cms.Path(process.hlxdqmsource*process.hlxQualityTester*process.dqmSaver)
##process.p = cms.Path(process.hlxdqmsource*process.hlxQualityTester*process.dqmEnv*process.dqmSaver)
process.p = cms.Path(process.hlxdqmsource*process.hlxQualityTester)
process.hlxdqmsource.outputDir = '/opt/dqm/data/live'
process.hlxdqmsource.PrimaryHLXDAQIP = 'vmepcs2f17-22'
process.hlxdqmsource.SecondaryHLXDAQIP = 'vmepcs2f17-19'
process.hlxdqmsource.SourcePort = 51007
process.DQM.collectorHost = 'localhost'
process.DQM.collectorPort = 9190
process.DQMStore.verbose = 0
process.dqmEnv.subSystemFolder = 'HLX'
process.dqmSaver.dirName = '/opt/dqm/data/tmp'
##process.dqmSaver.saveByRun = 1
process.dqmSaver.saveAtJobEnd = True
process.dqmSaver.saveByTime = 4
process.dqmSaver.saveByMinute = 8
| 38.34 | 111 | 0.784559 |
acf4c8391eb6c8300ac4505b6aa29f0e7d5f73ed | 102,276 | py | Python | src/sqlfluff/dialects/dialect_postgres.py | floydt/sqlfluff | 68f479a53cfcabe67379dfb96539e516bf80d3c8 | [
"MIT"
] | 1 | 2021-12-29T18:34:20.000Z | 2021-12-29T18:34:20.000Z | src/sqlfluff/dialects/dialect_postgres.py | AI-App/SQLFluff | 64a2dc62e12712e3ee40d088413f1c209f8980fe | [
"MIT"
] | null | null | null | src/sqlfluff/dialects/dialect_postgres.py | AI-App/SQLFluff | 64a2dc62e12712e3ee40d088413f1c209f8980fe | [
"MIT"
] | null | null | null | """The PostgreSQL dialect."""
from sqlfluff.core.parser import (
OneOf,
AnyNumberOf,
Ref,
Sequence,
Bracketed,
OptionallyBracketed,
Anything,
BaseSegment,
Delimited,
RegexLexer,
RegexParser,
CodeSegment,
NamedParser,
SymbolSegment,
StartsWith,
CommentSegment,
Dedent,
SegmentGenerator,
)
from sqlfluff.core.dialects import load_raw_dialect
from sqlfluff.dialects.dialect_postgres_keywords import (
postgres_keywords,
get_keywords,
postgres_postgis_datatype_keywords,
)
ansi_dialect = load_raw_dialect("ansi")
postgres_dialect = ansi_dialect.copy_as("postgres")
postgres_dialect.insert_lexer_matchers(
# JSON Operators: https://www.postgresql.org/docs/9.5/functions-json.html
[
# Explanation for the regex
# - (?s) Switch - .* includes newline characters
# - U& - must start with U&
# - (('')+?(?!')|('.*?(?<!')(?:'')*'(?!')))
# ('')+? Any non-zero number of pairs of single quotes -
# (?!') that are not then followed by a single quote
# | OR
# ('.*?(?<!')(?:'')*'(?!'))
# '.*? A single quote followed by anything (non-greedy)
# (?<!')(?:'')* Any even number of single quotes, including zero
# '(?!') Followed by a single quote, which is not followed by a single quote
# - (\s*UESCAPE\s*'[^0-9A-Fa-f'+\-\s)]')?
# \s*UESCAPE\s* Whitespace, followed by UESCAPE, followed by whitespace
# '[^0-9A-Fa-f'+\-\s)]' Any character that isn't A-F, a-f, 0-9, +-, or whitespace, in quotes
# ? This last block is optional
RegexLexer(
"unicode_single_quote",
r"(?s)U&(('')+?(?!')|('.*?(?<!')(?:'')*'(?!')))(\s*UESCAPE\s*'[^0-9A-Fa-f'+\-\s)]')?",
CodeSegment,
),
# This is similar to the Unicode regex, the key differences being:
# - E - must start with E
# - The final quote character must be preceded by:
# (?<!\\)(?:\\\\)*(?<!')(?:'') An even/zero number of \ followed by an even/zero number of '
# OR
# (?<!\\)(?:\\\\)*\\(?<!')(?:'')*' An odd number of \ followed by an odd number of '
# There is no UESCAPE block
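        # Illustrative strings this lexer is intended to match, e.g.
        #   E'O\'Reilly'  and  E'line one\nline two'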
RegexLexer(
"escaped_single_quote",
r"(?s)E(('')+?(?!')|'.*?((?<!\\)(?:\\\\)*(?<!')(?:'')*|(?<!\\)(?:\\\\)*\\(?<!')(?:'')*')'(?!'))",
CodeSegment,
),
# Double quote Unicode string cannot be empty, and have no single quote escapes
RegexLexer(
"unicode_double_quote",
r'(?s)U&".+?"(\s*UESCAPE\s*\'[^0-9A-Fa-f\'+\-\s)]\')?',
CodeSegment,
),
RegexLexer(
"json_operator",
r"->>|#>>|->|#>|@>|<@|\?\||\?|\?&|#-",
CodeSegment,
),
],
before="not_equal",
)
postgres_dialect.insert_lexer_matchers(
[
# Explanation for the regex
# \\([^(\\\r\n)])+((\\\\)|(?=\n)|(?=\r\n))?
# \\ Starts with backslash
# ([^\\\r\n])+ Anything that is not a newline or a backslash
# (
# (\\\\) Double backslash
# | OR
# (?=\n) The next character is a newline
# | OR
# (?=\r\n) The next 2 characters are a carriage return and a newline
# )
# ? The previous clause is optional
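        # Illustrative psql meta commands this lexer should pick up, e.g.
        #   \d my_table
        #   \timing on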
RegexLexer(
# For now we'll just treat meta syntax like comments and so just ignore them.
# In future we may want to enhance this to actually parse them to ensure they are
# valid meta commands.
"meta_command",
r"\\([^\\\r\n])+((\\\\)|(?=\n)|(?=\r\n))?",
CommentSegment,
)
],
before="code", # Final thing to search for - as psql specific
)
postgres_dialect.patch_lexer_matchers(
[
# Patching comments to remove hash comments
RegexLexer(
"inline_comment",
r"(--)[^\n]*",
CommentSegment,
segment_kwargs={"trim_start": ("--")},
),
# In Postgres, the only escape character is ' for single quote strings
RegexLexer(
"single_quote", r"(?s)('')+?(?!')|('.*?(?<!')(?:'')*'(?!'))", CodeSegment
),
# In Postgres, there is no escape character for double quote strings
RegexLexer("double_quote", r'(?s)".+?"', CodeSegment),
RegexLexer("code", r"[0-9a-zA-Z_]+[0-9a-zA-Z_$]*", CodeSegment),
]
)
postgres_dialect.sets("reserved_keywords").update(
get_keywords(postgres_keywords, "reserved")
)
postgres_dialect.sets("unreserved_keywords").update(
get_keywords(postgres_keywords, "non-reserved")
)
postgres_dialect.sets("reserved_keywords").difference_update(
get_keywords(postgres_keywords, "not-keyword")
)
postgres_dialect.sets("unreserved_keywords").difference_update(
get_keywords(postgres_keywords, "not-keyword")
)
# Add datetime units
postgres_dialect.sets("datetime_units").update(
[
"CENTURY",
"DECADE",
"DOW",
"DOY",
"EPOCH",
"ISODOW",
"ISOYEAR",
"MICROSECONDS",
"MILLENNIUM",
"MILLISECONDS",
"TIMEZONE",
"TIMEZONE_HOUR",
"TIMEZONE_MINUTE",
]
)
postgres_dialect.add(
JsonOperatorSegment=NamedParser(
"json_operator", SymbolSegment, name="json_operator", type="binary_operator"
),
DollarQuotedLiteralSegment=NamedParser(
"dollar_quote", CodeSegment, name="dollar_quoted_literal", type="literal"
),
SimpleGeometryGrammar=AnyNumberOf(Ref("NumericLiteralSegment")),
)
postgres_dialect.replace(
ComparisonOperatorGrammar=OneOf(
Ref("EqualsSegment"),
Ref("GreaterThanSegment"),
Ref("LessThanSegment"),
Ref("GreaterThanOrEqualToSegment"),
Ref("LessThanOrEqualToSegment"),
Ref("NotEqualToSegment_a"),
Ref("NotEqualToSegment_b"),
Ref("LikeOperatorSegment"),
Sequence("IS", "DISTINCT", "FROM"),
Sequence("IS", "NOT", "DISTINCT", "FROM"),
),
NakedIdentifierSegment=SegmentGenerator(
# Generate the anti template from the set of reserved keywords
lambda dialect: RegexParser(
            # Can't begin with $; may contain letters, digits, underscores and $, but can't be all digits.
r"([A-Z_]+|[0-9]+[A-Z_$])[A-Z0-9_$]*",
CodeSegment,
name="naked_identifier",
type="identifier",
anti_template=r"^(" + r"|".join(dialect.sets("reserved_keywords")) + r")$",
)
),
ParameterNameSegment=RegexParser(
r"[A-Z_][A-Z0-9_$]*", CodeSegment, name="parameter", type="parameter"
),
FunctionNameIdentifierSegment=RegexParser(
r"[A-Z_][A-Z0-9_$]*",
CodeSegment,
name="function_name_identifier",
type="function_name_identifier",
),
QuotedLiteralSegment=OneOf(
NamedParser("single_quote", CodeSegment, name="quoted_literal", type="literal"),
NamedParser(
"unicode_single_quote", CodeSegment, name="quoted_literal", type="literal"
),
NamedParser(
"escaped_single_quote", CodeSegment, name="quoted_literal", type="literal"
),
),
QuotedIdentifierSegment=OneOf(
NamedParser(
"double_quote", CodeSegment, name="quoted_identifier", type="identifier"
),
NamedParser(
"unicode_double_quote", CodeSegment, name="quoted_literal", type="literal"
),
),
PostFunctionGrammar=OneOf(
Ref("WithinGroupClauseSegment"),
Ref("OverClauseSegment"),
# Filter clause supported by both Postgres and SQLite
Ref("FilterClauseGrammar"),
),
BinaryOperatorGrammar=OneOf(
Ref("ArithmeticBinaryOperatorGrammar"),
Ref("StringBinaryOperatorGrammar"),
Ref("BooleanBinaryOperatorGrammar"),
Ref("ComparisonOperatorGrammar"),
# Add JSON operators
Ref("JsonOperatorSegment"),
),
FunctionParameterGrammar=Sequence(
OneOf("IN", "OUT", "INOUT", "VARIADIC", optional=True),
OneOf(
Ref("DatatypeSegment"),
Sequence(Ref("ParameterNameSegment"), Ref("DatatypeSegment")),
),
Sequence(
OneOf("DEFAULT", Ref("EqualsSegment")), Ref("LiteralGrammar"), optional=True
),
),
FrameClauseUnitGrammar=OneOf("RANGE", "ROWS", "GROUPS"),
# In Postgres, column references may be followed by a time zone cast in all cases.
# For more information, see https://www.postgresql.org/docs/11/functions-datetime.html
ColumnReferenceSegment=Sequence(
ansi_dialect.get_segment("ColumnReferenceSegment"),
Ref("ArrayAccessorSegment", optional=True),
Ref("TimeZoneGrammar", optional=True),
),
# Postgres supports the non-standard ISNULL and NONNULL comparison operators. See
# https://www.postgresql.org/docs/14/functions-comparison.html
IsNullGrammar=Ref.keyword("ISNULL"),
NotNullGrammar=Ref.keyword("NOTNULL"),
JoinKeywords=Sequence("JOIN", Sequence("LATERAL", optional=True)),
SelectClauseElementTerminatorGrammar=OneOf(
"INTO",
"FROM",
"WHERE",
Sequence("ORDER", "BY"),
"LIMIT",
Ref("CommaSegment"),
Ref("SetOperatorSegment"),
),
LiteralGrammar=OneOf(
Ref("QuotedLiteralSegment"),
Ref("NumericLiteralSegment"),
Ref("BooleanLiteralGrammar"),
Ref("QualifiedNumericLiteralSegment"),
# NB: Null is included in the literals, because it is a keyword which
# can otherwise be easily mistaken for an identifier.
Ref("NullLiteralSegment"),
Ref("DateTimeLiteralGrammar"),
Ref("PsqlVariableGrammar"),
Sequence(Ref("SimpleArrayTypeGrammar"), Ref("ArrayLiteralSegment")),
),
SimpleArrayTypeGrammar=Ref.keyword("ARRAY"),
)
@postgres_dialect.segment()
class PsqlVariableGrammar(BaseSegment):
"""PSQl Variables :thing, :'thing', :"thing"."""
type = "psql_variable"
match_grammar = Sequence(
OptionallyBracketed(
Ref("ColonSegment"),
OneOf(
Ref("ParameterNameSegment"),
Ref("QuotedLiteralSegment"),
Ref("QuotedIdentifierSegment"),
),
)
)
@postgres_dialect.segment()
class TimeZoneGrammar(BaseSegment):
"""Literal Date Time with optional casting to Time Zone."""
type = "time_zone_grammar"
match_grammar = AnyNumberOf(
Sequence("AT", "TIME", "ZONE", Ref("QuotedLiteralSegment")),
)
@postgres_dialect.segment(replace=True)
class ArrayAccessorSegment(BaseSegment):
"""Overwrites Array Accessor in ANSI to allow n many consecutive brackets."""
type = "array_accessor"
match_grammar = Sequence(
AnyNumberOf(
Bracketed(
Sequence(
OneOf(
Ref("QualifiedNumericLiteralSegment"),
Ref("NumericLiteralSegment"),
),
Sequence(
Ref("SliceSegment"),
OneOf(
Ref("QualifiedNumericLiteralSegment"),
Ref("NumericLiteralSegment"),
),
optional=True,
),
optional=True,
),
bracket_type="square",
)
)
)
@postgres_dialect.segment()
class SimpleArrayContentsGrammar(BaseSegment):
"""This Grammar is Literals in Square Brackets, comma delimited."""
type = "simple_array_contents_grammar"
match_grammar = Bracketed(Delimited(Ref("LiteralGrammar")), bracket_type="square")
@postgres_dialect.segment(replace=True)
class ArrayLiteralSegment(BaseSegment):
"""Overwrites ANSI to allow for nested Arrays."""
type = "array_contents_grammar"
match_grammar = Sequence(
OneOf(
Ref("SimpleArrayContentsGrammar"),
Bracketed(Delimited(Ref("ArrayLiteralSegment")), bracket_type="square"),
)
)
@postgres_dialect.segment()
class DateTimeTypeIdentifier(BaseSegment):
"""Date Time Type."""
type = "datetime_type_identifier"
match_grammar = OneOf(
"DATE",
Sequence(
OneOf("TIME", "TIMESTAMP"),
Bracketed(Ref("NumericLiteralSegment"), optional=True),
Sequence(OneOf("WITH", "WITHOUT"), "TIME", "ZONE", optional=True),
),
Sequence("TIMESTAMPTZ", Bracketed(Ref("NumericLiteralSegment"), optional=True)),
"INTERVAL",
)
@postgres_dialect.segment(replace=True)
class DateTimeLiteralGrammar(BaseSegment):
"""Literal Date Time with optional casting to Time Zone."""
type = "datetime_literal"
match_grammar = Sequence(
Ref("DateTimeTypeIdentifier"),
Ref("QuotedLiteralSegment"),
Ref("TimeZoneGrammar", optional=True),
)
@postgres_dialect.segment(replace=True)
class DatatypeSegment(BaseSegment):
"""A data type segment.
Supports timestamp with(out) time zone. Doesn't currently support intervals.
"""
type = "data_type"
match_grammar = OneOf(
Ref("WellKnownTextGeometrySegment"),
Sequence(
Ref("DateTimeTypeIdentifier"),
Ref("TimeZoneGrammar", optional=True),
),
Sequence(
OneOf(
Sequence("DOUBLE", "PRECISION"),
Sequence(
OneOf("CHARACTER", "BINARY"),
OneOf("VARYING", Sequence("LARGE", "OBJECT")),
),
Sequence(
# Some dialects allow optional qualification of data types with schemas
Sequence(
Ref("SingleIdentifierGrammar"),
Ref("DotSegment"),
allow_gaps=False,
optional=True,
),
Ref("DatatypeIdentifierSegment"),
OneOf(
Ref("ArrayAccessorSegment"),
Sequence(
Ref("SimpleArrayTypeGrammar"), Ref("ArrayLiteralSegment")
),
optional=True,
),
allow_gaps=False,
),
),
Bracketed(
OneOf(
Delimited(Ref("ExpressionSegment")),
# The brackets might be empty for some cases...
optional=True,
),
# There may be no brackets for some data types
optional=True,
),
Ref("CharCharacterSetSegment", optional=True),
),
)
@postgres_dialect.segment(replace=True)
class CreateFunctionStatementSegment(BaseSegment):
"""A `CREATE FUNCTION` statement.
    This extends the "common subset" structure defined in the ANSI dialect
    with Postgres-specific options.
postgres: https://www.postgresql.org/docs/13/sql-createfunction.html
"""
type = "create_function_statement"
match_grammar = Sequence(
"CREATE",
Sequence("OR", "REPLACE", optional=True),
Ref("TemporaryGrammar", optional=True),
"FUNCTION",
Anything(),
)
parse_grammar = Sequence(
"CREATE",
Sequence("OR", "REPLACE", optional=True),
Ref("TemporaryGrammar", optional=True),
"FUNCTION",
Sequence("IF", "NOT", "EXISTS", optional=True),
Ref("FunctionNameSegment"),
Ref("FunctionParameterListGrammar"),
Sequence( # Optional function return type
"RETURNS",
OneOf(
Sequence(
"TABLE",
Bracketed(
Delimited(
OneOf(
Ref("DatatypeSegment"),
Sequence(
Ref("ParameterNameSegment"), Ref("DatatypeSegment")
),
),
delimiter=Ref("CommaSegment"),
)
),
optional=True,
),
Sequence(
"SETOF",
Ref("DatatypeSegment"),
),
Ref("DatatypeSegment"),
),
optional=True,
),
Ref("FunctionDefinitionGrammar"),
)
@postgres_dialect.segment()
class DropFunctionStatementSegment(BaseSegment):
"""A `DROP FUNCTION` statement.
As per the specification: https://www.postgresql.org/docs/14/sql-dropfunction.html
"""
type = "drop_function_statement"
match_grammar = Sequence(
"DROP",
"FUNCTION",
Ref("IfExistsGrammar", optional=True),
Delimited(
Sequence(
Ref("FunctionNameSegment"),
Ref("FunctionParameterListGrammar", optional=True),
)
),
OneOf("CASCADE", "RESTRICT", optional=True),
)
@postgres_dialect.segment()
class AlterFunctionStatementSegment(BaseSegment):
"""A `ALTER FUNCTION` statement.
As per the specification: https://www.postgresql.org/docs/14/sql-alterfunction.html
"""
type = "alter_function_statement"
match_grammar = StartsWith(Sequence("ALTER", "FUNCTION"))
parse_grammar = Sequence(
"ALTER",
"FUNCTION",
Delimited(
Sequence(
Ref("FunctionNameSegment"),
Ref("FunctionParameterListGrammar", optional=True),
)
),
OneOf(
Ref("AlterFunctionActionSegment", optional=True),
Sequence("RENAME", "TO", Ref("FunctionNameSegment")),
Sequence("SET", "SCHEMA", Ref("SchemaReferenceSegment")),
Sequence(
"OWNER",
"TO",
OneOf(
OneOf(Ref("ParameterNameSegment"), Ref("QuotedIdentifierSegment")),
"CURRENT_ROLE",
"CURRENT_USER",
"SESSION_USER",
),
),
Sequence(
Ref.keyword("NO", optional=True),
"DEPENDS",
"ON",
"EXTENSION",
),
),
)
@postgres_dialect.segment()
class AlterFunctionActionSegment(BaseSegment):
"""Alter Function Action Segment.
Matches the definition of action in https://www.postgresql.org/docs/14/sql-alterfunction.html
"""
type = "alter_function_action_segment"
match_grammar = Sequence(
OneOf(
OneOf(
Sequence("CALLED", "ON", "NULL", "INPUT"),
Sequence("RETURNS", "NULL", "ON", "NULL", "INPUT"),
"STRICT",
),
OneOf("IMMUTABLE", "STABLE", "VOLATILE"),
Sequence(Ref.keyword("NOT", optional=True), "LEAKPROOF"),
Sequence(
Ref.keyword("EXTERNAL", optional=True),
"SECURITY",
OneOf("DEFINER", "INVOKER"),
),
Sequence("PARALLEL", OneOf("UNSAFE", "RESTRICTED", "SAFE")),
Sequence("COST", Ref("NumericLiteralSegment")),
Sequence("ROWS", Ref("NumericLiteralSegment")),
Sequence("SUPPORT", Ref("ParameterNameSegment")),
Sequence(
"SET",
Ref("ParameterNameSegment"),
OneOf(
Sequence(
OneOf("TO", Ref("EqualsSegment")),
OneOf(
Ref("LiteralGrammar"),
Ref("NakedIdentifierSegment"),
"DEFAULT",
),
),
Sequence("FROM", "CURRENT"),
),
),
Sequence("RESET", OneOf("ALL", Ref("ParameterNameSegment"))),
),
Ref.keyword("RESTRICT", optional=True),
)
@postgres_dialect.segment()
class WellKnownTextGeometrySegment(BaseSegment):
"""A Data Type Segment to identify Well Known Text Geometric Data Types.
As specified in https://postgis.net/stuff/postgis-3.1.pdf
    This approach is to maximise 'accepted code' for the parser, rather than being overly restrictive.
"""
type = "wkt_geometry_type"
_geometry_type_keywords = [x[0] for x in postgres_postgis_datatype_keywords]
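    # Intended to accept e.g. POINT(1 2), LINESTRING(0 0, 1 1) and the
    # typmod style GEOMETRY(POINT, 4326).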
match_grammar = OneOf(
Sequence(
OneOf(*_geometry_type_keywords),
Bracketed(
Delimited(
OptionallyBracketed(Delimited(Ref("SimpleGeometryGrammar"))),
# 2D Arrays of coordinates - to specify surfaces
Bracketed(
Delimited(Bracketed(Delimited(Ref("SimpleGeometryGrammar"))))
),
Ref("WellKnownTextGeometrySegment"),
)
),
),
Sequence(
OneOf("GEOMETRY", "GEOGRAPHY"),
Bracketed(
Sequence(
OneOf(*_geometry_type_keywords, "GEOMETRY", "GEOGRAPHY"),
Ref("CommaSegment"),
Ref("NumericLiteralSegment"),
)
),
),
)
@postgres_dialect.segment(replace=True)
class FunctionDefinitionGrammar(BaseSegment):
"""This is the body of a `CREATE FUNCTION AS` statement.
Options supported as defined in https://www.postgresql.org/docs/13/sql-createfunction.html
"""
match_grammar = Sequence(
AnyNumberOf(
Sequence("LANGUAGE", Ref("ParameterNameSegment")),
Sequence("TRANSFORM", "FOR", "TYPE", Ref("ParameterNameSegment")),
Ref.keyword("WINDOW"),
OneOf("IMMUTABLE", "STABLE", "VOLATILE"),
Sequence(Ref.keyword("NOT", optional=True), "LEAKPROOF"),
OneOf(
Sequence("CALLED", "ON", "NULL", "INPUT"),
Sequence("RETURNS", "NULL", "ON", "NULL", "INPUT"),
"STRICT",
),
Sequence(
Ref.keyword("EXTERNAL", optional=True),
"SECURITY",
OneOf("INVOKER", "DEFINER"),
),
Sequence("PARALLEL", OneOf("UNSAFE", "RESTRICTED", "SAFE")),
Sequence("COST", Ref("NumericLiteralSegment")),
Sequence("ROWS", Ref("NumericLiteralSegment")),
Sequence("SUPPORT", Ref("ParameterNameSegment")),
Sequence(
"SET",
Ref("ParameterNameSegment"),
OneOf(
Sequence(
OneOf("TO", Ref("EqualsSegment")),
Delimited(
OneOf(
Ref("ParameterNameSegment"),
Ref("LiteralGrammar"),
),
delimiter=Ref("CommaSegment"),
),
),
Sequence("FROM", "CURRENT"),
),
),
Sequence(
"AS",
OneOf(
Ref("QuotedLiteralSegment"),
Ref("DollarQuotedLiteralSegment"),
Sequence(
Ref("QuotedLiteralSegment"),
Ref("CommaSegment"),
Ref("QuotedLiteralSegment"),
),
),
),
),
Sequence(
"WITH",
Bracketed(
Delimited(Ref("ParameterNameSegment"), delimiter=Ref("CommaSegment"))
),
optional=True,
),
)
@postgres_dialect.segment()
class IntoClauseSegment(BaseSegment):
"""Into Clause Segment.
As specified in https://www.postgresql.org/docs/14/sql-selectinto.html
"""
type = "into_clause"
match_grammar = Sequence(
"INTO",
OneOf("TEMPORARY", "TEMP", "UNLOGGED", optional=True),
Ref.keyword("TABLE", optional=True),
Ref("TableReferenceSegment"),
)
@postgres_dialect.segment(replace=True)
class UnorderedSelectStatementSegment(BaseSegment):
"""Overrides ANSI Statement, to allow for SELECT INTO statements."""
type = "select_statement"
match_grammar = ansi_dialect.get_segment(
"UnorderedSelectStatementSegment"
).match_grammar.copy()
parse_grammar = Sequence(
Ref("SelectClauseSegment"),
# Dedent for the indent in the select clause.
# It's here so that it can come AFTER any whitespace.
Dedent,
Ref("IntoClauseSegment", optional=True),
Ref("FromClauseSegment", optional=True),
Ref("WhereClauseSegment", optional=True),
Ref("GroupByClauseSegment", optional=True),
Ref("HavingClauseSegment", optional=True),
Ref("OverlapsClauseSegment", optional=True),
)
@postgres_dialect.segment(replace=True)
class SelectStatementSegment(BaseSegment):
"""Overrides ANSI as the parse grammar copy needs to be reapplied."""
type = "select_statement"
match_grammar = ansi_dialect.get_segment(
"SelectStatementSegment"
).match_grammar.copy()
parse_grammar = postgres_dialect.get_segment(
"UnorderedSelectStatementSegment"
).parse_grammar.copy(
insert=[
Ref("OrderByClauseSegment", optional=True),
Ref("LimitClauseSegment", optional=True),
Ref("NamedWindowSegment", optional=True),
]
)
@postgres_dialect.segment(replace=True)
class SelectClauseSegment(BaseSegment):
"""Overrides ANSI to allow INTO as a terminator."""
type = "select_clause"
match_grammar = StartsWith(
Sequence("SELECT", Ref("WildcardExpressionSegment", optional=True)),
terminator=OneOf(
"INTO",
"FROM",
"WHERE",
Sequence("ORDER", "BY"),
"LIMIT",
"OVERLAPS",
Ref("SetOperatorSegment"),
),
enforce_whitespace_preceding_terminator=True,
)
parse_grammar = Ref("SelectClauseSegmentGrammar")
@postgres_dialect.segment(replace=True)
class SelectClauseModifierSegment(BaseSegment):
"""Things that come after SELECT but before the columns."""
type = "select_clause_modifier"
match_grammar = OneOf(
Sequence("DISTINCT", Sequence("ON", Bracketed(Anything()), optional=True)),
"ALL",
)
parse_grammar = OneOf(
Sequence(
"DISTINCT",
Sequence(
"ON",
Bracketed(
Delimited(Ref("ExpressionSegment"), delimiter=Ref("CommaSegment"))
),
optional=True,
),
),
"ALL",
)
@postgres_dialect.segment()
class WithinGroupClauseSegment(BaseSegment):
"""An WITHIN GROUP clause for window functions.
https://www.postgresql.org/docs/current/functions-aggregate.html.
"""
type = "withingroup_clause"
match_grammar = Sequence(
"WITHIN",
"GROUP",
Bracketed(Anything(optional=True)),
)
parse_grammar = Sequence(
"WITHIN",
"GROUP",
Bracketed(Ref("OrderByClauseSegment", optional=True)),
)
@postgres_dialect.segment(replace=True)
class CreateRoleStatementSegment(BaseSegment):
"""A `CREATE ROLE` statement.
As per:
https://www.postgresql.org/docs/current/sql-createrole.html
"""
type = "create_role_statement"
match_grammar = ansi_dialect.get_segment(
"CreateRoleStatementSegment"
).match_grammar.copy(
insert=[
Sequence(
Ref.keyword("WITH", optional=True),
# Very permissive for now. Anything can go here.
Anything(),
)
],
)
@postgres_dialect.segment(replace=True)
class ExplainStatementSegment(ansi_dialect.get_segment("ExplainStatementSegment")): # type: ignore
"""An `Explain` statement.
EXPLAIN [ ( option [, ...] ) ] statement
EXPLAIN [ ANALYZE ] [ VERBOSE ] statement
https://www.postgresql.org/docs/9.1/sql-explain.html
"""
parse_grammar = Sequence(
"EXPLAIN",
OneOf(
Sequence(
Ref.keyword("ANALYZE", optional=True),
Ref.keyword("VERBOSE", optional=True),
),
Bracketed(
Delimited(Ref("ExplainOptionSegment"), delimiter=Ref("CommaSegment"))
),
optional=True,
),
ansi_dialect.get_segment("ExplainStatementSegment").explainable_stmt,
)
@postgres_dialect.segment()
class ExplainOptionSegment(BaseSegment):
"""An `Explain` statement option.
ANALYZE [ boolean ]
VERBOSE [ boolean ]
COSTS [ boolean ]
BUFFERS [ boolean ]
FORMAT { TEXT | XML | JSON | YAML }
https://www.postgresql.org/docs/9.1/sql-explain.html
"""
type = "explain_option"
flag_segment = Sequence(
OneOf("ANALYZE", "VERBOSE", "COSTS", "BUFFERS"),
OneOf(Ref("TrueSegment"), Ref("FalseSegment"), optional=True),
)
match_grammar = OneOf(
flag_segment,
Sequence(
"FORMAT",
OneOf("TEXT", "XML", "JSON", "YAML"),
),
)
@postgres_dialect.segment(replace=True)
class CreateTableStatementSegment(BaseSegment):
"""A `CREATE TABLE` statement.
As specified in https://www.postgresql.org/docs/13/sql-createtable.html
"""
type = "create_table_statement"
match_grammar = Sequence(
"CREATE",
OneOf(
Sequence(
OneOf("GLOBAL", "LOCAL", optional=True),
Ref("TemporaryGrammar", optional=True),
),
"UNLOGGED",
optional=True,
),
"TABLE",
Ref("IfNotExistsGrammar", optional=True),
Ref("TableReferenceSegment"),
OneOf(
# Columns and comment syntax:
Sequence(
Bracketed(
Delimited(
OneOf(
Sequence(
Ref("ColumnReferenceSegment"),
Ref("DatatypeSegment"),
Sequence(
"COLLATE",
Ref("QuotedLiteralSegment"),
optional=True,
),
AnyNumberOf(
Ref("ColumnConstraintSegment", optional=True)
),
),
Ref("TableConstraintSegment"),
Sequence(
"LIKE",
Ref("TableReferenceSegment"),
AnyNumberOf(Ref("LikeOptionSegment"), optional=True),
),
),
)
),
Sequence(
"INHERITS",
Bracketed(
Delimited(
Ref("TableReferenceSegment"), delimiter=Ref("CommaSegment")
)
),
optional=True,
),
),
# Create OF syntax:
Sequence(
"OF",
Ref("ParameterNameSegment"),
Bracketed(
Delimited(
Sequence(
Ref("ColumnReferenceSegment"),
Sequence("WITH", "OPTIONS", optional=True),
AnyNumberOf(Ref("ColumnConstraintSegment")),
),
Ref("TableConstraintSegment"),
delimiter=Ref("CommaSegment"),
),
optional=True,
),
),
# Create PARTITION OF syntax
Sequence(
"PARTITION",
"OF",
Ref("TableReferenceSegment"),
Bracketed(
Delimited(
Sequence(
Ref("ColumnReferenceSegment"),
Sequence("WITH", "OPTIONS", optional=True),
AnyNumberOf(Ref("ColumnConstraintSegment")),
),
Ref("TableConstraintSegment"),
delimiter=Ref("CommaSegment"),
),
optional=True,
),
OneOf(
Sequence("FOR", "VALUES", Ref("PartitionBoundSpecSegment")),
"DEFAULT",
),
),
),
AnyNumberOf(
Sequence(
"PARTITION",
"BY",
OneOf("RANGE", "LIST", "HASH"),
Bracketed(
AnyNumberOf(
Delimited(
Sequence(
OneOf(
Ref("ColumnReferenceSegment"),
Ref("FunctionSegment"),
),
AnyNumberOf(
Sequence(
"COLLATE",
Ref("QuotedLiteralSegment"),
optional=True,
),
Ref("ParameterNameSegment", optional=True),
),
),
delimiter=Ref("CommaSegment"),
)
)
),
),
Sequence("USING", Ref("ParameterNameSegment")),
OneOf(
Sequence(
"WITH",
Bracketed(
AnyNumberOf(
Sequence(
Ref("ParameterNameSegment"),
Sequence(
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
optional=True,
),
)
)
),
),
Sequence("WITHOUT", "OIDS"),
),
Sequence(
"ON",
"COMMIT",
OneOf(Sequence("PRESERVE", "ROWS"), Sequence("DELETE", "ROWS"), "DROP"),
),
Sequence("TABLESPACE", Ref("TableReferenceSegment")),
),
)
@postgres_dialect.segment()
class CreateTableAsStatementSegment(BaseSegment):
"""A `CREATE TABLE AS` statement.
As specified in https://www.postgresql.org/docs/13/sql-createtableas.html
"""
type = "create_table_as_statement"
match_grammar = Sequence(
"CREATE",
OneOf(
Sequence(
OneOf("GLOBAL", "LOCAL", optional=True),
Ref("TemporaryGrammar"),
),
"UNLOGGED",
optional=True,
),
"TABLE",
Ref("IfNotExistsGrammar", optional=True),
Ref("TableReferenceSegment"),
AnyNumberOf(
Sequence(
Bracketed(
Delimited(Ref("ColumnReferenceSegment")),
),
optional=True,
),
Sequence("USING", Ref("ParameterNameSegment"), optional=True),
OneOf(
Sequence(
"WITH",
Bracketed(
AnyNumberOf(
Sequence(
Ref("ParameterNameSegment"),
Sequence(
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
optional=True,
),
)
)
),
),
Sequence("WITHOUT", "OIDS"),
optional=True,
),
Sequence(
"ON",
"COMMIT",
OneOf(Sequence("PRESERVE", "ROWS"), Sequence("DELETE", "ROWS"), "DROP"),
optional=True,
),
Sequence("TABLESPACE", Ref("ParameterNameSegment"), optional=True),
),
"AS",
OneOf(
OptionallyBracketed(Ref("SelectableGrammar")),
OptionallyBracketed(Sequence("TABLE", Ref("TableReferenceSegment"))),
Ref("ValuesClauseSegment"),
OptionallyBracketed(Sequence("EXECUTE", Ref("FunctionSegment"))),
),
Ref("WithDataClauseSegment", optional=True),
)
@postgres_dialect.segment(replace=True)
class AlterTableStatementSegment(BaseSegment):
"""An `ALTER TABLE` statement.
Matches the definition in https://www.postgresql.org/docs/13/sql-altertable.html
"""
type = "alter_table_statement"
match_grammar = Sequence(
"ALTER",
"TABLE",
OneOf(
Sequence(
Sequence("IF", "EXISTS", optional=True),
Ref.keyword("ONLY", optional=True),
Ref("TableReferenceSegment"),
Ref("StarSegment", optional=True),
OneOf(
Delimited(
Ref("AlterTableActionSegment"), delimiter=Ref("CommaSegment")
),
Sequence(
"RENAME",
Ref.keyword("COLUMN", optional=True),
Ref("ColumnReferenceSegment"),
"TO",
Ref("ColumnReferenceSegment"),
),
Sequence(
"RENAME",
"CONSTRAINT",
Ref("ParameterNameSegment"),
"TO",
Ref("ParameterNameSegment"),
),
),
),
Sequence(
Sequence("IF", "EXISTS", optional=True),
Ref("TableReferenceSegment"),
OneOf(
Sequence("RENAME", "TO", Ref("TableReferenceSegment")),
Sequence("SET", "SCHEMA", Ref("SchemaReferenceSegment")),
Sequence(
"ATTACH",
"PARTITION",
Ref("ParameterNameSegment"),
OneOf(
Sequence("FOR", "VALUES", Ref("PartitionBoundSpecSegment")),
"DEFAULT",
),
),
Sequence(
"DETACH",
"PARTITION",
Ref("ParameterNameSegment"),
Ref.keyword("CONCURRENTLY", optional=True),
Ref.keyword("FINALIZE", optional=True),
),
),
),
Sequence(
"ALL",
"IN",
"TABLESPACE",
Ref("ParameterNameSegment"),
Sequence(
"OWNED",
"BY",
Delimited(
Ref("ObjectReferenceSegment"), delimiter=Ref("CommaSegment")
),
optional=True,
),
"SET",
"TABLESPACE",
Ref("ParameterNameSegment"),
Ref.keyword("NOWAIT", optional=True),
),
),
)
@postgres_dialect.segment()
class AlterTableActionSegment(BaseSegment):
"""Alter Table Action Segment.
Matches the definition of action in https://www.postgresql.org/docs/13/sql-altertable.html
"""
type = "alter_table_action_segment"
match_grammar = OneOf(
Sequence(
"ADD",
Ref.keyword("COLUMN", optional=True),
Sequence("IF", "NOT", "EXISTS", optional=True),
Ref("ColumnReferenceSegment"),
Ref("DatatypeSegment"),
Sequence("COLLATE", Ref("QuotedLiteralSegment"), optional=True),
AnyNumberOf(Ref("ColumnConstraintSegment")),
),
Sequence(
"DROP",
Ref.keyword("COLUMN", optional=True),
Sequence("IF", "EXISTS", optional=True),
Ref("ColumnReferenceSegment"),
OneOf("RESTRICT", "CASCADE", optional=True),
),
Sequence(
"ALTER",
Ref.keyword("COLUMN", optional=True),
Ref("ColumnReferenceSegment"),
OneOf(
Sequence(
Sequence("SET", "DATA", optional=True),
"TYPE",
Ref("DatatypeSegment"),
Sequence("COLLATE", Ref("QuotedLiteralSegment"), optional=True),
Sequence("USING", OneOf(Ref("ExpressionSegment")), optional=True),
),
Sequence(
"SET",
"DEFAULT",
OneOf(
OneOf(
Ref("LiteralGrammar"),
Ref("FunctionSegment"),
Ref("BareFunctionSegment"),
Ref("ExpressionSegment"),
)
),
),
Sequence("DROP", "DEFAULT"),
Sequence(OneOf("SET", "DROP", optional=True), "NOT", "NULL"),
Sequence("DROP", "EXPRESSION", Sequence("IF", "EXISTS", optional=True)),
Sequence(
"ADD",
"GENERATED",
OneOf("ALWAYS", Sequence("BY", "DEFAULT")),
"AS",
"IDENTITY",
Bracketed(
AnyNumberOf(Ref("AlterSequenceOptionsSegment")), optional=True
),
),
Sequence(
OneOf(
Sequence(
"SET",
"GENERATED",
OneOf("ALWAYS", Sequence("BY", "DEFAULT")),
),
Sequence("SET", Ref("AlterSequenceOptionsSegment")),
Sequence(
"RESTART", Sequence("WITH", Ref("NumericLiteralSegment"))
),
)
),
Sequence("DROP", "IDENTITY", Sequence("IF", "EXISTS", optional=True)),
Sequence("SET", "STATISTICS", Ref("NumericLiteralSegment")),
Sequence(
"SET",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
),
delimiter=Ref("CommaSegment"),
)
),
),
Sequence(
"RESET",
Bracketed(
Delimited(
Ref("ParameterNameSegment"), delimiter=Ref("CommaSegment")
)
),
),
Sequence(
"SET", "STORAGE", OneOf("PLAIN", "EXTERNAL", "EXTENDED", "MAIN")
),
),
),
Sequence(
"ADD",
Ref("TableConstraintSegment"),
Sequence("NOT", "VALID", optional=True),
),
Sequence("ADD", Ref("TableConstraintUsingIndexSegment")),
Sequence(
"ALTER",
"CONSTRAINT",
Ref("ParameterNameSegment"),
OneOf("DEFERRABLE", Sequence("NOT", "DEFERRABLE"), optional=True),
OneOf(
Sequence("INITIALLY", "DEFERRED"),
Sequence("INITIALLY", "IMMEDIATE"),
optional=True,
),
),
Sequence("VALIDATE", "CONSTRAINT", Ref("ParameterNameSegment")),
Sequence(
"DROP",
"CONSTRAINT",
Sequence("IF", "EXISTS", optional=True),
Ref("ParameterNameSegment"),
OneOf("RESTRICT", "CASCADE", optional=True),
),
Sequence(
OneOf("ENABLE", "DISABLE"),
"TRIGGER",
OneOf(Ref("ParameterNameSegment"), "ALL", "USER"),
),
Sequence(
"ENABLE", OneOf("REPLICA", "ALWAYS"), "TRIGGER", Ref("ParameterNameSegment")
),
Sequence(
OneOf(
"ENABLE",
"DISABLE",
Sequence("ENABLE", "REPLICA"),
Sequence("ENABLE", "RULE"),
),
"RULE",
Ref("ParameterNameSegment"),
),
Sequence(
OneOf("DISABLE", "ENABLE", "FORCE", Sequence("NO", "FORCE")),
"ROW",
"LEVEL",
"SECURITY",
),
Sequence("CLUSTER", "ON", Ref("ParameterNameSegment")),
Sequence("SET", "WITHOUT", OneOf("CLUSTER", "OIDS")),
Sequence("SET", "TABLESPACE", Ref("ParameterNameSegment")),
Sequence("SET", OneOf("LOGGED", "UNLOGGED")),
Sequence(
"SET",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
),
delimiter=Ref("CommaSegment"),
)
),
),
Sequence(
"RESET",
Bracketed(
Delimited(Ref("ParameterNameSegment"), delimiter=Ref("CommaSegment"))
),
),
Sequence(
Ref.keyword("NO", optional=True), "INHERIT", Ref("TableReferenceSegment")
),
Sequence("OF", Ref("ParameterNameSegment")),
Sequence("NOT", "OF"),
Sequence(
"OWNER",
"TO",
OneOf(
OneOf(Ref("ParameterNameSegment"), Ref("QuotedIdentifierSegment")),
"CURRENT_ROLE",
"CURRENT_USER",
"SESSION_USER",
),
),
Sequence(
"REPLICA",
"IDENTITY",
OneOf(
"DEFAULT",
Sequence("USING", "INDEX", Ref("ParameterNameSegment")),
"FULL",
"NOTHING",
),
),
)
@postgres_dialect.segment()
class CreateMaterializedViewStatementSegment(BaseSegment):
"""A `CREATE MATERIALIZED VIEW` statement.
As specified in https://www.postgresql.org/docs/14/sql-creatematerializedview.html
"""
type = "create_materialized_view_statement"
match_grammar = StartsWith(Sequence("CREATE", "MATERIALIZED", "VIEW"))
parse_grammar = Sequence(
"CREATE",
"MATERIALIZED",
"VIEW",
Ref("IfNotExistsGrammar", optional=True),
Ref("TableReferenceSegment"),
Ref("BracketedColumnReferenceListGrammar", optional=True),
AnyNumberOf(
Sequence("USING", Ref("ParameterNameSegment"), optional=True),
Sequence("TABLESPACE", Ref("ParameterNameSegment"), optional=True),
Sequence(
"WITH",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Sequence(
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
optional=True,
),
),
)
),
),
),
"AS",
OneOf(
OptionallyBracketed(Ref("SelectableGrammar")),
OptionallyBracketed(Sequence("TABLE", Ref("TableReferenceSegment"))),
Ref("ValuesClauseSegment"),
OptionallyBracketed(Sequence("EXECUTE", Ref("FunctionSegment"))),
),
Ref("WithDataClauseSegment", optional=True),
)
@postgres_dialect.segment()
class AlterMaterializedViewStatementSegment(BaseSegment):
"""A `ALTER MATERIALIZED VIEW` statement.
As specified in https://www.postgresql.org/docs/14/sql-altermaterializedview.html
"""
type = "alter_materialized_view_statement"
match_grammar = StartsWith(Sequence("ALTER", "MATERIALIZED", "VIEW"))
parse_grammar = Sequence(
"ALTER",
"MATERIALIZED",
"VIEW",
OneOf(
Sequence(
Sequence("IF", "EXISTS", optional=True),
Ref("TableReferenceSegment"),
OneOf(
Delimited(Ref("AlterMaterializedViewActionSegment")),
Sequence(
"RENAME",
Sequence("COLUMN", optional=True),
Ref("ColumnReferenceSegment"),
"TO",
Ref("ColumnReferenceSegment"),
),
Sequence("RENAME", "TO", Ref("TableReferenceSegment")),
Sequence("SET", "SCHEMA", Ref("SchemaReferenceSegment")),
),
),
Sequence(
Ref("TableReferenceSegment"),
Ref.keyword("NO", optional=True),
"DEPENDS",
"ON",
"EXTENSION",
Ref("ParameterNameSegment"),
),
Sequence(
"ALL",
"IN",
"TABLESPACE",
Ref("TableReferenceSegment"),
Sequence(
"OWNED",
"BY",
Delimited(Ref("ObjectReferenceSegment")),
optional=True,
),
"SET",
"TABLESPACE",
Ref("ParameterNameSegment"),
Sequence("NOWAIT", optional=True),
),
),
)
@postgres_dialect.segment()
class AlterMaterializedViewActionSegment(BaseSegment):
"""Alter Materialized View Action Segment.
Matches the definition of action in https://www.postgresql.org/docs/14/sql-altermaterializedview.html
"""
type = "alter_materialized_view_action_segment"
match_grammar = OneOf(
Sequence(
"ALTER",
Ref.keyword("COLUMN", optional=True),
Ref("ColumnReferenceSegment"),
OneOf(
Sequence("SET", "STATISTICS", Ref("NumericLiteralSegment")),
Sequence(
"SET",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
),
)
),
),
Sequence(
"RESET",
Bracketed(Delimited(Ref("ParameterNameSegment"))),
),
Sequence(
"SET", "STORAGE", OneOf("PLAIN", "EXTERNAL", "EXTENDED", "MAIN")
),
Sequence("SET", "COMPRESSION", Ref("ParameterNameSegment")),
),
),
Sequence("CLUSTER", "ON", Ref("ParameterNameSegment")),
Sequence("SET", "WITHOUT", "CLUSTER"),
Sequence(
"SET",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Sequence(
Ref("EqualsSegment"), Ref("LiteralGrammar"), optional=True
),
)
)
),
),
Sequence(
"RESET",
Bracketed(Delimited(Ref("ParameterNameSegment"))),
),
Sequence(
"OWNER",
"TO",
OneOf(
Ref("ObjectReferenceSegment"),
"CURRENT_ROLE",
"CURRENT_USER",
"SESSION_USER",
),
),
)
@postgres_dialect.segment()
class RefreshMaterializedViewStatementSegment(BaseSegment):
"""A `REFRESH MATERIALIZED VIEW` statement.
As specified in https://www.postgresql.org/docs/14/sql-refreshmaterializedview.html
"""
type = "refresh_materialized_view_statement"
match_grammar = StartsWith(Sequence("REFRESH", "MATERIALIZED", "VIEW"))
parse_grammar = Sequence(
"REFRESH",
"MATERIALIZED",
"VIEW",
Ref.keyword("CONCURRENTLY", optional=True),
Ref("TableReferenceSegment"),
Ref("WithDataClauseSegment", optional=True),
)
@postgres_dialect.segment()
class DropMaterializedViewStatementSegment(BaseSegment):
"""A `DROP MATERIALIZED VIEW` statement.
As specified in https://www.postgresql.org/docs/14/sql-dropmaterializedview.html
"""
type = "drop_materialized_view_statement"
match_grammar = StartsWith(Sequence("DROP", "MATERIALIZED", "VIEW"))
parse_grammar = Sequence(
"DROP",
"MATERIALIZED",
"VIEW",
Sequence("IF", "EXISTS", optional=True),
Delimited(Ref("TableReferenceSegment")),
OneOf("CASCADE", "RESTRICT", optional=True),
)
@postgres_dialect.segment()
class AlterViewStatementSegment(BaseSegment):
"""An `ALTER VIEW` statement.
As specified in https://www.postgresql.org/docs/14/sql-alterview.html
"""
type = "alter_view_statement"
match_grammar = StartsWith(Sequence("ALTER", "VIEW"))
parse_grammar = Sequence(
"ALTER",
"VIEW",
Ref("IfExistsGrammar", optional=True),
Ref("TableReferenceSegment"),
OneOf(
Sequence(
"ALTER",
Ref.keyword("COLUMN", optional=True),
Ref("ColumnReferenceSegment"),
OneOf(
Sequence(
"SET",
"DEFAULT",
OneOf(
Ref("LiteralGrammar"),
Ref("FunctionSegment"),
Ref("BareFunctionSegment"),
Ref("ExpressionSegment"),
),
),
Sequence("DROP", "DEFAULT"),
),
),
Sequence(
"OWNER",
"TO",
OneOf(
Ref("ObjectReferenceSegment"),
"CURRENT_ROLE",
"CURRENT_USER",
"SESSION_USER",
),
),
Sequence(
"RENAME",
Ref.keyword("COLUMN", optional=True),
Ref("ColumnReferenceSegment"),
"TO",
Ref("ColumnReferenceSegment"),
),
Sequence("RENAME", "TO", Ref("TableReferenceSegment")),
Sequence("SET", "SCHEMA", Ref("SchemaReferenceSegment")),
Sequence(
"SET",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Sequence(
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
optional=True,
),
)
)
),
),
Sequence(
"RESET",
Bracketed(Delimited(Ref("ParameterNameSegment"))),
),
),
)
@postgres_dialect.segment(replace=True)
class CreateDatabaseStatementSegment(BaseSegment):
"""A `CREATE DATABASE` statement.
As specified in https://www.postgresql.org/docs/14/sql-createdatabase.html
"""
type = "create_database_statement"
match_grammar = StartsWith(Sequence("CREATE", "DATABASE"))
parse_grammar = Sequence(
"CREATE",
"DATABASE",
Ref("DatabaseReferenceSegment"),
Ref.keyword("WITH", optional=True),
AnyNumberOf(
Sequence(
"OWNER",
Ref("EqualsSegment", optional=True),
Ref("ObjectReferenceSegment"),
),
Sequence(
"TEMPLATE",
Ref("EqualsSegment", optional=True),
Ref("ObjectReferenceSegment"),
),
Sequence(
"ENCODING",
Ref("EqualsSegment", optional=True),
OneOf(Ref("QuotedLiteralSegment"), "DEFAULT"),
),
OneOf(
# LOCALE This is a shortcut for setting LC_COLLATE and LC_CTYPE at once.
# If you specify this, you cannot specify either of those parameters.
Sequence(
"LOCALE",
Ref("EqualsSegment", optional=True),
Ref("QuotedLiteralSegment"),
),
AnyNumberOf(
Sequence(
"LC_COLLATE",
Ref("EqualsSegment", optional=True),
Ref("QuotedLiteralSegment"),
),
Sequence(
"LC_CTYPE",
Ref("EqualsSegment", optional=True),
Ref("QuotedLiteralSegment"),
),
),
),
Sequence(
"TABLESPACE",
Ref("EqualsSegment", optional=True),
Ref("ParameterNameSegment"),
),
Sequence(
"ALLOW_CONNECTIONS",
Ref("EqualsSegment", optional=True),
Ref("BooleanLiteralGrammar"),
),
Sequence(
"CONNECTION",
"LIMIT",
Ref("EqualsSegment", optional=True),
Ref("NumericLiteralSegment"),
),
Sequence(
"IS_TEMPLATE",
Ref("EqualsSegment", optional=True),
Ref("BooleanLiteralGrammar"),
),
),
)
@postgres_dialect.segment()
class AlterDatabaseStatementSegment(BaseSegment):
"""A `ALTER DATABASE` statement.
As specified in https://www.postgresql.org/docs/14/sql-alterdatabase.html
"""
type = "alter_database_statement"
match_grammar = StartsWith(Sequence("ALTER", "DATABASE"))
parse_grammar = Sequence(
"ALTER",
"DATABASE",
Ref("DatabaseReferenceSegment"),
OneOf(
Sequence(
Ref.keyword("WITH", optional=True),
AnyNumberOf(
Sequence("ALLOW_CONNECTIONS", Ref("BooleanLiteralGrammar")),
Sequence(
"CONNECTION",
"LIMIT",
Ref("NumericLiteralSegment"),
),
Sequence("IS_TEMPLATE", Ref("BooleanLiteralGrammar")),
min_times=1,
),
),
Sequence("RENAME", "TO", Ref("DatabaseReferenceSegment")),
Sequence(
"OWNER",
"TO",
OneOf(
Ref("ObjectReferenceSegment"),
"CURRENT_ROLE",
"CURRENT_USER",
"SESSION_USER",
),
),
Sequence("SET", "TABLESPACE", Ref("ParameterNameSegment")),
Sequence(
"SET",
Ref("ParameterNameSegment"),
OneOf(
Sequence(
OneOf("TO", Ref("EqualsSegment")),
OneOf("DEFAULT", Ref("LiteralGrammar")),
),
Sequence("FROM", "CURRENT"),
),
),
Sequence("RESET", OneOf("ALL", Ref("ParameterNameSegment"))),
optional=True,
),
)
@postgres_dialect.segment()
class DropDatabaseStatementSegment(BaseSegment):
"""A `DROP DATABASE` statement.
As specified in https://www.postgresql.org/docs/14/sql-dropdatabase.html
"""
type = "drop_database_statement"
match_grammar = StartsWith(Sequence("DROP", "DATABASE"))
parse_grammar = Sequence(
"DROP",
"DATABASE",
Sequence("IF", "EXISTS", optional=True),
Ref("DatabaseReferenceSegment"),
Sequence(
Ref.keyword("WITH", optional=True),
Bracketed("FORCE"),
optional=True,
),
)
@postgres_dialect.segment()
class LikeOptionSegment(BaseSegment):
"""Like Option Segment.
As specified in https://www.postgresql.org/docs/13/sql-createtable.html
"""
type = "like_option_segment"
match_grammar = Sequence(
OneOf("INCLUDING", "EXCLUDING"),
OneOf(
"COMMENTS",
"CONSTRAINTS",
"DEFAULTS",
"GENERATED",
"IDENTITY",
"INDEXES",
"STATISTICS",
"STORAGE",
"ALL",
),
)
@postgres_dialect.segment(replace=True)
class ColumnConstraintSegment(BaseSegment):
"""A column option; each CREATE TABLE column can have 0 or more.
This matches the definition in https://www.postgresql.org/docs/13/sql-altertable.html
"""
type = "column_constraint_segment"
# Column constraint from
# https://www.postgresql.org/docs/12/sql-createtable.html
match_grammar = Sequence(
Sequence(
"CONSTRAINT",
Ref("ObjectReferenceSegment"), # Constraint name
optional=True,
),
OneOf(
Sequence(Ref.keyword("NOT", optional=True), "NULL"), # NOT NULL or NULL
Sequence(
"CHECK",
Bracketed(Ref("ExpressionSegment")),
Sequence("NO", "INHERIT", optional=True),
),
Sequence( # DEFAULT <value>
"DEFAULT",
OneOf(
Ref("LiteralGrammar"),
Ref("FunctionSegment"),
Ref("BareFunctionSegment"),
Ref("ExpressionSegment")
# ?? Ref('IntervalExpressionSegment')
),
),
Sequence("GENERATED", "ALWAYS", "AS", Ref("ExpressionSegment"), "STORED"),
Sequence(
"GENERATED",
OneOf("ALWAYS", Sequence("BY", "DEFAULT")),
"AS",
"IDENTITY",
Bracketed(
AnyNumberOf(Ref("AlterSequenceOptionsSegment")), optional=True
),
),
"UNIQUE",
Ref("PrimaryKeyGrammar"),
Sequence( # REFERENCES reftable [ ( refcolumn) ]
"REFERENCES",
Ref("ColumnReferenceSegment"),
# Foreign columns making up FOREIGN KEY constraint
Ref("BracketedColumnReferenceListGrammar", optional=True),
Sequence(
"ON",
OneOf("DELETE", "UPDATE"),
Ref("ReferentialActionSegment"),
optional=True,
),
),
),
OneOf("DEFERRABLE", Sequence("NOT", "DEFERRABLE"), optional=True),
OneOf(
Sequence("INITIALLY", "DEFERRED"),
Sequence("INITIALLY", "IMMEDIATE"),
optional=True,
),
)
@postgres_dialect.segment()
class PartitionBoundSpecSegment(BaseSegment):
"""partition_bound_spec as per https://www.postgresql.org/docs/13/sql-altertable.html."""
match_grammar = OneOf(
Sequence(
"IN",
Bracketed(
Delimited(Ref("ExpressionSegment"), delimiter=Ref("CommaSegment"))
),
),
Sequence(
"FROM",
Bracketed(
Delimited(
OneOf(Ref("ExpressionSegment"), "MINVALUE", "MAXVALUE"),
delimiter=Ref("CommaSegment"),
)
),
"TO",
Bracketed(
Delimited(
OneOf(Ref("ExpressionSegment"), "MINVALUE", "MAXVALUE"),
delimiter=Ref("CommaSegment"),
)
),
),
Sequence(
"WITH",
Bracketed(
Sequence(
"MODULUS",
Ref("NumericLiteralSegment"),
Ref("CommaSegment"),
"REMAINDER",
Ref("NumericLiteralSegment"),
)
),
),
)
@postgres_dialect.segment(replace=True)
class TableConstraintSegment(BaseSegment):
"""A table constraint, e.g. for CREATE TABLE.
As specified in https://www.postgresql.org/docs/13/sql-altertable.html
"""
type = "table_constraint_segment"
match_grammar = Sequence(
Sequence( # [ CONSTRAINT <Constraint name> ]
"CONSTRAINT", Ref("ObjectReferenceSegment"), optional=True
),
OneOf(
Sequence(
"CHECK",
Bracketed(Ref("ExpressionSegment")),
Sequence("NO", "INHERIT", optional=True),
),
Sequence( # UNIQUE ( column_name [, ... ] )
"UNIQUE",
Ref("BracketedColumnReferenceListGrammar"),
Ref("IndexParametersSegment", optional=True),
),
Sequence( # PRIMARY KEY ( column_name [, ... ] ) index_parameters
Ref("PrimaryKeyGrammar"),
# Columns making up PRIMARY KEY constraint
Ref("BracketedColumnReferenceListGrammar"),
Ref("IndexParametersSegment", optional=True),
),
Sequence(
"EXCLUDE",
Sequence("USING", Ref("FunctionSegment"), optional=True),
Bracketed(
Delimited(
Sequence(
Ref("ExcludeElementSegment"),
"WITH",
Ref("ComparisonOperatorGrammar"),
)
)
),
Ref("IndexParametersSegment", optional=True),
Sequence("WHERE", Ref("ExpressionSegment")),
),
Sequence( # FOREIGN KEY ( column_name [, ... ] )
# REFERENCES reftable [ ( refcolumn [, ... ] ) ]
"FOREIGN",
"KEY",
# Local columns making up FOREIGN KEY constraint
Ref("BracketedColumnReferenceListGrammar"),
"REFERENCES",
Ref("ColumnReferenceSegment"),
# Foreign columns making up FOREIGN KEY constraint
Ref("BracketedColumnReferenceListGrammar", optional=True),
Sequence("MATCH", OneOf("FULL", "PARTIAL", "SIMPLE"), optional=True),
Sequence(
"ON", "DELETE", Ref("ReferentialActionSegment"), optional=True
),
Sequence(
"ON", "UPDATE", Ref("ReferentialActionSegment"), optional=True
),
),
OneOf("DEFERRABLE", Sequence("NOT", "DEFERRABLE"), optional=True),
OneOf(
Sequence("INITIALLY", "DEFERRED"),
Sequence("INITIALLY", "IMMEDIATE"),
optional=True,
),
),
)
@postgres_dialect.segment()
class TableConstraintUsingIndexSegment(BaseSegment):
"""table_constraint_using_index as specified in https://www.postgresql.org/docs/13/sql-altertable.html."""
match_grammar = Sequence(
Sequence( # [ CONSTRAINT <Constraint name> ]
"CONSTRAINT", Ref("ObjectReferenceSegment"), optional=True
),
Sequence(
OneOf("UNIQUE", Sequence("PRIMARY", "KEY")),
"USING",
"INDEX",
Ref("ParameterNameSegment"),
),
OneOf("DEFERRABLE", Sequence("NOT", "DEFERRABLE"), optional=True),
OneOf(
Sequence("INITIALLY", "DEFERRED"),
Sequence("INITIALLY", "IMMEDIATE"),
optional=True,
),
)
@postgres_dialect.segment()
class IndexParametersSegment(BaseSegment):
"""index_parameters as specified in https://www.postgresql.org/docs/13/sql-altertable.html."""
type = "index_parameters"
match_grammar = Sequence(
Sequence("INCLUDE", Ref("BracketedColumnReferenceListGrammar"), optional=True),
Sequence(
"WITH",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
),
delimiter=Ref("CommaSegment"),
)
),
optional=True,
),
Sequence(
"USING", "INDEX", "TABLESPACE", Ref("ParameterNameSegment"), optional=True
),
)
@postgres_dialect.segment()
class ReferentialActionSegment(BaseSegment):
"""Foreign Key constraints.
As found in https://www.postgresql.org/docs/13/infoschema-referential-constraints.html
"""
type = "referential_action"
match_grammar = OneOf(
"CASCADE",
Sequence("SET", "NULL"),
Sequence("SET", "DEFAULT"),
"RESTRICT",
Sequence("NO", "ACTION"),
)
@postgres_dialect.segment()
class ExcludeElementSegment(BaseSegment):
"""exclude_element segment as found in https://www.postgresql.org/docs/13/sql-altertable.html."""
match_grammar = Sequence(
OneOf(Ref("ColumnReferenceSegment"), Bracketed(Ref("ExpressionSegment"))),
Ref("ParameterNameSegment", optional=True),
OneOf("ASC", "DESC", optional=True),
Sequence("NULLS", OneOf("FIRST", "LAST"), optional=True),
)
@postgres_dialect.segment()
class AlterDefaultPrivilegesStatementSegment(BaseSegment):
"""`ALTER DEFAULT PRIVILEGES` statement.
```
ALTER DEFAULT PRIVILEGES
[ FOR { ROLE | USER } target_role [, ...] ]
[ IN SCHEMA schema_name [, ...] ]
abbreviated_grant_or_revoke
```
https://www.postgresql.org/docs/13/sql-alterdefaultprivileges.html
"""
type = "alter_default_privileges_statement"
match_grammar = Sequence(
"ALTER",
"DEFAULT",
"PRIVILEGES",
Sequence(
"FOR",
OneOf("ROLE", "USER"),
Delimited(
Ref("ObjectReferenceSegment"),
terminator=OneOf("IN", "GRANT", "REVOKE"),
),
optional=True,
),
Sequence(
"IN",
"SCHEMA",
Delimited(
Ref("SchemaReferenceSegment"),
terminator=OneOf("GRANT", "REVOKE"),
),
optional=True,
),
OneOf(
Ref("AlterDefaultPrivilegesGrantSegment"),
Ref("AlterDefaultPrivilegesRevokeSegment"),
),
)
@postgres_dialect.segment()
class AlterDefaultPrivilegesObjectPrivilegesSegment(BaseSegment):
"""`ALTER DEFAULT PRIVILEGES` object privileges.
https://www.postgresql.org/docs/13/sql-alterdefaultprivileges.html
"""
type = "alter_default_privileges_object_privilege"
match_grammar = OneOf(
Sequence("ALL", Ref.keyword("PRIVILEGES", optional=True)),
Delimited(
"CREATE",
"DELETE",
"EXECUTE",
"INSERT",
"REFERENCES",
"SELECT",
"TRIGGER",
"TRUNCATE",
"UPDATE",
"USAGE",
terminator="ON",
),
)
@postgres_dialect.segment()
class AlterDefaultPrivilegesSchemaObjectsSegment(BaseSegment):
"""`ALTER DEFAULT PRIVILEGES` schema object types.
https://www.postgresql.org/docs/13/sql-alterdefaultprivileges.html
"""
type = "alter_default_privileges_schema_object"
match_grammar = OneOf(
"TABLES",
"FUNCTIONS",
"ROUTINES",
"SEQUENCES",
"TYPES",
"SCHEMAS",
)
@postgres_dialect.segment()
class AlterDefaultPrivilegesToFromRolesSegment(BaseSegment):
"""The segment after `TO` / `FROM` in `ALTER DEFAULT PRIVILEGES`.
`{ [ GROUP ] role_name | PUBLIC } [, ...]`
https://www.postgresql.org/docs/13/sql-alterdefaultprivileges.html
"""
type = "alter_default_privileges_to_from_roles"
match_grammar = OneOf(
Sequence(
Ref.keyword("GROUP", optional=True),
Ref("ObjectReferenceSegment"),
),
"PUBLIC",
)
@postgres_dialect.segment()
class AlterDefaultPrivilegesGrantSegment(BaseSegment):
"""`GRANT` for `ALTER DEFAULT PRIVILEGES`.
https://www.postgresql.org/docs/13/sql-alterdefaultprivileges.html
"""
type = "alter_default_privileges_grant"
match_grammar = Sequence(
"GRANT",
Ref("AlterDefaultPrivilegesObjectPrivilegesSegment"),
"ON",
Ref("AlterDefaultPrivilegesSchemaObjectsSegment"),
"TO",
Delimited(
Ref("AlterDefaultPrivilegesToFromRolesSegment"),
terminator="WITH",
),
Sequence("WITH", "GRANT", "OPTION", optional=True),
)
@postgres_dialect.segment()
class AlterDefaultPrivilegesRevokeSegment(BaseSegment):
"""`REVOKE` for `ALTER DEFAULT PRIVILEGES`.
https://www.postgresql.org/docs/13/sql-alterdefaultprivileges.html
"""
type = "alter_default_privileges_revoke"
match_grammar = Sequence(
"REVOKE",
Sequence("GRANT", "OPTION", "FOR", optional=True),
Ref("AlterDefaultPrivilegesObjectPrivilegesSegment"),
"ON",
Ref("AlterDefaultPrivilegesSchemaObjectsSegment"),
"FROM",
Delimited(
Ref("AlterDefaultPrivilegesToFromRolesSegment"),
terminator=OneOf("RESTRICT", "CASCADE"),
),
OneOf("RESTRICT", "CASCADE", optional=True),
)
@postgres_dialect.segment()
class CommentOnStatementSegment(BaseSegment):
"""`COMMENT ON` statement.
https://www.postgresql.org/docs/13/sql-comment.html
"""
type = "comment_on_statement"
match_grammar = StartsWith(Sequence("COMMENT", "ON"))
parse_grammar = Sequence(
"COMMENT",
"ON",
Sequence(
OneOf(
Sequence(
OneOf(
"TABLE",
# TODO: Create a ViewReferenceSegment
"VIEW",
),
Ref("TableReferenceSegment"),
),
Sequence(
"CAST",
Bracketed(
Sequence(
Ref("ObjectReferenceSegment"),
"AS",
Ref("ObjectReferenceSegment"),
),
),
),
Sequence(
"COLUMN",
# TODO: Does this correctly emit a Table Reference?
Ref("ColumnReferenceSegment"),
),
Sequence(
"CONSTRAINT",
Ref("ObjectReferenceSegment"),
Sequence(
"ON",
Ref.keyword("DOMAIN", optional=True),
Ref("ObjectReferenceSegment"),
),
),
Sequence(
"DATABASE",
Ref("DatabaseReferenceSegment"),
),
Sequence(
"EXTENSION",
Ref("ExtensionReferenceSegment"),
),
Sequence(
"FUNCTION",
Ref("FunctionNameSegment"),
Ref("FunctionParameterListGrammar"),
),
Sequence(
"INDEX",
Ref("IndexReferenceSegment"),
),
Sequence(
"SCHEMA",
Ref("SchemaReferenceSegment"),
),
# TODO: Split out individual items if they have references
Sequence(
OneOf(
"COLLATION",
"CONVERSION",
"DOMAIN",
"LANGUAGE",
"POLICY",
"PUBLICATION",
"ROLE",
"RULE",
"SEQUENCE",
"SERVER",
"STATISTICS",
"SUBSCRIPTION",
"TABLESPACE",
"TRIGGER",
"TYPE",
Sequence("ACCESS", "METHOD"),
Sequence("EVENT", "TRIGGER"),
Sequence("FOREIGN", "DATA", "WRAPPER"),
Sequence("FOREIGN", "TABLE"),
Sequence("MATERIALIZED", "VIEW"),
Sequence("TEXT", "SEARCH", "CONFIGURATION"),
Sequence("TEXT", "SEARCH", "DICTIONARY"),
Sequence("TEXT", "SEARCH", "PARSER"),
Sequence("TEXT", "SEARCH", "TEMPLATE"),
),
Ref("ObjectReferenceSegment"),
Sequence("ON", Ref("ObjectReferenceSegment"), optional=True),
),
Sequence(
OneOf(
"AGGREGATE",
"PROCEDURE",
"ROUTINE",
),
Ref("ObjectReferenceSegment"),
Bracketed(
Sequence(
# TODO: Is this too permissive?
Anything(),
),
),
),
),
Sequence("IS", OneOf(Ref("QuotedLiteralSegment"), "NULL")),
),
)
@postgres_dialect.segment(replace=True)
class CreateIndexStatementSegment(BaseSegment):
"""A `CREATE INDEX` statement.
As specified in https://www.postgresql.org/docs/13/sql-createindex.html
"""
type = "create_index_statement"
match_grammar = Sequence(
"CREATE",
Ref.keyword("UNIQUE", optional=True),
Ref("OrReplaceGrammar", optional=True),
"INDEX",
Ref.keyword("CONCURRENTLY", optional=True),
Ref("IfNotExistsGrammar", optional=True),
Ref("IndexReferenceSegment", optional=True),
"ON",
Ref.keyword("ONLY", optional=True),
Ref("TableReferenceSegment"),
OneOf(
Sequence("USING", Ref("FunctionSegment"), optional=True),
Bracketed(
Delimited(
Sequence(
OneOf(
Ref("ColumnReferenceSegment"),
OptionallyBracketed(Ref("FunctionSegment")),
Bracketed(Ref("ExpressionSegment")),
),
AnyNumberOf(
Sequence(
"COLLATE",
OneOf(
Ref("LiteralGrammar"),
Ref("QuotedIdentifierSegment"),
),
),
Sequence(
Ref("ParameterNameSegment"),
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Ref("EqualsSegment"),
OneOf(
Ref("LiteralGrammar"),
Ref("QuotedIdentifierSegment"),
),
),
delimiter=Ref("CommaSegment"),
),
),
),
OneOf("ASC", "DESC"),
OneOf(
Sequence("NULLS", "FIRST"), Sequence("NULLS", "LAST")
),
),
),
delimiter=Ref("CommaSegment"),
)
),
),
AnyNumberOf(
Sequence(
"INCLUDE",
Bracketed(
Delimited(
Ref("ColumnReferenceSegment"), delimiter=Ref("CommaSegment")
)
),
),
Sequence(
"WITH",
Bracketed(
Delimited(
Sequence(
Ref("ParameterNameSegment"),
Ref("EqualsSegment"),
Ref("LiteralGrammar"),
),
delimiter=Ref("CommaSegment"),
)
),
),
Sequence("TABLESPACE", Ref("TableReferenceSegment")),
Sequence("WHERE", Ref("ExpressionSegment")),
),
)
@postgres_dialect.segment(replace=True)
class FrameClauseSegment(BaseSegment):
"""A frame clause for window functions.
As specified in https://www.postgresql.org/docs/13/sql-expressions.html
"""
type = "frame_clause"
_frame_extent = OneOf(
Sequence("CURRENT", "ROW"),
Sequence(
OneOf(Ref("NumericLiteralSegment"), "UNBOUNDED"),
OneOf("PRECEDING", "FOLLOWING"),
),
)
_frame_exclusion = Sequence(
"EXCLUDE",
OneOf(Sequence("CURRENT", "ROW"), "GROUP", "TIES", Sequence("NO", "OTHERS")),
optional=True,
)
match_grammar = Sequence(
Ref("FrameClauseUnitGrammar"),
OneOf(_frame_extent, Sequence("BETWEEN", _frame_extent, "AND", _frame_extent)),
_frame_exclusion,
)
@postgres_dialect.segment(replace=True)
class CreateSequenceOptionsSegment(BaseSegment):
"""Options for Create Sequence statement.
As specified in https://www.postgresql.org/docs/13/sql-createsequence.html
"""
type = "create_sequence_options_segment"
match_grammar = OneOf(
Sequence("AS", Ref("DatatypeSegment")),
Sequence(
"INCREMENT", Ref.keyword("BY", optional=True), Ref("NumericLiteralSegment")
),
OneOf(
Sequence("MINVALUE", Ref("NumericLiteralSegment")),
Sequence("NO", "MINVALUE"),
),
OneOf(
Sequence("MAXVALUE", Ref("NumericLiteralSegment")),
Sequence("NO", "MAXVALUE"),
),
Sequence(
"START", Ref.keyword("WITH", optional=True), Ref("NumericLiteralSegment")
),
Sequence("CACHE", Ref("NumericLiteralSegment")),
OneOf("CYCLE", Sequence("NO", "CYCLE")),
Sequence("OWNED", "BY", OneOf("NONE", Ref("ColumnReferenceSegment"))),
)
@postgres_dialect.segment(replace=True)
class CreateSequenceStatementSegment(BaseSegment):
"""Create Sequence Statement.
As specified in https://www.postgresql.org/docs/13/sql-createsequence.html
"""
type = "create_sequence_statement"
match_grammar = Sequence(
"CREATE",
Ref("TemporaryGrammar", optional=True),
"SEQUENCE",
Ref("IfNotExistsGrammar", optional=True),
Ref("SequenceReferenceSegment"),
AnyNumberOf(Ref("CreateSequenceOptionsSegment"), optional=True),
)
@postgres_dialect.segment(replace=True)
class AlterSequenceOptionsSegment(BaseSegment):
"""Dialect-specific options for ALTER SEQUENCE statement.
As specified in https://www.postgresql.org/docs/13/sql-altersequence.html
"""
type = "alter_sequence_options_segment"
match_grammar = OneOf(
Sequence("AS", Ref("DatatypeSegment")),
Sequence(
"INCREMENT", Ref.keyword("BY", optional=True), Ref("NumericLiteralSegment")
),
OneOf(
Sequence("MINVALUE", Ref("NumericLiteralSegment")),
Sequence("NO", "MINVALUE"),
),
OneOf(
Sequence("MAXVALUE", Ref("NumericLiteralSegment")),
Sequence("NO", "MAXVALUE"),
),
# N.B. The SEQUENCE NAME keywords are undocumented but are produced
# by the pg_dump utility. See discussion in issue #1857.
Sequence("SEQUENCE", "NAME", Ref("SequenceReferenceSegment")),
Sequence(
"START", Ref.keyword("WITH", optional=True), Ref("NumericLiteralSegment")
),
Sequence(
"RESTART", Ref.keyword("WITH", optional=True), Ref("NumericLiteralSegment")
),
Sequence("CACHE", Ref("NumericLiteralSegment")),
Sequence(Ref.keyword("NO", optional=True), "CYCLE"),
Sequence("OWNED", "BY", OneOf("NONE", Ref("ColumnReferenceSegment"))),
)
@postgres_dialect.segment(replace=True)
class AlterSequenceStatementSegment(BaseSegment):
"""Alter Sequence Statement.
As specified in https://www.postgresql.org/docs/13/sql-altersequence.html
"""
type = "alter_sequence_statement"
match_grammar = Sequence(
"ALTER",
"SEQUENCE",
Ref("IfExistsGrammar", optional=True),
Ref("SequenceReferenceSegment"),
OneOf(
AnyNumberOf(Ref("AlterSequenceOptionsSegment", optional=True)),
Sequence(
"OWNER",
"TO",
OneOf(Ref("ParameterNameSegment"), "CURRENT_USER", "SESSION_USER"),
),
Sequence("RENAME", "TO", Ref("SequenceReferenceSegment")),
Sequence("SET", "SCHEMA", Ref("SchemaReferenceSegment")),
),
)
@postgres_dialect.segment(replace=True)
class DropSequenceStatementSegment(BaseSegment):
"""Drop Sequence Statement.
As specified in https://www.postgresql.org/docs/13/sql-dropsequence.html
"""
type = "drop_sequence_statement"
match_grammar = Sequence(
"DROP",
"SEQUENCE",
Ref("IfExistsGrammar", optional=True),
Delimited(Ref("SequenceReferenceSegment")),
OneOf("CASCADE", "RESTRICT", optional=True),
)
@postgres_dialect.segment()
class AnalyzeStatementSegment(BaseSegment):
"""Analyze Statement Segment.
As specified in https://www.postgresql.org/docs/13/sql-analyze.html
"""
type = "analyze_statement"
_option = Sequence(
OneOf("VERBOSE", "SKIP_LOCKED"), Ref("BooleanLiteralGrammar", optional=True)
)
_tables_and_columns = Sequence(
Ref("TableReferenceSegment"),
Bracketed(Delimited(Ref("ColumnReferenceSegment")), optional=True),
)
match_grammar = Sequence(
OneOf("ANALYZE", "ANALYSE"),
OneOf(Bracketed(Delimited(_option)), "VERBOSE", optional=True),
Delimited(_tables_and_columns, optional=True),
)
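    # Illustrative statements this grammar should accept (table and column
    # names are made up):
    #   ANALYZE VERBOSE my_table (col_a, col_b);
    #   ANALYSE (VERBOSE TRUE, SKIP_LOCKED) my_table;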
# Adding PostgreSQL specific statements
@postgres_dialect.segment(replace=True)
class StatementSegment(BaseSegment):
"""A generic segment, to any of its child subsegments."""
type = "statement"
parse_grammar = ansi_dialect.get_segment("StatementSegment").parse_grammar.copy(
insert=[
Ref("AlterDefaultPrivilegesStatementSegment"),
Ref("CommentOnStatementSegment"),
Ref("AnalyzeStatementSegment"),
Ref("CreateTableAsStatementSegment"),
Ref("AlterTriggerStatementSegment"),
Ref("SetStatementSegment"),
Ref("DropFunctionStatementSegment"),
Ref("CreatePolicyStatementSegment"),
Ref("DropPolicyStatementSegment"),
Ref("CreateMaterializedViewStatementSegment"),
Ref("AlterMaterializedViewStatementSegment"),
Ref("DropMaterializedViewStatementSegment"),
Ref("RefreshMaterializedViewStatementSegment"),
Ref("AlterDatabaseStatementSegment"),
Ref("DropDatabaseStatementSegment"),
Ref("AlterFunctionStatementSegment"),
Ref("AlterViewStatementSegment"),
Ref("ListenStatementSegment"),
Ref("NotifyStatementSegment"),
Ref("UnlistenStatementSegment"),
Ref("LoadStatementSegment"),
Ref("ResetStatementSegment"),
Ref("DiscardStatementSegment"),
],
)
match_grammar = ansi_dialect.get_segment("StatementSegment").match_grammar.copy()
@postgres_dialect.segment(replace=True)
class FunctionSegment(BaseSegment):
"""A scalar or aggregate function.
Maybe in the future we should distinguish between
aggregate functions and other functions. For now
we treat them the same because they look the same
for our purposes.
"""
type = "function"
match_grammar = Sequence(
Sequence(
Ref("FunctionNameSegment"),
Bracketed(
Ref(
"FunctionContentsGrammar",
# The brackets might be empty for some functions...
optional=True,
ephemeral_name="FunctionContentsGrammar",
)
),
),
Ref("PostFunctionGrammar", optional=True),
)
@postgres_dialect.segment(replace=True)
class CreateTriggerStatementSegment(BaseSegment):
"""Create Trigger Statement.
As Specified in https://www.postgresql.org/docs/14/sql-createtrigger.html
"""
type = "create_trigger"
match_grammar = Sequence(
"CREATE",
Sequence("OR", "REPLACE", optional=True),
Ref.keyword("CONSTRAINT", optional=True),
"TRIGGER",
Anything(),
)
parse_grammar = Sequence(
"CREATE",
Sequence("OR", "REPLACE", optional=True),
Ref.keyword("CONSTRAINT", optional=True),
"TRIGGER",
Ref("TriggerReferenceSegment"),
OneOf("BEFORE", "AFTER", Sequence("INSTEAD", "OF")),
Delimited(
"INSERT",
"DELETE",
"TRUNCATE",
Sequence(
"UPDATE",
Sequence(
"OF",
Delimited(
Ref("ColumnReferenceSegment"),
terminator=OneOf("OR", "ON"),
),
optional=True,
),
),
delimiter="OR",
),
"ON",
Ref("TableReferenceSegment"),
AnyNumberOf(
Sequence("FROM", Ref("TableReferenceSegment")),
OneOf(
Sequence("NOT", "DEFERRABLE"),
Sequence(
Ref.keyword("DEFERRABLE", optional=True),
OneOf(
Sequence("INITIALLY", "IMMEDIATE"),
Sequence("INITIALLY", "DEFERRED"),
),
),
),
Sequence(
"REFERENCING",
OneOf("OLD", "NEW"),
"TABLE",
"AS",
Ref("TableReferenceSegment"),
Sequence(
OneOf("OLD", "NEW"),
"TABLE",
"AS",
Ref("TableReferenceSegment"),
optional=True,
),
),
Sequence(
"FOR", Ref.keyword("EACH", optional=True), OneOf("ROW", "STATEMENT")
),
Sequence("WHEN", Bracketed(Ref("ExpressionSegment"))),
),
Sequence(
"EXECUTE",
OneOf("FUNCTION", "PROCEDURE"),
Ref("FunctionNameIdentifierSegment"),
Bracketed(Ref("FunctionContentsGrammar", optional=True)),
),
)
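    # Illustrative statement this grammar is intended to parse (identifiers
    # are made up):
    #   CREATE TRIGGER log_update AFTER UPDATE OF name ON accounts
    #       FOR EACH ROW WHEN (OLD.name IS DISTINCT FROM NEW.name)
    #       EXECUTE FUNCTION log_account_update();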
@postgres_dialect.segment()
class AlterTriggerStatementSegment(BaseSegment):
"""Alter Trigger Statement.
As Specified in https://www.postgresql.org/docs/14/sql-altertrigger.html
"""
type = "alter_trigger"
match_grammar = Sequence("ALTER", "TRIGGER", Anything())
parse_grammar = Sequence(
"ALTER",
"TRIGGER",
Ref("TriggerReferenceSegment"),
"ON",
Ref("TableReferenceSegment"),
OneOf(
Sequence("RENAME", "TO", Ref("TriggerReferenceSegment")),
Sequence(
Ref.keyword("NO", optional=True),
"DEPENDS",
"ON",
"EXTENSION",
Ref("ParameterNameSegment"),
),
),
)
@postgres_dialect.segment(replace=True)
class DropTriggerStatementSegment(BaseSegment):
"""Drop Trigger Statement.
As Specified in https://www.postgresql.org/docs/14/sql-droptrigger.html
"""
type = "drop_trigger"
match_grammar = Sequence("DROP", "TRIGGER", Anything())
parse_grammar = Sequence(
"DROP",
"TRIGGER",
Sequence("IF", "EXISTS", optional=True),
Ref("TriggerReferenceSegment"),
"ON",
Ref("TableReferenceSegment"),
OneOf("CASCADE", "RESTRICT", optional=True),
)
@postgres_dialect.segment(replace=True)
class InsertStatementSegment(BaseSegment):
"""An `INSERT` statement.
As Specified in https://www.postgresql.org/docs/14/sql-insert.html
N.B. This is not a complete implementation of the documentation above.
TODO: Implement complete postgres insert statement structure.
"""
type = "insert_statement"
match_grammar = StartsWith("INSERT")
parse_grammar = Sequence(
"INSERT",
"INTO",
Ref("TableReferenceSegment"),
Ref("BracketedColumnReferenceListGrammar", optional=True),
Sequence("OVERRIDING", OneOf("SYSTEM", "USER"), "VALUE", optional=True),
Ref("SelectableGrammar"),
)
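    # Illustrative statement (identifiers are made up):
    #   INSERT INTO accounts (id, name) OVERRIDING SYSTEM VALUE
    #       SELECT id, name FROM staging_accounts;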
@postgres_dialect.segment(replace=True)
class DropTypeStatementSegment(BaseSegment):
"""Drop Type Statement.
As specified in https://www.postgresql.org/docs/14/sql-droptype.html
"""
type = "drop_type_statement"
match_grammar = Sequence(
"DROP",
"TYPE",
Ref("IfExistsGrammar", optional=True),
Delimited(Ref("DatatypeSegment")),
OneOf("CASCADE", "RESTRICT", optional=True),
)
@postgres_dialect.segment()
class SetStatementSegment(BaseSegment):
"""Set Statement.
As specified in https://www.postgresql.org/docs/14/sql-set.html
"""
type = "set_statement"
match_grammar = Sequence(
"SET",
OneOf("SESSION", "LOCAL", optional=True),
OneOf(
Sequence(
Ref("ParameterNameSegment"),
OneOf("TO", Ref("EqualsSegment")),
OneOf(
Delimited(Ref("LiteralGrammar"), Ref("NakedIdentifierSegment")),
"DEFAULT",
),
),
Sequence(
"TIME", "ZONE", OneOf(Ref("QuotedLiteralSegment"), "LOCAL", "DEFAULT")
),
),
)
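    # Illustrative statements (identifiers are made up):
    #   SET LOCAL search_path TO my_schema, public;
    #   SET TIME ZONE 'UTC';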
@postgres_dialect.segment()
class CreatePolicyStatementSegment(BaseSegment):
"""A `CREATE POLICY` statement.
As Specified in https://www.postgresql.org/docs/14/sql-createpolicy.html
"""
type = "create_policy_statement"
match_grammar = StartsWith(Sequence("CREATE", "POLICY"))
parse_grammar = Sequence(
"CREATE",
"POLICY",
Ref("ObjectReferenceSegment"),
"ON",
Ref("TableReferenceSegment"),
Sequence("AS", OneOf("PERMISSIVE", "RESTRICTIVE"), optional=True),
Sequence(
"FOR", OneOf("ALL", "SELECT", "INSERT", "UPDATE", "DELETE"), optional=True
),
Sequence(
"TO",
Delimited(
OneOf(
Ref("ObjectReferenceSegment"),
"PUBLIC",
"CURRENT_ROLE",
"CURRENT_USER",
"SESSION_USER",
)
),
optional=True,
),
Sequence("USING", Bracketed(Ref("ExpressionSegment")), optional=True),
Sequence("WITH", "CHECK", Bracketed(Ref("ExpressionSegment")), optional=True),
)
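    # Illustrative statement (identifiers and the USING expression are
    # made up):
    #   CREATE POLICY user_rows ON accounts FOR SELECT TO PUBLIC
    #       USING (owner = CURRENT_USER);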
@postgres_dialect.segment()
class DropPolicyStatementSegment(BaseSegment):
"""A `DROP POLICY` statement.
As Specified in https://www.postgresql.org/docs/14/sql-droppolicy.html
"""
type = "drop_policy_statement"
match_grammar = StartsWith(Sequence("DROP", "POLICY"))
parse_grammar = Sequence(
"DROP",
"POLICY",
Ref("IfExistsGrammar", optional=True),
Ref("ObjectReferenceSegment"),
"ON",
Ref("TableReferenceSegment"),
OneOf("CASCADE", "RESTRICT", optional=True),
)
@postgres_dialect.segment()
class LoadStatementSegment(BaseSegment):
"""A `LOAD` statement.
As Specified in https://www.postgresql.org/docs/14/sql-load.html
"""
type = "load_statement"
match_grammar = Sequence(
"LOAD",
Ref("QuotedLiteralSegment"),
)
@postgres_dialect.segment()
class ResetStatementSegment(BaseSegment):
"""A `RESET` statement.
As Specified in https://www.postgresql.org/docs/14/sql-reset.html
"""
type = "reset_statement"
match_grammar = Sequence(
"RESET",
OneOf("ALL", Ref("ParameterNameSegment")),
)
@postgres_dialect.segment()
class DiscardStatementSegment(BaseSegment):
"""A `DISCARD` statement.
As Specified in https://www.postgresql.org/docs/14/sql-discard.html
"""
type = "discard_statement"
match_grammar = Sequence(
"DISCARD",
OneOf(
"ALL",
"PLANS",
"SEQUENCES",
"TEMPORARY",
"TEMP",
),
)
@postgres_dialect.segment()
class ListenStatementSegment(BaseSegment):
"""A `LISTEN` statement.
As Specified in https://www.postgresql.org/docs/14/sql-listen.html
"""
type = "listen_statement"
match_grammar = Sequence("LISTEN", Ref("SingleIdentifierGrammar"))
@postgres_dialect.segment()
class NotifyStatementSegment(BaseSegment):
"""A `NOTIFY` statement.
As Specified in https://www.postgresql.org/docs/14/sql-notify.html
"""
type = "notify_statement"
match_grammar = Sequence(
"NOTIFY",
Ref("SingleIdentifierGrammar"),
Sequence(
Ref("CommaSegment"),
Ref("QuotedLiteralSegment"),
optional=True,
),
)
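    # Illustrative statement (channel name and payload are made up):
    #   NOTIFY jobs_channel, 'job 42 finished';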
@postgres_dialect.segment()
class UnlistenStatementSegment(BaseSegment):
"""A `UNLISTEN` statement.
As Specified in https://www.postgresql.org/docs/14/sql-unlisten.html
"""
type = "unlisten_statement"
match_grammar = Sequence(
"UNLISTEN",
OneOf(
Ref("SingleIdentifierGrammar"),
Ref("StarSegment"),
),
)
@postgres_dialect.segment(replace=True)
class TruncateStatementSegment(BaseSegment):
"""`TRUNCATE TABLE` statement.
https://www.postgresql.org/docs/14/sql-truncate.html
"""
type = "truncate_table"
match_grammar = Sequence(
"TRUNCATE",
Ref.keyword("TABLE", optional=True),
Delimited(
OneOf(
Sequence(
Ref.keyword("ONLY", optional=True),
Ref("TableReferenceSegment"),
),
Sequence(
Ref("TableReferenceSegment"),
Ref("StarSegment", optional=True),
),
),
),
Sequence(
OneOf("RESTART", "CONTINUE"),
"IDENTITY",
optional=True,
),
OneOf(
"CASCADE",
"RESTRICT",
optional=True,
),
)
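    # Illustrative statement (table name is made up):
    #   TRUNCATE TABLE ONLY staging_events RESTART IDENTITY CASCADE;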
| 32.02129 | 120 | 0.493821 |
acf4c84d26d58f95b142d8a039c79b2b96ef2574 | 828 | py | Python | Python/peso.py | Kauan677/Projetos-Python | 62f6b476e6d250d9ff31c95808b31ebd3ab4fdbb | [
"MIT"
] | 1 | 2022-03-03T23:19:57.000Z | 2022-03-03T23:19:57.000Z | Python/peso.py | Kauan677/Projetos-Python | 62f6b476e6d250d9ff31c95808b31ebd3ab4fdbb | [
"MIT"
] | null | null | null | Python/peso.py | Kauan677/Projetos-Python | 62f6b476e6d250d9ff31c95808b31ebd3ab4fdbb | [
"MIT"
] | null | null | null | from time import sleep
import colorama as color
color.init()
def kg():
maior = 0
menor = 0
for c in range(1, 6):
peso = float(input(f'Quantos Kg tem a {c}ª pessoa: Kg'))
if c == 1:
maior = peso
menor = peso
else:
if peso > maior:
maior = peso
if peso < menor:
menor = peso
print('Processing...')
sleep(3)
print(f'O maior peso da lista é \033[32m{maior}\033[mKg')
print(f'E o menor peso da lista é \033[31m{menor}Kg')
sleep(3.5)
while True:
retorno = kg()
retorno = str(input('\033[32mQuer consultar novamente?\033[m '))
if retorno in ['Sim', 'SIM', 'sim']:
pass
elif retorno in ['Não', 'NÃO', 'não', 'Nao', 'NAO','nao']:
break
else:
break
| 23 | 68 | 0.512077 |
acf4cab77efe37e003265bddb5166041c6aff5ea | 9,892 | py | Python | create_master_passwd_ui.py | aarriitt666/VPass | c0fb0304bbfedbcb2d658125d16b52f9d273f8ab | [
"MIT"
] | null | null | null | create_master_passwd_ui.py | aarriitt666/VPass | c0fb0304bbfedbcb2d658125d16b52f9d273f8ab | [
"MIT"
] | null | null | null | create_master_passwd_ui.py | aarriitt666/VPass | c0fb0304bbfedbcb2d658125d16b52f9d273f8ab | [
"MIT"
] | null | null | null | import os.path
import tkinter.messagebox
from tkinter import *
import cryptography.fernet
import encrypting
import get_hash
import key_generation
import mypass_ui
MAIN_BG = '#05132b'
MAIN_TEXT = '#2ce5e8'
MAIN_FONT = ('Courier', 14, 'normal')
SUB_FONT = ('Courier', 9, 'normal')
ENTRY_FONT = ('Courier', 14, 'italic')
BUTTON_FONT = ('Courier', 12, 'bold')
BUTTON_FONT2 = ('Courier', 11, 'bold')
ENTRY_BOXES_BG = '#ab8b82'
ENTRY_BOXES_FG = '#060d47'
BUTTON_BG = '#d4483b'
BUTTON_FG = '#ebeb0e'
BUTTON_ACTIVE_BG = '#780c1c'
BUTTON_ACTIVE_FG = '#d1d0c9'
IMPORTANT_FG = '#e3e154'
MAIN_HL_BG = '#5c350b'
valid_or_not = False
new_get_hash = None
class UserInterface(Tk):
def __init__(self):
super().__init__()
self.valid_or_not = valid_or_not
self.logins_data_existed = False
self.title('VPass')
self.config(bg=MAIN_BG)
self.minsize(870, 500)
# Variables
self.new_get_hash = new_get_hash
self.master_password_button_click = False
# Removing default title bar and default geometry
self.overrideredirect(True) # turns off title bar, geometry
self.geometry('870x500+150+75') # set new geometry
# make a frame for the title bar
self.title_bar = Frame(self, bg='black', relief='flat', bd=2)
self.title_bar.bind('<Map>', self.screen_appear)
self.title_bar.bind('<Button-3>', self.show_screen)
# Put a close button on the title bar
self.close_button = Button(self.title_bar, text='X', command=self.closing_app, fg=BUTTON_FG, bg=BUTTON_BG,
activeforeground='white', activebackground='black', highlightbackground=MAIN_HL_BG)
# Put a minimize button on the title bar
self.minimize_button = Button(self.title_bar, text='-', command=self.hide_screen, fg=BUTTON_FG, bg=BUTTON_BG,
activeforeground='white', activebackground='black',
highlightbackground=MAIN_HL_BG)
# Grid the widgets
self.title_bar.grid(ipady=0, ipadx=0, column=0, row=0, rowspan=16, columnspan=23, sticky=N)
self.close_button.grid(columnspan=23, padx=840, ipady=2, ipadx=7, column=2, row=0, sticky=E)
self.minimize_button.grid(columnspan=23, padx=810, ipady=2,
ipadx=7, column=1, row=0, sticky=E)
# Bind title bar motion to the move window function
self.title_bar.bind('<B1-Motion>', self.move_window)
# Canvas
self.canvas = Canvas(width=500, height=330)
self.canvas.grid(sticky=W, row=0, column=0, rowspan=16, columnspan=4, padx=0)
self.logo_img = PhotoImage(file='logo_custom.png')
self.canvas.create_image(300, 120, image=self.logo_img)
self.canvas.config(bg=MAIN_BG, highlightthickness=0)
# Welcome Label
self.welcome_label = Label(text='Welcome. Please create a master password!', bg=MAIN_BG, font=MAIN_FONT,
fg=MAIN_TEXT,
highlightthickness=0)
self.welcome_label.place(x=230, y=290)
# Password Label
self.password_label1 = Label(text='Create', bg=MAIN_BG,
font=MAIN_FONT, fg=MAIN_TEXT, highlightthickness=0)
self.password_label1.grid(sticky=E, row=6, column=0, rowspan=16, padx=25)
self.password_label2 = Label(text='Master', bg=MAIN_BG,
font=MAIN_FONT, fg=MAIN_TEXT, highlightthickness=0)
self.password_label2.grid(sticky=E, row=7, column=0, rowspan=16, padx=25)
self.password_label3 = Label(text='Password', bg=MAIN_BG,
font=MAIN_FONT, fg=MAIN_TEXT, highlightthickness=0)
self.password_label3.grid(sticky=E, row=8, column=0, rowspan=16, padx=25)
# Password Entry Box
self.password_entry_box_txt = StringVar()
self.password_entry_box = Entry(textvariable=self.password_entry_box_txt, font=ENTRY_FONT, width=29,
highlightthickness=0, bg=ENTRY_BOXES_BG, fg=ENTRY_BOXES_FG)
self.password_entry_box.focus()
self.password_entry_box.grid(sticky=W, row=6, column=1, rowspan=16, columnspan=23, ipady=3)
self.password_entry_box.bind(
'<Return>', self.add_or_change_master_password_button_click_bind)
# Show Info Label
self.show_important_info_label1 = Label(text='Don\'t forget', bg=MAIN_BG, font=SUB_FONT, fg=IMPORTANT_FG,
highlightthickness=0)
self.show_important_info_label1.place(x=610, y=340)
self.show_important_info_label2 = Label(text='your master password!', bg=MAIN_BG, font=SUB_FONT,
fg=IMPORTANT_FG,
highlightthickness=0)
self.show_important_info_label2.place(x=580, y=360)
# tkinter.messagebox.showinfo('showinfo', 'Don\'t forget your master password!')
# Save Button
add_button_txt = StringVar()
self.add_button = Button(textvariable=add_button_txt, width=49, font=BUTTON_FONT, highlightthickness=3, bd=0,
bg=BUTTON_BG, activebackground=BUTTON_ACTIVE_BG, fg=BUTTON_FG,
activeforeground=BUTTON_ACTIVE_FG,
command=self.add_or_change_master_password_button_click)
self.add_button.grid(sticky=W, row=8, column=1, rowspan=16, columnspan=23, pady=400)
add_button_txt.set('Save')
# Back to main app Button
self.back_to_vpass_button_txt = StringVar()
self.back_to_vpass_button = Button(textvariable=self.back_to_vpass_button_txt, width=15, font=BUTTON_FONT,
highlightthickness=3, bd=0,
bg=MAIN_BG, activebackground=BUTTON_ACTIVE_BG, fg=BUTTON_FG,
activeforeground=BUTTON_ACTIVE_FG, command=self.back_to_vpass_app_wins)
self.back_to_vpass_button_txt.set('Back to Vpass')
self.back_to_vpass_button.place(x=630, y=70)
def move_window(self, event):
self.geometry('+{0}+{1}'.format(event.x_root, event.y_root))
def hide_screen(self):
self.overrideredirect(False)
self.iconify()
    def show_screen(self, event=None):
self.deiconify()
self.overrideredirect(True)
def screen_appear(self, event):
self.overrideredirect(True)
def add_or_change_master_password_button_click_bind(self, event):
self.add_or_change_master_password_button_click()
def add_or_change_master_password_button_click(self):
self.master_password_button_click = True
self.add_or_change_master_password()
return self.master_password_button_click
def add_or_change_master_password(self):
global new_get_hash
if self.master_password_button_click is True:
get_user_master_passwd = self.password_entry_box.get()
if len(get_user_master_passwd) == 0:
                tkinter.messagebox.showinfo('Return', 'This field cannot be empty when saving!')
else:
new_get_hash = get_hash.GetHash()
new_get_hash.let_us_hash_it(password=get_user_master_passwd)
new_key_gen = key_generation.KeyGen()
new_key_gen.automate_key_generation_using_password_and_salt(
user_password=get_user_master_passwd)
if new_get_hash.hash_it_has_it:
self.password_entry_box_txt.set('')
                    question_result = tkinter.messagebox.askquestion(
                        'Return', 'Do you want to return to the main application?')
if question_result == 'yes':
self.destroy()
mypass_ui.UserInterface()
else:
self.destroy()
UserInterface()
def back_to_vpass_app_wins(self):
self.destroy()
mypass_ui.UserInterface()
def encryption_starting(self):
new_encrypting = encrypting.Encrypting()
new_valid_or_not = self.valid_or_not
new_encrypting.encrypting(passwd_validity=new_valid_or_not, file_path='logins_data.csv')
new_encrypting.encrypting(passwd_validity=new_valid_or_not,
file_path='logins_data_json.json')
def decryption_starting(self):
new_decrypting = encrypting.Encrypting()
new_valid_or_not = self.valid_or_not
new_decrypting.decrypting(passwd_validity=new_valid_or_not, file_path='logins_data.csv')
new_decrypting.decrypting(passwd_validity=new_valid_or_not,
file_path='logins_data_json.json')
def closing_app(self):
if self.valid_or_not is True:
try:
self.encryption_starting()
except (cryptography.fernet.InvalidToken, TypeError):
with open('vpass_error_log.txt', mode='a') as f:
                    custom_error_msg = 'In closing_app function of create_master_passwd_ui.py, a ' \
                                       'Fernet InvalidToken error was raised when trying to encrypt ' \
                                       'using the encryption_starting function. A TypeError may also ' \
                                       'be raised if the content is not a bytes type.'
f.write(custom_error_msg)
self.destroy()
def login_data_exist(self):
if os.path.exists('logins_data.csv'):
self.logins_data_existed = True
return self.logins_data_existed
def main():
UserInterface()
mainloop()
if __name__ == '__main__':
main()
| 47.557692 | 120 | 0.623433 |
acf4cbe6b98c97d493d1b518ba43aa7f6dc87b43 | 8,450 | py | Python | sunpy/io/cdf.py | akash5100/sunpy | 6f586392f9799383017e0566d4303928183c06be | [
"BSD-2-Clause"
] | null | null | null | sunpy/io/cdf.py | akash5100/sunpy | 6f586392f9799383017e0566d4303928183c06be | [
"BSD-2-Clause"
] | null | null | null | sunpy/io/cdf.py | akash5100/sunpy | 6f586392f9799383017e0566d4303928183c06be | [
"BSD-2-Clause"
] | null | null | null | import cdflib
import pandas as pd
from cdflib.epochs import CDFepoch
import astropy.units as u
from sunpy import log
from sunpy.timeseries import GenericTimeSeries
from sunpy.util.exceptions import warn_user
__all__ = ['read_cdf']
def read_cdf(fname):
"""
Read a CDF file that follows the ISTP/IACG guidelines.
Parameters
----------
fname : path-like
Location of single CDF file to read.
Returns
-------
list[GenericTimeSeries]
A list of time series objects, one for each unique time index within
the CDF file.
References
----------
Space Physics Guidelines for CDF https://spdf.gsfc.nasa.gov/sp_use_of_cdf.html
"""
cdf = cdflib.CDF(str(fname))
# Extract the time varying variables
cdf_info = cdf.cdf_info()
meta = cdf.globalattsget()
all_var_keys = cdf_info['rVariables'] + cdf_info['zVariables']
var_attrs = {key: cdf.varattsget(key) for key in all_var_keys}
# Get keys that depend on time
var_keys = [var for var in var_attrs if 'DEPEND_0' in var_attrs[var]]
# Get unique time index keys
time_index_keys = sorted(set([var_attrs[var]['DEPEND_0'] for var in var_keys]))
all_ts = []
# For each time index, construct a GenericTimeSeries
for index_key in time_index_keys:
try:
index = cdf.varget(index_key)
except ValueError:
# Empty index for cdflib >= 0.3.20
continue
if index is None:
# Empty index for cdflib <0.3.20
continue
# TODO: use to_astropy_time() instead here when we drop pandas in timeseries
index = CDFepoch.to_datetime(index)
df = pd.DataFrame(index=pd.DatetimeIndex(name=index_key, data=index))
units = {}
for var_key in sorted(var_keys):
attrs = var_attrs[var_key]
if attrs['DEPEND_0'] != index_key:
continue
# Get data
if cdf.varinq(var_key)['Last_Rec'] == -1:
log.debug(f'Skipping {var_key} in {fname} as it has zero elements')
continue
data = cdf.varget(var_key)
# Get units
if 'UNITS' in attrs:
unit_str = attrs['UNITS']
try:
unit = u.Unit(unit_str)
except ValueError:
if unit_str in _known_units:
unit = _known_units[unit_str]
else:
warn_user(f'astropy did not recognize units of "{unit_str}". '
'Assigning dimensionless units. '
'If you think this unit should not be dimensionless, '
'please raise an issue at https://github.com/sunpy/sunpy/issues')
unit = u.dimensionless_unscaled
else:
warn_user(f'No units provided for variable "{var_key}". '
'Assigning dimensionless units.')
unit = u.dimensionless_unscaled
if data.ndim > 2:
# Skip data with dimensions >= 3 and give user warning
warn_user(f'The variable "{var_key}" has been skipped because it has more than 2 dimensions, which is unsupported.')
elif data.ndim == 2:
# Multiple columns, give each column a unique label
for i, col in enumerate(data.T):
df[var_key + f'_{i}'] = col
units[var_key + f'_{i}'] = unit
else:
# Single column
df[var_key] = data
units[var_key] = unit
all_ts.append(GenericTimeSeries(data=df, units=units, meta=meta))
if not len(all_ts):
log.debug(f'No data found in file {fname}')
return all_ts
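# Example usage (illustrative sketch; 'omni.cdf' is a made-up file name):
#
#     from sunpy.io.cdf import read_cdf
#     all_ts = read_cdf('omni.cdf')
#     for ts in all_ts:
#         print(ts.columns)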
# Unfortunately (unlike e.g. FITS), there is no standard for the strings that
# CDF files use to represent units. To allow for this we maintain a dictionary
# mapping unit strings to their astropy unit equivalents.
#
# Please only add new entries if
# 1. A user identifies which specific mission/data source they are needed for
# 2. The mapping from the string to unit is un-ambiguous. If we get this
# wrong then users will silently have the wrong units in their data!
_known_units = {'ratio': u.dimensionless_unscaled,
'NOTEXIST': u.dimensionless_unscaled,
'Unitless': u.dimensionless_unscaled,
'unitless': u.dimensionless_unscaled,
'Quality_Flag': u.dimensionless_unscaled,
'None': u.dimensionless_unscaled,
'none': u.dimensionless_unscaled,
' none': u.dimensionless_unscaled,
'counts': u.dimensionless_unscaled,
'cnts': u.dimensionless_unscaled,
                'microW m^-2': u.uW * u.m**-2,
'years': u.yr,
'days': u.d,
'#/cc': u.cm**-3,
'#/cm^3': u.cm**-3,
'cm^{-3}': u.cm**-3,
'particles cm^-3': u.cm**-3,
'n/cc (from moments)': u.cm**-3,
'n/cc (from fits)': u.cm**-3,
'Per cc': u.cm**-3,
'#/cm3': u.cm**-3,
'n/cc': u.cm**-3,
'km/sec': u.km / u.s,
'km/sec (from fits)': u.km / u.s,
'km/sec (from moments)': u.km / u.s,
'Km/s': u.km / u.s,
'Volts': u.V,
'earth radii': u.earthRad,
'Re': u.earthRad,
'Earth Radii': u.earthRad,
'Re (1min)': u.earthRad,
'Re (1hr)': u.earthRad,
'Degrees': u.deg,
'degrees': u.deg,
'Deg': u.deg,
'deg (from fits)': u.deg,
'deg (from moments)': u.deg,
'deg (>200)': u.deg,
'Deg K': u.K,
'deg_K': u.K,
'#/{cc*(cm/s)^3}': (u.cm**3 * (u.cm / u.s)**3)**-1,
'sec': u.s,
'Samples/s': 1 / u.s,
'seconds': u.s,
'nT GSE': u.nT,
'nT GSM': u.nT,
'nT DSL': u.nT,
'nT SSL': u.nT,
'nT (1min)': u.nT,
'nT (3sec)': u.nT,
'nT (1hr)': u.nT,
'nT (>200)': u.nT,
'msec': u.ms,
'milliseconds': u.ms,
'#/cm2-ster-eV-sec': 1 / (u.cm**2 * u.sr * u.eV * u.s),
'#/(cm^2*s*sr*MeV/nuc)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'#/(cm^2*s*sr*Mev/nuc)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'#/(cm^2*s*sr*Mev/nucleon)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'#/(cm2-steradian-second-MeV/nucleon) ': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'1/(cm2 Sr sec MeV/nucleon)': 1 / (u.cm**2 * u.sr * u.s * u.MeV),
'1/(cm**2-s-sr-MeV)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'1/(cm**2-s-sr-MeV/nuc.)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'(cm^2 s sr MeV/n)^-1': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'cm!E-2!Nsr!E-1!Nsec!E-1!N(MeV/nuc)!E-1!N': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'cm!E-2!Nsr!E-1!Nsec!E-1!NMeV!E-1!N': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'1/(cm^2 sec ster MeV)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'(cm^2 s sr MeV)^-1': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'cnts/sec/sr/cm^2/MeV': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'particles / (s cm^2 sr MeV)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'particles / (s cm^2 sr MeV/n)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'particles/(s cm2 sr MeV/n)': 1 / (u.cm**2 * u.s * u.sr * u.MeV),
'1/(cm**2-s-sr)': 1 / (u.cm**2 * u.s * u.sr),
'1/(SQcm-ster-s)': 1 / (u.cm**2 * u.s * u.sr),
'1/(SQcm-ster-s)..': 1 / (u.cm**2 * u.s * u.sr),
'photons cm^-2 s^-1': 1 / (u.cm**2 * u.s),
'Counts/256sec': 1 / (256 * u.s),
'Counts/hour': 1 / u.hr,
'counts/min': 1 / u.min,
'counts / s': 1/u.s,
'counts/s': 1/u.s,
'cnts/sec': 1/u.s,
'counts s!E-1!N': 1/u.s,
}
| 38.761468 | 132 | 0.473728 |
acf4cc8dd1c726d1ae252f102a55ebcb659c9134 | 6,290 | py | Python | pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_rsvp_session_extensive/output/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_rsvp_session_extensive/output/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | null | null | null | pybind/slxos/v16r_1_00b/brocade_mpls_rpc/show_mpls_rsvp_session_extensive/output/__init__.py | shivharis/pybind | 4e1c6d54b9fd722ccec25546ba2413d79ce337e6 | [
"Apache-2.0"
] | 1 | 2021-11-05T22:15:42.000Z | 2021-11-05T22:15:42.000Z |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import mpls_rsvp_session_extensive
class output(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls - based on the path /brocade_mpls_rpc/show-mpls-rsvp-session-extensive/output. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__mpls_rsvp_session_extensive',)
_yang_name = 'output'
_rest_name = 'output'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__mpls_rsvp_session_extensive = YANGDynClass(base=mpls_rsvp_session_extensive.mpls_rsvp_session_extensive, is_container='container', presence=False, yang_name="mpls-rsvp-session-extensive", rest_name="mpls-rsvp-session-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'brocade_mpls_rpc', u'show-mpls-rsvp-session-extensive', u'output']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'show-mpls-rsvp-session-extensive', u'output']
def _get_mpls_rsvp_session_extensive(self):
"""
Getter method for mpls_rsvp_session_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_rsvp_session_extensive/output/mpls_rsvp_session_extensive (container)
"""
return self.__mpls_rsvp_session_extensive
def _set_mpls_rsvp_session_extensive(self, v, load=False):
"""
Setter method for mpls_rsvp_session_extensive, mapped from YANG variable /brocade_mpls_rpc/show_mpls_rsvp_session_extensive/output/mpls_rsvp_session_extensive (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_mpls_rsvp_session_extensive is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mpls_rsvp_session_extensive() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=mpls_rsvp_session_extensive.mpls_rsvp_session_extensive, is_container='container', presence=False, yang_name="mpls-rsvp-session-extensive", rest_name="mpls-rsvp-session-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """mpls_rsvp_session_extensive must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=mpls_rsvp_session_extensive.mpls_rsvp_session_extensive, is_container='container', presence=False, yang_name="mpls-rsvp-session-extensive", rest_name="mpls-rsvp-session-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""",
})
self.__mpls_rsvp_session_extensive = t
if hasattr(self, '_set'):
self._set()
def _unset_mpls_rsvp_session_extensive(self):
self.__mpls_rsvp_session_extensive = YANGDynClass(base=mpls_rsvp_session_extensive.mpls_rsvp_session_extensive, is_container='container', presence=False, yang_name="mpls-rsvp-session-extensive", rest_name="mpls-rsvp-session-extensive", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions=None, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)
mpls_rsvp_session_extensive = __builtin__.property(_get_mpls_rsvp_session_extensive, _set_mpls_rsvp_session_extensive)
_pyangbind_elements = {'mpls_rsvp_session_extensive': mpls_rsvp_session_extensive, }
| 50.725806 | 469 | 0.747536 |
acf4cda16135e0e1564790d6475ce1876a59b681 | 622 | py | Python | eats/languages/eu.py | kingsdigitallab/sdo-django | 28068ba9adb065ed36326bd23edd7778df79e03f | [
"MIT"
] | null | null | null | eats/languages/eu.py | kingsdigitallab/sdo-django | 28068ba9adb065ed36326bd23edd7778df79e03f | [
"MIT"
] | 5 | 2021-03-19T08:51:21.000Z | 2021-06-09T19:11:07.000Z | eats/languages/eu.py | kingsdigitallab/sdo-django | 28068ba9adb065ed36326bd23edd7778df79e03f | [
"MIT"
] | null | null | null | """Module defining the language rules for Basque."""
def sort_parts(parts):
"""Return a list of name parts sorted into display order for the
language."""
given = family = title = ''
for part in parts:
system_name_part_type = str(
part.name_part_type.system_name_part_type)
name_part = part.name_part
if system_name_part_type == 'given':
given = name_part
elif system_name_part_type == 'family':
family = name_part
elif system_name_part_type == 'terms of address':
title = name_part
return (title, given, family)
| 32.736842 | 68 | 0.631833 |
acf4cdc057372a77f58463ad70c630f8e639ce29 | 4,515 | py | Python | python3/koans/about_iteration.py | michalszczecinski/python_koans_my_solutions | 508e64e60f1e77dcf66c61522483588bd44ebe09 | [
"MIT"
] | null | null | null | python3/koans/about_iteration.py | michalszczecinski/python_koans_my_solutions | 508e64e60f1e77dcf66c61522483588bd44ebe09 | [
"MIT"
] | null | null | null | python3/koans/about_iteration.py | michalszczecinski/python_koans_my_solutions | 508e64e60f1e77dcf66c61522483588bd44ebe09 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
import logging, sys
logger = logging.getLogger()
logger.level = logging.DEBUG
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)
class AboutIteration(Koan):
def test_iterators_are_a_type(self):
# stream_handler.stream = sys.stdout
it = iter(range(1,6))
total = 0
for num in it:
total += num
# logging.getLogger().info(num)
self.assertEqual(15 , total)
def test_iterating_with_next(self):
stages = iter(['alpha','beta','gamma'])
try:
self.assertEqual('alpha', next(stages))
next(stages)
self.assertEqual('gamma', next(stages))
next(stages)
except StopIteration as ex:
err_msg = 'Ran out of iterations'
self.assertRegex(err_msg, 'Ran out of iterations')
# ------------------------------------------------------------------
def add_ten(self, item):
return item + 10
def test_map_transforms_elements_of_a_list(self):
seq = [1, 2, 3]
mapped_seq = list()
mapping = map(self.add_ten, seq)
self.assertNotEqual(list, mapping.__class__)
self.assertEqual(map, mapping.__class__)
# In Python 3 built in iterator funcs return iterable view objects
# instead of lists
for item in mapping:
mapped_seq.append(item)
self.assertEqual([11,12,13], mapped_seq)
# Note, iterator methods actually return objects of iter type in
# python 3. In python 2 map() would give you a list.
def test_filter_selects_certain_items_from_a_list(self):
def is_even(item):
return (item % 2) == 0
seq = [1, 2, 3, 4, 5, 6]
even_numbers = list()
for item in filter(is_even, seq):
even_numbers.append(item)
self.assertEqual([2,4,6], even_numbers)
def test_just_return_first_item_found(self):
def is_big_name(item):
return len(item) > 4
names = ["Jim", "Bill", "Clarence", "Doug", "Eli"]
name = None
iterator = filter(is_big_name, names)
try:
name = next(iterator)
except StopIteration:
msg = 'Ran out of big names'
self.assertEqual('Clarence', name)
# ------------------------------------------------------------------
def add(self,accum,item):
return accum + item
def multiply(self,accum,item):
return accum * item
def test_reduce_will_blow_your_mind(self):
import functools
# As of Python 3 reduce() has been demoted from a builtin function
# to the functools module.
result = functools.reduce(self.add, [2, 3, 4])
self.assertEqual(int, result.__class__)
# Reduce() syntax is same as Python 2
self.assertEqual(9, result)
result2 = functools.reduce(self.multiply, [2, 3, 4], 1)
self.assertEqual(24, result2)
# Extra Credit:
# Describe in your own words what reduce does.
        # reduce applies the function to the sequence in a cumulative way:
        # it takes each item of the sequence and computes the next result
        # from the previous result of the function.
        # For example reduce(lambda x, y: x - y, [1, 2, 3]) => (1 - 2) - 3 => -4
        # In short, reduce collapses the sequence into a single value.
# ------------------------------------------------------------------
def test_use_pass_for_iterations_with_no_body(self):
for num in range(1,5):
pass
self.assertEqual(4, num)
# ------------------------------------------------------------------
def test_all_iteration_methods_work_on_any_sequence_not_just_lists(self):
# Ranges are an iterable sequence
result = map(self.add_ten, range(1,4))
self.assertEqual([11,12,13], list(result))
try:
file = open("example_file.txt")
try:
def make_upcase(line):
return line.strip().upper()
upcase_lines = map(make_upcase, file.readlines())
self.assertEqual(['THIS','IS','A','TEST'], list(upcase_lines))
finally:
# Arg, this is ugly.
# We will figure out how to fix this later.
file.close()
except IOError:
# should never happen
self.fail()
| 29.509804 | 122 | 0.557918 |
acf4cdc8067ae5b940b0ae8401bf65e710832989 | 2,851 | py | Python | functions/TStudentMean.py | h-kyouma/IDS_statistics | b9eb26b757dfb1ee88e4a78f5b6ee304fca39420 | [
"MIT"
] | null | null | null | functions/TStudentMean.py | h-kyouma/IDS_statistics | b9eb26b757dfb1ee88e4a78f5b6ee304fca39420 | [
"MIT"
] | null | null | null | functions/TStudentMean.py | h-kyouma/IDS_statistics | b9eb26b757dfb1ee88e4a78f5b6ee304fca39420 | [
"MIT"
] | null | null | null | # Jacek Wolski 12/03/2022 - T-Student test for equality of mean values
import csv
import math
from scipy.stats import t
def t_student_test_mean(csv_file='data\\TStudentMean.csv'):
print("T-Student test for equality of mean values.\nData file: " + csv_file)
rows = []
with open(csv_file, 'r',encoding = 'utf-8-sig') as file:
csvreader = csv.DictReader(file, delimiter =';')
alpha_row = next(csvreader)
alpha = float(alpha_row['Col2'])
sample_size_row = next(csvreader)
sample1_size = int(sample_size_row['Col1'])
sample2_size = int(sample_size_row['Col2'])
data_headers_row = next(csvreader)
for row in csvreader:
rows.append(row)
print('Alpha: ' + str(alpha))
print('Data headers: ' + data_headers_row['Col1'] + ' ' + data_headers_row['Col2'])
print(rows)
print('Sample size of: ' + data_headers_row['Col1'] + ' is ' + str(sample1_size))
print('Sample size of: ' + data_headers_row['Col2'] + ' is ' + str(sample2_size))
# compute means
sum1 = 0.0
sum2 = 0.0
for row in rows:
sample1 = row['Col1']
sample2 = row['Col2']
if sample1 != '':
sum1 = sum1 + float(sample1)
if sample2 != '':
sum2 = sum2 + float(sample2)
mean1 = sum1 / sample1_size
mean2 = sum2 / sample2_size
# compute variances
var1 = 0.0
var2 = 0.0
for row in rows:
sample1 = row['Col1']
sample2 = row['Col2']
if sample1 != '':
var1 = var1 + abs(float(sample1) - mean1)**2
if sample2 != '':
var2 = var2 + abs(float(sample2) - mean2)**2
variance1 = var1 / (sample1_size - 1)
variance2 = var2 / (sample2_size - 1)
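    # Pooled two-sample t statistic:
    #   t = (mean1 - mean2) / sqrt(s_p^2 * (1/n1 + 1/n2)),
    # where s_p^2 = (SS1 + SS2) / (n1 + n2 - 2) and SS1/SS2 are the sums of
    # squared deviations (var1/var2 above, before dividing by n - 1).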
result = (mean1 - mean2) / math.sqrt((var1 + var2)/(sample1_size + sample2_size - 2)*(1.0/sample1_size + 1.0/sample2_size))
print('Sample ' + data_headers_row['Col1'] + ' mean is ' + str(mean1) + ' and variance is ' + str(variance1))
print('Sample ' + data_headers_row['Col2'] + ' mean is ' + str(mean2) + ' and variance is ' + str(variance2))
# look up T-Student table
t_student = t.ppf(1 - alpha/2, sample1_size + sample2_size - 2)
result_list = {'alpha': alpha, 'result': result, 'T-Student': t_student}
return result_list
if __name__ == '__main__':
test_result = t_student_test_mean()
print("T-Student Test results: " + str(test_result))
if abs(test_result['result']) < test_result['T-Student']:
print("HO hypothesis NOT rejected (result is abs(" + str(test_result['result']) + "), which is smaller than T-Student value = " + str(test_result['T-Student']) + ")")
else:
print("HO hypothesis is rejected (result is abs(" + str(test_result['result']) + "(, which is greater of equal to T-Student value = " + str(test_result['T-Student']) + ")")
| 40.15493 | 180 | 0.611364 |
acf4d01dd1c6bac0e63b69bdc04c3c6c71f991b2 | 5,560 | py | Python | tensorflow/g3doc/tutorials/mnist/mnist.py | vsilyaev/tensorflow | f41959ccb2d9d4c722fe8fc3351401d53bcf4900 | [
"Apache-2.0"
] | 4 | 2021-06-11T09:43:32.000Z | 2021-11-17T11:15:52.000Z | tensorflow/g3doc/tutorials/mnist/mnist.py | TheRockStarDBA/tensorflow | db0b5da485e1d1f23003ee08ed2e191451ee0319 | [
"Apache-2.0"
] | null | null | null | tensorflow/g3doc/tutorials/mnist/mnist.py | TheRockStarDBA/tensorflow | db0b5da485e1d1f23003ee08ed2e191451ee0319 | [
"Apache-2.0"
] | 2 | 2015-11-13T21:11:49.000Z | 2015-11-29T04:13:49.000Z | """Builds the MNIST network.
Implements the inference/loss/training pattern for model building.
1. inference() - Builds the model as far as is required for running the network
forward to make predictions.
2. loss() - Adds to the inference model the layers required to generate loss.
3. training() - Adds to the loss model the Ops required to generate and
apply gradients.
This file is used by the various "fully_connected_*.py" files and not meant to
be run.
TensorFlow install instructions:
https://tensorflow.org/get_started/os_setup.html
MNIST tutorial:
https://tensorflow.org/tutorials/mnist/tf/index.html
"""
import math
import tensorflow.python.platform
import tensorflow as tf
# The MNIST dataset has 10 classes, representing the digits 0 through 9.
NUM_CLASSES = 10
# The MNIST images are always 28x28 pixels.
IMAGE_SIZE = 28
IMAGE_PIXELS = IMAGE_SIZE * IMAGE_SIZE
def inference(images, hidden1_units, hidden2_units):
"""Build the MNIST model up to where it may be used for inference.
Args:
images: Images placeholder, from inputs().
    hidden1_units: Size of the first hidden layer.
    hidden2_units: Size of the second hidden layer.
Returns:
softmax_linear: Output tensor with the computed logits.
"""
# Hidden 1
with tf.name_scope('hidden1') as scope:
weights = tf.Variable(
tf.truncated_normal([IMAGE_PIXELS, hidden1_units],
stddev=1.0 / math.sqrt(float(IMAGE_PIXELS))),
name='weights')
biases = tf.Variable(tf.zeros([hidden1_units]),
name='biases')
hidden1 = tf.nn.relu(tf.matmul(images, weights) + biases)
# Hidden 2
with tf.name_scope('hidden2') as scope:
weights = tf.Variable(
tf.truncated_normal([hidden1_units, hidden2_units],
stddev=1.0 / math.sqrt(float(hidden1_units))),
name='weights')
biases = tf.Variable(tf.zeros([hidden2_units]),
name='biases')
hidden2 = tf.nn.relu(tf.matmul(hidden1, weights) + biases)
# Linear
with tf.name_scope('softmax_linear') as scope:
weights = tf.Variable(
tf.truncated_normal([hidden2_units, NUM_CLASSES],
stddev=1.0 / math.sqrt(float(hidden2_units))),
name='weights')
biases = tf.Variable(tf.zeros([NUM_CLASSES]),
name='biases')
logits = tf.matmul(hidden2, weights) + biases
return logits
def loss(logits, labels):
"""Calculates the loss from the logits and the labels.
Args:
logits: Logits tensor, float - [batch_size, NUM_CLASSES].
labels: Labels tensor, int32 - [batch_size].
Returns:
loss: Loss tensor of type float.
"""
  # Convert from sparse integer labels in the range [0, NUM_CLASSES)
# to 1-hot dense float vectors (that is we will have batch_size vectors,
# each with NUM_CLASSES values, all of which are 0.0 except there will
# be a 1.0 in the entry corresponding to the label).
batch_size = tf.size(labels)
labels = tf.expand_dims(labels, 1)
indices = tf.expand_dims(tf.range(0, batch_size, 1), 1)
concated = tf.concat(1, [indices, labels])
onehot_labels = tf.sparse_to_dense(
concated, tf.pack([batch_size, NUM_CLASSES]), 1.0, 0.0)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits,
onehot_labels,
name='xentropy')
loss = tf.reduce_mean(cross_entropy, name='xentropy_mean')
return loss
def training(loss, learning_rate):
"""Sets up the training Ops.
Creates a summarizer to track the loss over time in TensorBoard.
Creates an optimizer and applies the gradients to all trainable variables.
The Op returned by this function is what must be passed to the
`sess.run()` call to cause the model to train.
Args:
loss: Loss tensor, from loss().
learning_rate: The learning rate to use for gradient descent.
Returns:
train_op: The Op for training.
"""
# Add a scalar summary for the snapshot loss.
tf.scalar_summary(loss.op.name, loss)
# Create the gradient descent optimizer with the given learning rate.
optimizer = tf.train.GradientDescentOptimizer(learning_rate)
# Create a variable to track the global step.
global_step = tf.Variable(0, name='global_step', trainable=False)
# Use the optimizer to apply the gradients that minimize the loss
# (and also increment the global step counter) as a single training step.
train_op = optimizer.minimize(loss, global_step=global_step)
return train_op
def evaluation(logits, labels):
"""Evaluate the quality of the logits at predicting the label.
Args:
logits: Logits tensor, float - [batch_size, NUM_CLASSES].
labels: Labels tensor, int32 - [batch_size], with values in the
range [0, NUM_CLASSES).
Returns:
A scalar int32 tensor with the number of examples (out of batch_size)
that were predicted correctly.
"""
# For a classifier model, we can use the in_top_k Op.
# It returns a bool tensor with shape [batch_size] that is true for
# the examples where the label's is was in the top k (here k=1)
# of all logits for that example.
correct = tf.nn.in_top_k(logits, labels, 1)
# Return the number of true entries.
return tf.reduce_sum(tf.cast(correct, tf.int32))
| 37.315436 | 79 | 0.659532 |
acf4d0a748dc8402967689d863b1b66a6cdcfe7e | 6,721 | py | Python | Code/generate_dataset_types.py | asmitapoddar/Deep-Learning-DNA-Sequences | 90bcd252485b86ab603baf967bb61dda29beb5e2 | [
"MIT"
] | 5 | 2021-06-16T16:54:36.000Z | 2022-02-24T07:04:58.000Z | Code/generate_dataset_types.py | asmitapoddar/Deep-Learning-DNA-Sequences | 90bcd252485b86ab603baf967bb61dda29beb5e2 | [
"MIT"
] | null | null | null | Code/generate_dataset_types.py | asmitapoddar/Deep-Learning-DNA-Sequences | 90bcd252485b86ab603baf967bb61dda29beb5e2 | [
"MIT"
] | 2 | 2022-02-14T09:22:25.000Z | 2022-03-02T07:14:36.000Z | import pandas as pd
from dataset_utils import *
columns = ['Type', 'Sequence', 'Interval_Indices', 'GeneID', 'Gene_Strand', 'Boundary_Position']
finaldf = pd.DataFrame(columns=columns)  # For sanity check
def boundary_exon_intron_3classification(exon_boundary_set_final,exon_boundary_final,
within_exon_seq_intervals, within_intron_seq_intervals,
gene, MAX_LENGTH, OFFSET_RANGE):
'''
Function to create training set [start intervals/ end intervals] (positive samples)
:param exon_boundary_intervals_final: list of intervals
:param exon_boundary_final: list of int containing the exon boundaries
:param within_exon_seq_intervals: list of intervals
:param within_intron_seq_intervals: list of intervals
:param gene: gene information (JSON)
:param MAX_LENGTH:
:return: 2 lists
list of str: training_set_x containing the DNA seqeunces
list of int: training_set_y containing the exon boundary position
Sequences containing Exon Boundary: Class 0
Purely Exonic Sequences: Class 1
Purely Intronic Sequences: Class 2
'''
sxboundary, syboundary = create_training_set(exon_boundary_set_final,
[0] * len(exon_boundary_final), gene, MAX_LENGTH)
sxexon, syexon = create_training_set(within_exon_seq_intervals,
[1] * len(within_exon_seq_intervals), gene, MAX_LENGTH)
sxintron, syintron = create_training_set(within_intron_seq_intervals,
[2] * len(within_intron_seq_intervals), gene, MAX_LENGTH)
# For Sanity Checking -
dfboundary = sanity_check(sxboundary, exon_boundary_set_final, columns, 'Boundary', gene, exon_boundary_final)
dfexon = sanity_check(sxexon, within_exon_seq_intervals, columns, 'Exon', gene)
dfintron = sanity_check(sxintron, within_intron_seq_intervals, columns, 'Intron', gene)
sanity_df = [dfboundary, dfexon, dfintron]
return (sxboundary + sxexon + sxintron, syboundary + syexon + syintron), sanity_df
def boundary_orNot_2classification(exon_boundary_set_final,exon_boundary_final,
within_exon_seq_intervals, within_intron_seq_intervals,
gene, MAX_LENGTH, OFFSET_RANGE):
sxboundary, syboundary = create_training_set(exon_boundary_set_final,
[0] * len(exon_boundary_final), gene, MAX_LENGTH)
sxexon, syexon = create_training_set(within_exon_seq_intervals,
[1] * len(within_exon_seq_intervals), gene, MAX_LENGTH)
sxintron, syintron = create_training_set(within_intron_seq_intervals,
[1] * len(within_intron_seq_intervals), gene, MAX_LENGTH)
# For Sanity Checking -
dfboundary = sanity_check(sxboundary, exon_boundary_set_final, columns, 'Boundary', gene, exon_boundary_final)
dfexon = sanity_check(sxexon, within_exon_seq_intervals, columns, 'Exon', gene)
dfintron = sanity_check(sxintron, within_intron_seq_intervals, columns, 'Intron', gene)
sanity_df = [dfboundary, dfexon, dfintron]
return (sxboundary + sxexon + sxintron, syboundary + syexon + syintron), sanity_df
def find_boundary_Nclassification(exon_boundary_set_final, exon_boundary_final,
within_exon_seq_intervals, within_intron_seq_intervals,
gene, MAX_LENGTH, OFFSET_RANGE):
'''
Sequences containing Exon Boundary: Boundary point - OFFSET_RANGE[0] (0 indexed classes)
'''
sxboundary, syboundary = create_training_set(exon_boundary_set_final,
[x - OFFSET_RANGE[0] for x in exon_boundary_final], gene, MAX_LENGTH)
# For Sanity Checking -
dfboundary = sanity_check(sxboundary, exon_boundary_set_final, columns, 'Boundary', gene, exon_boundary_final)
return (sxboundary, syboundary), [dfboundary]
def boundaryCertainPoint_orNot_2classification(exon_boundary_set_final, exon_boundary_final,
within_exon_seq_intervals, within_intron_seq_intervals,
gene, MAX_LENGTH, OFFSET_RANGE):
    assert OFFSET_RANGE[0] == OFFSET_RANGE[1], "OFFSET_RANGE must be a single offset so each boundary yields exactly one sequence"
sxboundary, syboundary = create_training_set(exon_boundary_set_final,
[0] * len(exon_boundary_final), gene, MAX_LENGTH)
sxexon, syexon = create_training_set(within_exon_seq_intervals,
[1] * len(within_exon_seq_intervals), gene, MAX_LENGTH)
sxintron, syintron = create_training_set(within_intron_seq_intervals,
[1] * len(within_intron_seq_intervals), gene, MAX_LENGTH)
# For Sanity Checking -
dfboundary = sanity_check(sxboundary, exon_boundary_set_final, columns, 'Boundary', gene, exon_boundary_final)
dfexon = sanity_check(sxexon, within_exon_seq_intervals, columns, 'Exon', gene)
dfintron = sanity_check(sxintron, within_intron_seq_intervals, columns, 'Intron', gene)
sanity_df = [dfboundary, dfexon, dfintron]
return (sxboundary + sxexon + sxintron, syboundary + syexon + syintron), sanity_df
def seq_1classification(exon_boundary_set_final, exon_boundary_final,
within_exon_seq_intervals, within_intron_seq_intervals,
gene, MAX_LENGTH, OFFSET_RANGE):
sxboundary, syboundary = create_training_set(exon_boundary_set_final,
[0] * len(exon_boundary_final), gene, MAX_LENGTH)
# For Sanity Checking -
dfboundary = sanity_check(sxboundary, exon_boundary_set_final, columns, 'Boundary', gene, exon_boundary_final)
return (sxboundary, syboundary), [dfboundary]
def find_boundary_regression(exon_boundary_set_final, exon_boundary_final,
within_exon_seq_intervals, within_intron_seq_intervals,
gene, MAX_LENGTH, OFFSET_RANGE):
sxboundary, syboundary = create_training_set(
exon_boundary_set_final, exon_boundary_final, gene, MAX_LENGTH)
# For Sanity Checking -
dfboundary = sanity_check(sxboundary, exon_boundary_set_final, columns, 'Boundary', gene, exon_boundary_final)
return (sxboundary, syboundary), [dfboundary]
| 60.54955 | 118 | 0.659574 |
acf4d0f1a17873b23c764d82d99691e05d1250b6 | 3,838 | py | Python | nextcode/services/pipelines/service.py | Haffi/nextcode-python-sdk | b70baa848cb6326fb0e7ee0e4167c41dcc45e085 | [
"MIT"
] | 7 | 2019-10-23T17:22:50.000Z | 2021-04-17T21:44:28.000Z | nextcode/services/pipelines/service.py | Haffi/nextcode-python-sdk | b70baa848cb6326fb0e7ee0e4167c41dcc45e085 | [
"MIT"
] | 8 | 2019-11-07T16:41:01.000Z | 2021-09-13T14:33:28.000Z | nextcode/services/pipelines/service.py | Haffi/nextcode-python-sdk | b70baa848cb6326fb0e7ee0e4167c41dcc45e085 | [
"MIT"
] | 4 | 2019-11-08T13:59:55.000Z | 2021-11-07T13:49:21.000Z | """
Service class
------------------
Service object for interfacing with the Pipelines Service API.
This class instance is used to communicate with a RESTful service. The `post_job` method
creates a new job on the server and `find_job` and `get_jobs` allow you to inspect running
and past workflow jobs.
Note: This service has not been fully integrated into the SDK and access is still quite raw.
"""
import time
import os
from typing import Optional, List, Union, Dict
from ...services import BaseService
from ...client import Client
from ...exceptions import NotFound
from .job import PipelineJob
from .exceptions import JobError
from ...packagelocal import package_and_upload
import logging
SERVICE_PATH = "pipelines-service"
log = logging.getLogger(__name__)
class Service(BaseService):
"""
A connection to the pipelines service API server
"""
def __init__(self, client: Client, *args, **kwargs) -> None:
super(Service, self).__init__(client, SERVICE_PATH, *args, **kwargs)
def get_pipelines(self) -> List:
"""
Returns the pipelines available on the current server
Refer to the API documentation for the Pipelines service to see formatting of data.
:return: List of pipelines
"""
resp = self.session.get(self.session.url_from_endpoint("pipelines"))
pipelines = resp.json()["pipelines"]
return pipelines
def get_projects(self) -> List:
"""
Returns the projects that have been created on the current server
Refer to the API documentation for the Pipelines service to see formatting of data.
:return: List of projects
"""
resp = self.session.get(self.session.url_from_endpoint("projects"))
projects = resp.json()["projects"]
return projects
def find_job(self, job_id: Union[int, str]) -> PipelineJob:
"""
Return a job proxy object
"""
jobs_endpoint = self.session.url_from_endpoint("jobs")
data: Dict = {"limit": 1}
if job_id == "latest":
data["user_name"] = self.current_user.get("email")
else:
try:
data["job_id"] = int(job_id)
except ValueError:
raise NotFound(
"job_id must be an integer or 'latest', not '%s'" % job_id
)
resp = self.session.get(jobs_endpoint, json=data)
jobs = resp.json()["jobs"]
if not jobs:
raise NotFound("Job not found")
job = jobs[0]
return PipelineJob(self.session, job["job_id"], job)
def get_jobs(
self,
user_name: Optional[str] = None,
status: Optional[str] = None,
project: Optional[str] = None,
pipeline: Optional[str] = None,
limit: Optional[int] = 50,
) -> List[PipelineJob]:
"""
Get a list of jobs satisfying the supplied criteria
:param user_name: The user who created the job
:param status: Current status of jobs
:param project: Filter by project
:param pipeline: Filter by pipeline name
:param limit: Maximum number of jobs to return
"""
data: Dict = {"limit": limit}
if user_name:
data["user_name"] = user_name
if status:
data["status"] = status
if project:
data["project_name"] = project
if pipeline:
data["pipeline_name"] = pipeline
st = time.time()
resp = self.session.get(self.session.url_from_endpoint("jobs"), json=data)
jobs = resp.json()["jobs"]
log.info("Retrieved %s jobs in %.2f sec", len(jobs), time.time() - st)
ret = []
for job in jobs:
ret.append(PipelineJob(self.session, job["job_id"], job))
return ret
| 31.459016 | 93 | 0.61334 |
acf4d10c6e35e535a80ddfa9cd66c4a6614e8350 | 2,780 | py | Python | Supervised/setup_mnist.py | IBM/UAE | e493de6142ec519fbabfed2e89f718fdd1415a4b | [
"Apache-2.0"
] | 4 | 2021-03-03T12:52:46.000Z | 2021-09-29T02:06:31.000Z | Supervised/setup_mnist.py | IBM/UAE | e493de6142ec519fbabfed2e89f718fdd1415a4b | [
"Apache-2.0"
] | 1 | 2021-03-16T00:52:59.000Z | 2021-05-20T19:42:11.000Z | Supervised/setup_mnist.py | IBM/UAE | e493de6142ec519fbabfed2e89f718fdd1415a4b | [
"Apache-2.0"
] | 2 | 2021-03-15T14:38:24.000Z | 2021-04-23T07:40:21.000Z | ## setup_mnist.py -- mnist data and model loading code
##
import tensorflow as tf
import numpy as np
import os
import pickle
import gzip
import urllib.request
from keras import backend as K
from keras.initializers import Constant, glorot_normal
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten, Layer, Input
from keras.layers import Conv2D, MaxPooling2D
from keras.utils import np_utils
from keras.models import load_model
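# The MNIST IDX files begin with a fixed-size big-endian header (16 bytes for
# image files, 8 bytes for label files), which is skipped below before the raw
# pixel/label bytes are read.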
def extract_data(filename, num_images):
with gzip.open(filename) as bytestream:
bytestream.read(16)
buf = bytestream.read(num_images*28*28)
data = np.frombuffer(buf, dtype=np.uint8).astype(np.float32)
data = (data / 255) - 0.5
data = data.reshape(num_images, 28, 28, 1)
return data
def extract_labels(filename, num_images):
with gzip.open(filename) as bytestream:
bytestream.read(8)
buf = bytestream.read(1 * num_images)
labels = np.frombuffer(buf, dtype=np.uint8)
return (np.arange(10) == labels[:, None]).astype(np.float32)
class MNIST:
def __init__(self):
if not os.path.exists("data"):
os.mkdir("data")
files = ["train-images-idx3-ubyte.gz",
"t10k-images-idx3-ubyte.gz",
"train-labels-idx1-ubyte.gz",
"t10k-labels-idx1-ubyte.gz"]
for name in files:
urllib.request.urlretrieve('http://yann.lecun.com/exdb/mnist/' + name, "data/"+name)
train_data = extract_data("data/train-images-idx3-ubyte.gz", 60000)+0.5
train_labels = extract_labels("data/train-labels-idx1-ubyte.gz", 60000)
self.test_data = extract_data("data/t10k-images-idx3-ubyte.gz", 10000)+0.5
self.test_labels = extract_labels("data/t10k-labels-idx1-ubyte.gz", 10000)
VALIDATION_SIZE = 5000
self.validation_data = train_data[:VALIDATION_SIZE, :, :, :]
self.validation_labels = train_labels[:VALIDATION_SIZE]
self.train_data = train_data[VALIDATION_SIZE:, :, :, :]
self.train_labels = train_labels[VALIDATION_SIZE:]


class MNISTModel:
def __init__(self, restore, session=None):
self.num_channels = 1
self.image_size = 28
self.num_labels = 10
def fn(correct, predicted):
return tf.nn.softmax_cross_entropy_with_logits(labels=correct,logits=predicted)
self.model = load_model(restore,custom_objects={'fn':fn})
from keras.models import Model
        self.output = Model(inputs=self.model.input, outputs=self.model.layers[1].output)

    def predict(self, data):
return self.model(data)

    def conv1(self, data):
return self.output(data)
| 35.189873 | 100 | 0.655036 |
acf4d13b32232f77bf4f95c2822da267743e500d | 120,155 | py | Python | stubs.min/System/Windows/Controls/__init___parts/GroupBox.py | ricardyn/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | 1 | 2021-02-02T13:39:16.000Z | 2021-02-02T13:39:16.000Z | stubs.min/System/Windows/Controls/__init___parts/GroupBox.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | stubs.min/System/Windows/Controls/__init___parts/GroupBox.py | hdm-dt-fb/ironpython-stubs | 4d2b405eda3ceed186e8adca55dd97c332c6f49d | [
"MIT"
] | null | null | null | class GroupBox(HeaderedContentControl,IResource,IAnimatable,IInputElement,IFrameworkInputElement,ISupportInitialize,IHaveResources,IQueryAmbient,IAddChild):
"""
Represents a control that creates a container that has a border and a header for user interface (UI) content.
GroupBox()
"""
def AddChild(self,*args):
"""
AddChild(self: ContentControl,value: object)
Adds a specified object as the child of a
System.Windows.Controls.ContentControl.
value: The object to add.
AddChild(self: Window_16$17,value: object)AddChild(self: Label_17$18,value: object)AddChild(self: Button_19$20,value: object)AddChild(self: CheckBox_20$21,value: object)
"""
pass
def AddLogicalChild(self,*args):
"""
AddLogicalChild(self: FrameworkElement,child: object)
Adds the provided object to the logical tree of this element.
child: Child element to be added.
AddLogicalChild(self: Window_16$17,child: object)AddLogicalChild(self: Label_17$18,child: object)AddLogicalChild(self: TextBox_18$19,child: object)AddLogicalChild(self: Button_19$20,child: object)AddLogicalChild(self: CheckBox_20$21,child: object)AddLogicalChild(self: ComboBox_21$22,child: object)AddLogicalChild(self: Separator_22$23,child: object)
"""
pass
def AddText(self,*args):
"""
AddText(self: ContentControl,text: str)
Adds a specified text string to a System.Windows.Controls.ContentControl.
text: The string to add.
AddText(self: Window_16$17,text: str)AddText(self: Label_17$18,text: str)AddText(self: Button_19$20,text: str)AddText(self: CheckBox_20$21,text: str)
"""
pass
def AddVisualChild(self,*args):
"""
AddVisualChild(self: Visual,child: Visual)
Defines the parent-child relationship between two visuals.
child: The child visual object to add to parent visual.
AddVisualChild(self: Window_16$17,child: Window_16$17)AddVisualChild(self: Label_17$18,child: Label_17$18)AddVisualChild(self: TextBox_18$19,child: TextBox_18$19)AddVisualChild(self: Button_19$20,child: Button_19$20)AddVisualChild(self: CheckBox_20$21,child: CheckBox_20$21)AddVisualChild(self: ComboBox_21$22,child: ComboBox_21$22)AddVisualChild(self: Separator_22$23,child: Separator_22$23)
"""
pass
def ArrangeCore(self,*args):
"""
ArrangeCore(self: FrameworkElement,finalRect: Rect)
Implements System.Windows.UIElement.ArrangeCore(System.Windows.Rect) (defined
as virtual in System.Windows.UIElement) and seals the implementation.
finalRect: The final area within the parent that this element should use to arrange itself
and its children.
ArrangeCore(self: Window_16$17,finalRect: Rect)ArrangeCore(self: Label_17$18,finalRect: Rect)ArrangeCore(self: TextBox_18$19,finalRect: Rect)ArrangeCore(self: Button_19$20,finalRect: Rect)ArrangeCore(self: CheckBox_20$21,finalRect: Rect)ArrangeCore(self: ComboBox_21$22,finalRect: Rect)ArrangeCore(self: Separator_22$23,finalRect: Rect)
"""
pass
def ArrangeOverride(self,*args):
"""
ArrangeOverride(self: Control,arrangeBounds: Size) -> Size
Called to arrange and size the content of a System.Windows.Controls.Control
object.
arrangeBounds: The computed size that is used to arrange the content.
Returns: The size of the control.
ArrangeOverride(self: Window_16$17,arrangeBounds: Size) -> Size
ArrangeOverride(self: Label_17$18,arrangeBounds: Size) -> Size
ArrangeOverride(self: TextBox_18$19,arrangeBounds: Size) -> Size
ArrangeOverride(self: Button_19$20,arrangeBounds: Size) -> Size
ArrangeOverride(self: CheckBox_20$21,arrangeBounds: Size) -> Size
ArrangeOverride(self: ComboBox_21$22,arrangeBounds: Size) -> Size
ArrangeOverride(self: Separator_22$23,arrangeBounds: Size) -> Size
"""
pass
def GetLayoutClip(self,*args):
"""
GetLayoutClip(self: FrameworkElement,layoutSlotSize: Size) -> Geometry
Returns a geometry for a clipping mask. The mask applies if the layout system
attempts to arrange an element that is larger than the available display space.
layoutSlotSize: The size of the part of the element that does visual presentation.
Returns: The clipping geometry.
GetLayoutClip(self: Window_16$17,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: Label_17$18,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: TextBox_18$19,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: Button_19$20,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: CheckBox_20$21,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: ComboBox_21$22,layoutSlotSize: Size) -> Geometry
GetLayoutClip(self: Separator_22$23,layoutSlotSize: Size) -> Geometry
"""
pass
def GetTemplateChild(self,*args):
"""
GetTemplateChild(self: FrameworkElement,childName: str) -> DependencyObject
Returns the named element in the visual tree of an instantiated
System.Windows.Controls.ControlTemplate.
childName: Name of the child to find.
Returns: The requested element. May be null if no element of the requested name exists.
GetTemplateChild(self: Window_16$17,childName: str) -> DependencyObject
GetTemplateChild(self: Label_17$18,childName: str) -> DependencyObject
GetTemplateChild(self: TextBox_18$19,childName: str) -> DependencyObject
GetTemplateChild(self: Button_19$20,childName: str) -> DependencyObject
GetTemplateChild(self: CheckBox_20$21,childName: str) -> DependencyObject
GetTemplateChild(self: ComboBox_21$22,childName: str) -> DependencyObject
GetTemplateChild(self: Separator_22$23,childName: str) -> DependencyObject
"""
pass
def GetUIParentCore(self,*args):
"""
GetUIParentCore(self: FrameworkElement) -> DependencyObject
Returns an alternative logical parent for this element if there is no visual
parent.
Returns: Returns something other than null whenever a WPF framework-level implementation
of this method has a non-visual parent connection.
GetUIParentCore(self: Window_16$17) -> DependencyObject
GetUIParentCore(self: Label_17$18) -> DependencyObject
GetUIParentCore(self: TextBox_18$19) -> DependencyObject
GetUIParentCore(self: Button_19$20) -> DependencyObject
GetUIParentCore(self: CheckBox_20$21) -> DependencyObject
GetUIParentCore(self: ComboBox_21$22) -> DependencyObject
GetUIParentCore(self: Separator_22$23) -> DependencyObject
"""
pass
def GetVisualChild(self,*args):
"""
GetVisualChild(self: FrameworkElement,index: int) -> Visual
Overrides System.Windows.Media.Visual.GetVisualChild(System.Int32),and returns
a child at the specified index from a collection of child elements.
index: The zero-based index of the requested child element in the collection.
Returns: The requested child element. This should not return null; if the provided index
is out of range,an exception is thrown.
GetVisualChild(self: Window_16$17,index: int) -> Visual
GetVisualChild(self: Label_17$18,index: int) -> Visual
GetVisualChild(self: TextBox_18$19,index: int) -> Visual
GetVisualChild(self: Button_19$20,index: int) -> Visual
GetVisualChild(self: CheckBox_20$21,index: int) -> Visual
GetVisualChild(self: ComboBox_21$22,index: int) -> Visual
GetVisualChild(self: Separator_22$23,index: int) -> Visual
"""
pass
def HitTestCore(self,*args):
"""
HitTestCore(self: UIElement,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
Implements
System.Windows.Media.Visual.HitTestCore(System.Windows.Media.GeometryHitTestPara
meters) to supply base element hit testing behavior (returning
System.Windows.Media.GeometryHitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated geometry.
HitTestCore(self: UIElement,hitTestParameters: PointHitTestParameters) -> HitTestResult
Implements
System.Windows.Media.Visual.HitTestCore(System.Windows.Media.PointHitTestParamet
ers) to supply base element hit testing behavior (returning
System.Windows.Media.HitTestResult).
hitTestParameters: Describes the hit test to perform,including the initial hit point.
Returns: Results of the test,including the evaluated point.
HitTestCore(self: Window_16$17,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Window_16$17,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: Label_17$18,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Label_17$18,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: TextBox_18$19,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: TextBox_18$19,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: Button_19$20,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Button_19$20,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: CheckBox_20$21,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: CheckBox_20$21,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: ComboBox_21$22,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: ComboBox_21$22,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
HitTestCore(self: Separator_22$23,hitTestParameters: PointHitTestParameters) -> HitTestResult
HitTestCore(self: Separator_22$23,hitTestParameters: GeometryHitTestParameters) -> GeometryHitTestResult
"""
pass
def MeasureCore(self,*args):
"""
MeasureCore(self: FrameworkElement,availableSize: Size) -> Size
Implements basic measure-pass layout system behavior for
System.Windows.FrameworkElement.
availableSize: The available size that the parent element can give to the child elements.
Returns: The desired size of this element in layout.
MeasureCore(self: Window_16$17,availableSize: Size) -> Size
MeasureCore(self: Label_17$18,availableSize: Size) -> Size
MeasureCore(self: TextBox_18$19,availableSize: Size) -> Size
MeasureCore(self: Button_19$20,availableSize: Size) -> Size
MeasureCore(self: CheckBox_20$21,availableSize: Size) -> Size
MeasureCore(self: ComboBox_21$22,availableSize: Size) -> Size
MeasureCore(self: Separator_22$23,availableSize: Size) -> Size
"""
pass
def MeasureOverride(self,*args):
"""
MeasureOverride(self: Control,constraint: Size) -> Size
Called to remeasure a control.
constraint: The maximum size that the method can return.
Returns: The size of the control,up to the maximum specified by constraint.
MeasureOverride(self: Window_16$17,availableSize: Size) -> Size
MeasureOverride(self: Label_17$18,constraint: Size) -> Size
MeasureOverride(self: TextBox_18$19,constraint: Size) -> Size
MeasureOverride(self: Button_19$20,constraint: Size) -> Size
MeasureOverride(self: CheckBox_20$21,constraint: Size) -> Size
MeasureOverride(self: ComboBox_21$22,constraint: Size) -> Size
MeasureOverride(self: Separator_22$23,constraint: Size) -> Size
"""
pass
def OnAccessKey(self,*args):
"""
OnAccessKey(self: GroupBox,e: AccessKeyEventArgs)
Responds when the System.Windows.Controls.AccessText.AccessKey for the
System.Windows.Controls.GroupBox is pressed.
e: The event information.
"""
pass
def OnChildDesiredSizeChanged(self,*args):
"""
OnChildDesiredSizeChanged(self: UIElement,child: UIElement)
Supports layout behavior when a child element is resized.
child: The child element that is being resized.
OnChildDesiredSizeChanged(self: Window_16$17,child: Window_16$17)OnChildDesiredSizeChanged(self: Label_17$18,child: Label_17$18)OnChildDesiredSizeChanged(self: TextBox_18$19,child: TextBox_18$19)OnChildDesiredSizeChanged(self: Button_19$20,child: Button_19$20)OnChildDesiredSizeChanged(self: CheckBox_20$21,child: CheckBox_20$21)OnChildDesiredSizeChanged(self: ComboBox_21$22,child: ComboBox_21$22)OnChildDesiredSizeChanged(self: Separator_22$23,child: Separator_22$23)
"""
pass
def OnContentChanged(self,*args):
"""
OnContentChanged(self: ContentControl,oldContent: object,newContent: object)
Called when the System.Windows.Controls.ContentControl.Content property changes.
oldContent: The old value of the System.Windows.Controls.ContentControl.Content property.
newContent: The new value of the System.Windows.Controls.ContentControl.Content property.
OnContentChanged(self: Window_16$17,oldContent: object,newContent: object)OnContentChanged(self: Label_17$18,oldContent: object,newContent: object)OnContentChanged(self: Button_19$20,oldContent: object,newContent: object)OnContentChanged(self: CheckBox_20$21,oldContent: object,newContent: object)
"""
pass
def OnContentStringFormatChanged(self,*args):
"""
OnContentStringFormatChanged(self: ContentControl,oldContentStringFormat: str,newContentStringFormat: str)
Occurs when the System.Windows.Controls.ContentControl.ContentStringFormat
property changes.
oldContentStringFormat: The old value of System.Windows.Controls.ContentControl.ContentStringFormat.
newContentStringFormat: The new value of System.Windows.Controls.ContentControl.ContentStringFormat.
OnContentStringFormatChanged(self: Window_16$17,oldContentStringFormat: str,newContentStringFormat: str)OnContentStringFormatChanged(self: Label_17$18,oldContentStringFormat: str,newContentStringFormat: str)OnContentStringFormatChanged(self: Button_19$20,oldContentStringFormat: str,newContentStringFormat: str)OnContentStringFormatChanged(self: CheckBox_20$21,oldContentStringFormat: str,newContentStringFormat: str)
"""
pass
def OnContentTemplateChanged(self,*args):
"""
OnContentTemplateChanged(self: ContentControl,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)
Called when the System.Windows.Controls.ContentControl.ContentTemplate property
changes.
oldContentTemplate: The old value of the System.Windows.Controls.ContentControl.ContentTemplate
property.
newContentTemplate: The new value of the System.Windows.Controls.ContentControl.ContentTemplate
property.
OnContentTemplateChanged(self: Window_16$17,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)OnContentTemplateChanged(self: Label_17$18,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)OnContentTemplateChanged(self: Button_19$20,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)OnContentTemplateChanged(self: CheckBox_20$21,oldContentTemplate: DataTemplate,newContentTemplate: DataTemplate)
"""
pass
def OnContentTemplateSelectorChanged(self,*args):
"""
OnContentTemplateSelectorChanged(self: ContentControl,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)
Called when the System.Windows.Controls.ContentControl.ContentTemplateSelector
property changes.
oldContentTemplateSelector: The old value of the
System.Windows.Controls.ContentControl.ContentTemplateSelector property.
newContentTemplateSelector: The new value of the
System.Windows.Controls.ContentControl.ContentTemplateSelector property.
OnContentTemplateSelectorChanged(self: Window_16$17,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)OnContentTemplateSelectorChanged(self: Label_17$18,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)OnContentTemplateSelectorChanged(self: Button_19$20,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)OnContentTemplateSelectorChanged(self: CheckBox_20$21,oldContentTemplateSelector: DataTemplateSelector,newContentTemplateSelector: DataTemplateSelector)
"""
pass
def OnContextMenuClosing(self,*args):
"""
OnContextMenuClosing(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled
System.Windows.FrameworkElement.ContextMenuClosing routed event reaches this
class in its route. Implement this method to add class handling for this event.
e: Provides data about the event.
OnContextMenuClosing(self: Window_16$17,e: ContextMenuEventArgs)OnContextMenuClosing(self: Label_17$18,e: ContextMenuEventArgs)OnContextMenuClosing(self: TextBox_18$19,e: ContextMenuEventArgs)OnContextMenuClosing(self: Button_19$20,e: ContextMenuEventArgs)OnContextMenuClosing(self: CheckBox_20$21,e: ContextMenuEventArgs)OnContextMenuClosing(self: ComboBox_21$22,e: ContextMenuEventArgs)OnContextMenuClosing(self: Separator_22$23,e: ContextMenuEventArgs)
"""
pass
def OnContextMenuOpening(self,*args):
"""
OnContextMenuOpening(self: FrameworkElement,e: ContextMenuEventArgs)
Invoked whenever an unhandled
System.Windows.FrameworkElement.ContextMenuOpening routed event reaches this
class in its route. Implement this method to add class handling for this event.
e: The System.Windows.RoutedEventArgs that contains the event data.
OnContextMenuOpening(self: Window_16$17,e: ContextMenuEventArgs)OnContextMenuOpening(self: Label_17$18,e: ContextMenuEventArgs)OnContextMenuOpening(self: TextBox_18$19,e: ContextMenuEventArgs)OnContextMenuOpening(self: Button_19$20,e: ContextMenuEventArgs)OnContextMenuOpening(self: CheckBox_20$21,e: ContextMenuEventArgs)OnContextMenuOpening(self: ComboBox_21$22,e: ContextMenuEventArgs)OnContextMenuOpening(self: Separator_22$23,e: ContextMenuEventArgs)
"""
pass
def OnCreateAutomationPeer(self,*args):
"""
OnCreateAutomationPeer(self: GroupBox) -> AutomationPeer
Creates an implementation of System.Windows.Automation.Peers.AutomationPeer for
the System.Windows.Controls.GroupBox control.
Returns: A System.Windows.Automation.Peers.GroupBoxAutomationPeer for the
System.Windows.Controls.GroupBox.
"""
pass
def OnDpiChanged(self,*args):
""" OnDpiChanged(self: Visual,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Window_16$17,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Label_17$18,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: TextBox_18$19,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Button_19$20,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: CheckBox_20$21,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: ComboBox_21$22,oldDpi: DpiScale,newDpi: DpiScale)OnDpiChanged(self: Separator_22$23,oldDpi: DpiScale,newDpi: DpiScale) """
pass
def OnDragEnter(self,*args):
"""
OnDragEnter(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.DragEnter attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnDragEnter(self: Window_16$17,e: DragEventArgs)OnDragEnter(self: Label_17$18,e: DragEventArgs)OnDragEnter(self: TextBox_18$19,e: DragEventArgs)OnDragEnter(self: Button_19$20,e: DragEventArgs)OnDragEnter(self: CheckBox_20$21,e: DragEventArgs)OnDragEnter(self: ComboBox_21$22,e: DragEventArgs)OnDragEnter(self: Separator_22$23,e: DragEventArgs)
"""
pass
def OnDragLeave(self,*args):
"""
OnDragLeave(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.DragLeave attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnDragLeave(self: Window_16$17,e: DragEventArgs)OnDragLeave(self: Label_17$18,e: DragEventArgs)OnDragLeave(self: TextBox_18$19,e: DragEventArgs)OnDragLeave(self: Button_19$20,e: DragEventArgs)OnDragLeave(self: CheckBox_20$21,e: DragEventArgs)OnDragLeave(self: ComboBox_21$22,e: DragEventArgs)OnDragLeave(self: Separator_22$23,e: DragEventArgs)
"""
pass
def OnDragOver(self,*args):
"""
OnDragOver(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.DragOver attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnDragOver(self: Window_16$17,e: DragEventArgs)OnDragOver(self: Label_17$18,e: DragEventArgs)OnDragOver(self: TextBox_18$19,e: DragEventArgs)OnDragOver(self: Button_19$20,e: DragEventArgs)OnDragOver(self: CheckBox_20$21,e: DragEventArgs)OnDragOver(self: ComboBox_21$22,e: DragEventArgs)OnDragOver(self: Separator_22$23,e: DragEventArgs)
"""
pass
def OnDrop(self,*args):
"""
OnDrop(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.Drop attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnDrop(self: Window_16$17,e: DragEventArgs)OnDrop(self: Label_17$18,e: DragEventArgs)OnDrop(self: TextBox_18$19,e: DragEventArgs)OnDrop(self: Button_19$20,e: DragEventArgs)OnDrop(self: CheckBox_20$21,e: DragEventArgs)OnDrop(self: ComboBox_21$22,e: DragEventArgs)OnDrop(self: Separator_22$23,e: DragEventArgs)
"""
pass
def OnGiveFeedback(self,*args):
"""
OnGiveFeedback(self: UIElement,e: GiveFeedbackEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.GiveFeedback attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.GiveFeedbackEventArgs that contains the event data.
OnGiveFeedback(self: Window_16$17,e: GiveFeedbackEventArgs)OnGiveFeedback(self: Label_17$18,e: GiveFeedbackEventArgs)OnGiveFeedback(self: TextBox_18$19,e: GiveFeedbackEventArgs)OnGiveFeedback(self: Button_19$20,e: GiveFeedbackEventArgs)OnGiveFeedback(self: CheckBox_20$21,e: GiveFeedbackEventArgs)OnGiveFeedback(self: ComboBox_21$22,e: GiveFeedbackEventArgs)OnGiveFeedback(self: Separator_22$23,e: GiveFeedbackEventArgs)
"""
pass
def OnGotFocus(self,*args):
"""
OnGotFocus(self: FrameworkElement,e: RoutedEventArgs)
Invoked whenever an unhandled System.Windows.UIElement.GotFocus event reaches
this element in its route.
e: The System.Windows.RoutedEventArgs that contains the event data.
OnGotFocus(self: Window_16$17,e: RoutedEventArgs)OnGotFocus(self: Label_17$18,e: RoutedEventArgs)OnGotFocus(self: TextBox_18$19,e: RoutedEventArgs)OnGotFocus(self: Button_19$20,e: RoutedEventArgs)OnGotFocus(self: CheckBox_20$21,e: RoutedEventArgs)OnGotFocus(self: Separator_22$23,e: RoutedEventArgs)
"""
pass
def OnGotKeyboardFocus(self,*args):
"""
OnGotKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.GotKeyboardFocus
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event
data.
OnGotKeyboardFocus(self: Window_16$17,e: KeyboardFocusChangedEventArgs)OnGotKeyboardFocus(self: Label_17$18,e: KeyboardFocusChangedEventArgs)OnGotKeyboardFocus(self: TextBox_18$19,e: KeyboardFocusChangedEventArgs)OnGotKeyboardFocus(self: Button_19$20,e: KeyboardFocusChangedEventArgs)OnGotKeyboardFocus(self: CheckBox_20$21,e: KeyboardFocusChangedEventArgs)OnGotKeyboardFocus(self: ComboBox_21$22,e: KeyboardFocusChangedEventArgs)OnGotKeyboardFocus(self: Separator_22$23,e: KeyboardFocusChangedEventArgs)
"""
pass
def OnGotMouseCapture(self,*args):
"""
OnGotMouseCapture(self: UIElement,e: MouseEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.GotMouseCapture attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
OnGotMouseCapture(self: Window_16$17,e: MouseEventArgs)OnGotMouseCapture(self: Label_17$18,e: MouseEventArgs)OnGotMouseCapture(self: TextBox_18$19,e: MouseEventArgs)OnGotMouseCapture(self: Button_19$20,e: MouseEventArgs)OnGotMouseCapture(self: CheckBox_20$21,e: MouseEventArgs)OnGotMouseCapture(self: ComboBox_21$22,e: MouseEventArgs)OnGotMouseCapture(self: Separator_22$23,e: MouseEventArgs)
"""
pass
def OnGotStylusCapture(self,*args):
"""
OnGotStylusCapture(self: UIElement,e: StylusEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.GotStylusCapture attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnGotStylusCapture(self: Window_16$17,e: StylusEventArgs)OnGotStylusCapture(self: Label_17$18,e: StylusEventArgs)OnGotStylusCapture(self: TextBox_18$19,e: StylusEventArgs)OnGotStylusCapture(self: Button_19$20,e: StylusEventArgs)OnGotStylusCapture(self: CheckBox_20$21,e: StylusEventArgs)OnGotStylusCapture(self: ComboBox_21$22,e: StylusEventArgs)OnGotStylusCapture(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnGotTouchCapture(self,*args):
"""
OnGotTouchCapture(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.GotTouchCapture routed
event that occurs when a touch is captured to this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnGotTouchCapture(self: Window_16$17,e: TouchEventArgs)OnGotTouchCapture(self: Label_17$18,e: TouchEventArgs)OnGotTouchCapture(self: TextBox_18$19,e: TouchEventArgs)OnGotTouchCapture(self: Button_19$20,e: TouchEventArgs)OnGotTouchCapture(self: CheckBox_20$21,e: TouchEventArgs)OnGotTouchCapture(self: ComboBox_21$22,e: TouchEventArgs)OnGotTouchCapture(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnHeaderChanged(self,*args):
"""
OnHeaderChanged(self: HeaderedContentControl,oldHeader: object,newHeader: object)
Called when the System.Windows.Controls.HeaderedContentControl.Header property
of a System.Windows.Controls.HeaderedContentControl changes.
oldHeader: Old value of the System.Windows.Controls.HeaderedContentControl.Header property.
newHeader: New value of the System.Windows.Controls.HeaderedContentControl.Header property.
"""
pass
def OnHeaderStringFormatChanged(self,*args):
"""
OnHeaderStringFormatChanged(self: HeaderedContentControl,oldHeaderStringFormat: str,newHeaderStringFormat: str)
Called when the
System.Windows.Controls.HeaderedContentControl.HeaderStringFormat property
changes.
oldHeaderStringFormat: The old value of the
System.Windows.Controls.HeaderedContentControl.HeaderStringFormat property.
newHeaderStringFormat: The new value of the
System.Windows.Controls.HeaderedContentControl.HeaderStringFormat property.
"""
pass
def OnHeaderTemplateChanged(self,*args):
"""
OnHeaderTemplateChanged(self: HeaderedContentControl,oldHeaderTemplate: DataTemplate,newHeaderTemplate: DataTemplate)
Called when the System.Windows.Controls.HeaderedContentControl.HeaderTemplate
property changes.
oldHeaderTemplate: Old value of the System.Windows.Controls.HeaderedContentControl.HeaderTemplate
property.
newHeaderTemplate: New value of the System.Windows.Controls.HeaderedContentControl.HeaderTemplate
property.
"""
pass
def OnHeaderTemplateSelectorChanged(self,*args):
"""
OnHeaderTemplateSelectorChanged(self: HeaderedContentControl,oldHeaderTemplateSelector: DataTemplateSelector,newHeaderTemplateSelector: DataTemplateSelector)
Called when the
System.Windows.Controls.HeaderedContentControl.HeaderTemplateSelector property
changes.
oldHeaderTemplateSelector: Old value of the
System.Windows.Controls.HeaderedContentControl.HeaderTemplateSelector property.
newHeaderTemplateSelector: New value of the
System.Windows.Controls.HeaderedContentControl.HeaderTemplateSelector property.
"""
pass
def OnInitialized(self,*args):
"""
OnInitialized(self: FrameworkElement,e: EventArgs)
Raises the System.Windows.FrameworkElement.Initialized event. This method is
invoked whenever System.Windows.FrameworkElement.IsInitialized is set to true
internally.
e: The System.Windows.RoutedEventArgs that contains the event data.
OnInitialized(self: Window_16$17,e: EventArgs)OnInitialized(self: Label_17$18,e: EventArgs)OnInitialized(self: TextBox_18$19,e: EventArgs)OnInitialized(self: Button_19$20,e: EventArgs)OnInitialized(self: CheckBox_20$21,e: EventArgs)OnInitialized(self: ComboBox_21$22,e: EventArgs)OnInitialized(self: Separator_22$23,e: EventArgs)
"""
pass
def OnIsKeyboardFocusedChanged(self,*args):
"""
OnIsKeyboardFocusedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsKeyboardFocusedChanged
event is raised on this element. Implement this method to add class handling
for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsKeyboardFocusedChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusedChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusedChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusedChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusedChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusedChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusedChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnIsKeyboardFocusWithinChanged(self,*args):
"""
OnIsKeyboardFocusWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked just before the System.Windows.UIElement.IsKeyboardFocusWithinChanged
event is raised by this element. Implement this method to add class handling
for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsKeyboardFocusWithinChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusWithinChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusWithinChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusWithinChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusWithinChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusWithinChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsKeyboardFocusWithinChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnIsMouseCapturedChanged(self,*args):
"""
OnIsMouseCapturedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseCapturedChanged event
is raised on this element. Implement this method to add class handling for this
event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsMouseCapturedChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsMouseCapturedChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsMouseCapturedChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsMouseCapturedChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsMouseCapturedChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsMouseCapturedChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsMouseCapturedChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnIsMouseCaptureWithinChanged(self,*args):
"""
OnIsMouseCaptureWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseCaptureWithinChanged
event is raised on this element. Implement this method to add class handling
for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsMouseCaptureWithinChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsMouseCaptureWithinChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsMouseCaptureWithinChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsMouseCaptureWithinChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsMouseCaptureWithinChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsMouseCaptureWithinChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsMouseCaptureWithinChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnIsMouseDirectlyOverChanged(self,*args):
"""
OnIsMouseDirectlyOverChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsMouseDirectlyOverChanged
event is raised on this element. Implement this method to add class handling
for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsMouseDirectlyOverChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsMouseDirectlyOverChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsMouseDirectlyOverChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsMouseDirectlyOverChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsMouseDirectlyOverChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsMouseDirectlyOverChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsMouseDirectlyOverChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnIsStylusCapturedChanged(self,*args):
"""
OnIsStylusCapturedChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusCapturedChanged
event is raised on this element. Implement this method to add class handling
for this event.
e: A System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsStylusCapturedChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsStylusCapturedChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsStylusCapturedChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsStylusCapturedChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsStylusCapturedChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsStylusCapturedChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsStylusCapturedChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnIsStylusCaptureWithinChanged(self,*args):
"""
OnIsStylusCaptureWithinChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusCaptureWithinChanged
event is raised on this element. Implement this method to add class handling
for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsStylusCaptureWithinChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsStylusCaptureWithinChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsStylusCaptureWithinChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsStylusCaptureWithinChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsStylusCaptureWithinChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsStylusCaptureWithinChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsStylusCaptureWithinChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnIsStylusDirectlyOverChanged(self,*args):
"""
OnIsStylusDirectlyOverChanged(self: UIElement,e: DependencyPropertyChangedEventArgs)
Invoked when an unhandled System.Windows.UIElement.IsStylusDirectlyOverChanged
event is raised on this element. Implement this method to add class handling
for this event.
e: The System.Windows.DependencyPropertyChangedEventArgs that contains the event
data.
OnIsStylusDirectlyOverChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnIsStylusDirectlyOverChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnIsStylusDirectlyOverChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnIsStylusDirectlyOverChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnIsStylusDirectlyOverChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnIsStylusDirectlyOverChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnIsStylusDirectlyOverChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
def OnKeyDown(self,*args):
"""
OnKeyDown(self: UIElement,e: KeyEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.KeyDown attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
OnKeyDown(self: Window_16$17,e: KeyEventArgs)OnKeyDown(self: Label_17$18,e: KeyEventArgs)OnKeyDown(self: TextBox_18$19,e: KeyEventArgs)OnKeyDown(self: Button_19$20,e: KeyEventArgs)OnKeyDown(self: CheckBox_20$21,e: KeyEventArgs)OnKeyDown(self: ComboBox_21$22,e: KeyEventArgs)OnKeyDown(self: Separator_22$23,e: KeyEventArgs)
"""
pass
def OnKeyUp(self,*args):
"""
OnKeyUp(self: UIElement,e: KeyEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.KeyUp attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
OnKeyUp(self: Window_16$17,e: KeyEventArgs)OnKeyUp(self: Label_17$18,e: KeyEventArgs)OnKeyUp(self: TextBox_18$19,e: KeyEventArgs)OnKeyUp(self: Button_19$20,e: KeyEventArgs)OnKeyUp(self: CheckBox_20$21,e: KeyEventArgs)OnKeyUp(self: ComboBox_21$22,e: KeyEventArgs)OnKeyUp(self: Separator_22$23,e: KeyEventArgs)
"""
pass
def OnLostFocus(self,*args):
"""
OnLostFocus(self: UIElement,e: RoutedEventArgs)
   Raises the System.Windows.UIElement.LostFocus routed event by using the event
data that is provided.
e: A System.Windows.RoutedEventArgs that contains event data. This event data must
contain the identifier for the System.Windows.UIElement.LostFocus event.
OnLostFocus(self: Window_16$17,e: RoutedEventArgs)OnLostFocus(self: Label_17$18,e: RoutedEventArgs)OnLostFocus(self: TextBox_18$19,e: RoutedEventArgs)OnLostFocus(self: Button_19$20,e: RoutedEventArgs)OnLostFocus(self: CheckBox_20$21,e: RoutedEventArgs)OnLostFocus(self: ComboBox_21$22,e: RoutedEventArgs)OnLostFocus(self: Separator_22$23,e: RoutedEventArgs)
"""
pass
def OnLostKeyboardFocus(self,*args):
"""
OnLostKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
   Invoked when an unhandled System.Windows.Input.Keyboard.LostKeyboardFocus
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains event data.
OnLostKeyboardFocus(self: Window_16$17,e: KeyboardFocusChangedEventArgs)OnLostKeyboardFocus(self: Label_17$18,e: KeyboardFocusChangedEventArgs)OnLostKeyboardFocus(self: TextBox_18$19,e: KeyboardFocusChangedEventArgs)OnLostKeyboardFocus(self: Button_19$20,e: KeyboardFocusChangedEventArgs)OnLostKeyboardFocus(self: CheckBox_20$21,e: KeyboardFocusChangedEventArgs)OnLostKeyboardFocus(self: ComboBox_21$22,e: KeyboardFocusChangedEventArgs)OnLostKeyboardFocus(self: Separator_22$23,e: KeyboardFocusChangedEventArgs)
"""
pass
def OnLostMouseCapture(self,*args):
"""
OnLostMouseCapture(self: UIElement,e: MouseEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.LostMouseCapture attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains event data.
OnLostMouseCapture(self: Window_16$17,e: MouseEventArgs)OnLostMouseCapture(self: Label_17$18,e: MouseEventArgs)OnLostMouseCapture(self: TextBox_18$19,e: MouseEventArgs)OnLostMouseCapture(self: Button_19$20,e: MouseEventArgs)OnLostMouseCapture(self: CheckBox_20$21,e: MouseEventArgs)OnLostMouseCapture(self: Separator_22$23,e: MouseEventArgs)
"""
pass
def OnLostStylusCapture(self,*args):
"""
OnLostStylusCapture(self: UIElement,e: StylusEventArgs)
   Invoked when an unhandled System.Windows.Input.Stylus.LostStylusCapture
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains event data.
OnLostStylusCapture(self: Window_16$17,e: StylusEventArgs)OnLostStylusCapture(self: Label_17$18,e: StylusEventArgs)OnLostStylusCapture(self: TextBox_18$19,e: StylusEventArgs)OnLostStylusCapture(self: Button_19$20,e: StylusEventArgs)OnLostStylusCapture(self: CheckBox_20$21,e: StylusEventArgs)OnLostStylusCapture(self: ComboBox_21$22,e: StylusEventArgs)OnLostStylusCapture(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnLostTouchCapture(self,*args):
"""
OnLostTouchCapture(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.LostTouchCapture
routed event that occurs when this element loses a touch capture.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnLostTouchCapture(self: Window_16$17,e: TouchEventArgs)OnLostTouchCapture(self: Label_17$18,e: TouchEventArgs)OnLostTouchCapture(self: TextBox_18$19,e: TouchEventArgs)OnLostTouchCapture(self: Button_19$20,e: TouchEventArgs)OnLostTouchCapture(self: CheckBox_20$21,e: TouchEventArgs)OnLostTouchCapture(self: ComboBox_21$22,e: TouchEventArgs)OnLostTouchCapture(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnManipulationBoundaryFeedback(self,*args):
"""
OnManipulationBoundaryFeedback(self: UIElement,e: ManipulationBoundaryFeedbackEventArgs)
Called when the System.Windows.UIElement.ManipulationBoundaryFeedback event
occurs.
e: The data for the event.
OnManipulationBoundaryFeedback(self: Window_16$17,e: ManipulationBoundaryFeedbackEventArgs)OnManipulationBoundaryFeedback(self: Label_17$18,e: ManipulationBoundaryFeedbackEventArgs)OnManipulationBoundaryFeedback(self: TextBox_18$19,e: ManipulationBoundaryFeedbackEventArgs)OnManipulationBoundaryFeedback(self: Button_19$20,e: ManipulationBoundaryFeedbackEventArgs)OnManipulationBoundaryFeedback(self: CheckBox_20$21,e: ManipulationBoundaryFeedbackEventArgs)OnManipulationBoundaryFeedback(self: ComboBox_21$22,e: ManipulationBoundaryFeedbackEventArgs)OnManipulationBoundaryFeedback(self: Separator_22$23,e: ManipulationBoundaryFeedbackEventArgs)
"""
pass
def OnManipulationCompleted(self,*args):
"""
OnManipulationCompleted(self: UIElement,e: ManipulationCompletedEventArgs)
Called when the System.Windows.UIElement.ManipulationCompleted event occurs.
e: The data for the event.
OnManipulationCompleted(self: Window_16$17,e: ManipulationCompletedEventArgs)OnManipulationCompleted(self: Label_17$18,e: ManipulationCompletedEventArgs)OnManipulationCompleted(self: TextBox_18$19,e: ManipulationCompletedEventArgs)OnManipulationCompleted(self: Button_19$20,e: ManipulationCompletedEventArgs)OnManipulationCompleted(self: CheckBox_20$21,e: ManipulationCompletedEventArgs)OnManipulationCompleted(self: ComboBox_21$22,e: ManipulationCompletedEventArgs)OnManipulationCompleted(self: Separator_22$23,e: ManipulationCompletedEventArgs)
"""
pass
def OnManipulationDelta(self,*args):
"""
OnManipulationDelta(self: UIElement,e: ManipulationDeltaEventArgs)
Called when the System.Windows.UIElement.ManipulationDelta event occurs.
e: The data for the event.
OnManipulationDelta(self: Window_16$17,e: ManipulationDeltaEventArgs)OnManipulationDelta(self: Label_17$18,e: ManipulationDeltaEventArgs)OnManipulationDelta(self: TextBox_18$19,e: ManipulationDeltaEventArgs)OnManipulationDelta(self: Button_19$20,e: ManipulationDeltaEventArgs)OnManipulationDelta(self: CheckBox_20$21,e: ManipulationDeltaEventArgs)OnManipulationDelta(self: ComboBox_21$22,e: ManipulationDeltaEventArgs)OnManipulationDelta(self: Separator_22$23,e: ManipulationDeltaEventArgs)
"""
pass
def OnManipulationInertiaStarting(self,*args):
"""
OnManipulationInertiaStarting(self: UIElement,e: ManipulationInertiaStartingEventArgs)
Called when the System.Windows.UIElement.ManipulationInertiaStarting event
occurs.
e: The data for the event.
OnManipulationInertiaStarting(self: Window_16$17,e: ManipulationInertiaStartingEventArgs)OnManipulationInertiaStarting(self: Label_17$18,e: ManipulationInertiaStartingEventArgs)OnManipulationInertiaStarting(self: TextBox_18$19,e: ManipulationInertiaStartingEventArgs)OnManipulationInertiaStarting(self: Button_19$20,e: ManipulationInertiaStartingEventArgs)OnManipulationInertiaStarting(self: CheckBox_20$21,e: ManipulationInertiaStartingEventArgs)OnManipulationInertiaStarting(self: ComboBox_21$22,e: ManipulationInertiaStartingEventArgs)OnManipulationInertiaStarting(self: Separator_22$23,e: ManipulationInertiaStartingEventArgs)
"""
pass
def OnManipulationStarted(self,*args):
"""
OnManipulationStarted(self: UIElement,e: ManipulationStartedEventArgs)
Called when the System.Windows.UIElement.ManipulationStarted event occurs.
e: The data for the event.
OnManipulationStarted(self: Window_16$17,e: ManipulationStartedEventArgs)OnManipulationStarted(self: Label_17$18,e: ManipulationStartedEventArgs)OnManipulationStarted(self: TextBox_18$19,e: ManipulationStartedEventArgs)OnManipulationStarted(self: Button_19$20,e: ManipulationStartedEventArgs)OnManipulationStarted(self: CheckBox_20$21,e: ManipulationStartedEventArgs)OnManipulationStarted(self: ComboBox_21$22,e: ManipulationStartedEventArgs)OnManipulationStarted(self: Separator_22$23,e: ManipulationStartedEventArgs)
"""
pass
def OnManipulationStarting(self,*args):
"""
OnManipulationStarting(self: UIElement,e: ManipulationStartingEventArgs)
Provides class handling for the System.Windows.UIElement.ManipulationStarting
routed event that occurs when the manipulation processor is first created.
e: A System.Windows.Input.ManipulationStartingEventArgs that contains the event
data.
OnManipulationStarting(self: Window_16$17,e: ManipulationStartingEventArgs)OnManipulationStarting(self: Label_17$18,e: ManipulationStartingEventArgs)OnManipulationStarting(self: TextBox_18$19,e: ManipulationStartingEventArgs)OnManipulationStarting(self: Button_19$20,e: ManipulationStartingEventArgs)OnManipulationStarting(self: CheckBox_20$21,e: ManipulationStartingEventArgs)OnManipulationStarting(self: ComboBox_21$22,e: ManipulationStartingEventArgs)OnManipulationStarting(self: Separator_22$23,e: ManipulationStartingEventArgs)
"""
pass
def OnMouseDoubleClick(self,*args):
"""
OnMouseDoubleClick(self: Control,e: MouseButtonEventArgs)
Raises the System.Windows.Controls.Control.MouseDoubleClick routed event.
e: The event data.
OnMouseDoubleClick(self: Window_16$17,e: MouseButtonEventArgs)OnMouseDoubleClick(self: Label_17$18,e: MouseButtonEventArgs)OnMouseDoubleClick(self: TextBox_18$19,e: MouseButtonEventArgs)OnMouseDoubleClick(self: Button_19$20,e: MouseButtonEventArgs)OnMouseDoubleClick(self: CheckBox_20$21,e: MouseButtonEventArgs)OnMouseDoubleClick(self: ComboBox_21$22,e: MouseButtonEventArgs)OnMouseDoubleClick(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnMouseDown(self,*args):
"""
OnMouseDown(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.MouseDown attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data.
This event data reports details about the mouse button that was pressed and the
handled state.
OnMouseDown(self: Window_16$17,e: MouseButtonEventArgs)OnMouseDown(self: Label_17$18,e: MouseButtonEventArgs)OnMouseDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnMouseDown(self: Button_19$20,e: MouseButtonEventArgs)OnMouseDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnMouseDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnMouseDown(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnMouseEnter(self,*args):
"""
OnMouseEnter(self: UIElement,e: MouseEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.MouseEnter attached event
is raised on this element. Implement this method to add class handling for this
event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
OnMouseEnter(self: Window_16$17,e: MouseEventArgs)OnMouseEnter(self: Label_17$18,e: MouseEventArgs)OnMouseEnter(self: TextBox_18$19,e: MouseEventArgs)OnMouseEnter(self: Button_19$20,e: MouseEventArgs)OnMouseEnter(self: CheckBox_20$21,e: MouseEventArgs)OnMouseEnter(self: ComboBox_21$22,e: MouseEventArgs)OnMouseEnter(self: Separator_22$23,e: MouseEventArgs)
"""
pass
def OnMouseLeave(self,*args):
"""
OnMouseLeave(self: UIElement,e: MouseEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.MouseLeave attached event
is raised on this element. Implement this method to add class handling for this
event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
OnMouseLeave(self: Window_16$17,e: MouseEventArgs)OnMouseLeave(self: Label_17$18,e: MouseEventArgs)OnMouseLeave(self: TextBox_18$19,e: MouseEventArgs)OnMouseLeave(self: Button_19$20,e: MouseEventArgs)OnMouseLeave(self: CheckBox_20$21,e: MouseEventArgs)OnMouseLeave(self: ComboBox_21$22,e: MouseEventArgs)OnMouseLeave(self: Separator_22$23,e: MouseEventArgs)
"""
pass
def OnMouseLeftButtonDown(self,*args):
"""
OnMouseLeftButtonDown(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.MouseLeftButtonDown routed
event is raised on this element. Implement this method to add class handling
for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the left mouse button was pressed.
OnMouseLeftButtonDown(self: Window_16$17,e: MouseButtonEventArgs)OnMouseLeftButtonDown(self: Label_17$18,e: MouseButtonEventArgs)OnMouseLeftButtonDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnMouseLeftButtonDown(self: Button_19$20,e: MouseButtonEventArgs)OnMouseLeftButtonDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnMouseLeftButtonDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnMouseLeftButtonDown(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnMouseLeftButtonUp(self,*args):
"""
OnMouseLeftButtonUp(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.MouseLeftButtonUp routed
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the left mouse button was released.
OnMouseLeftButtonUp(self: Window_16$17,e: MouseButtonEventArgs)OnMouseLeftButtonUp(self: Label_17$18,e: MouseButtonEventArgs)OnMouseLeftButtonUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnMouseLeftButtonUp(self: Button_19$20,e: MouseButtonEventArgs)OnMouseLeftButtonUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnMouseLeftButtonUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnMouseLeftButtonUp(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnMouseMove(self,*args):
"""
OnMouseMove(self: UIElement,e: MouseEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.MouseMove attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
OnMouseMove(self: Window_16$17,e: MouseEventArgs)OnMouseMove(self: Label_17$18,e: MouseEventArgs)OnMouseMove(self: TextBox_18$19,e: MouseEventArgs)OnMouseMove(self: Button_19$20,e: MouseEventArgs)OnMouseMove(self: CheckBox_20$21,e: MouseEventArgs)OnMouseMove(self: Separator_22$23,e: MouseEventArgs)
"""
pass
def OnMouseRightButtonDown(self,*args):
"""
OnMouseRightButtonDown(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.MouseRightButtonDown routed
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the right mouse button was pressed.
OnMouseRightButtonDown(self: Window_16$17,e: MouseButtonEventArgs)OnMouseRightButtonDown(self: Label_17$18,e: MouseButtonEventArgs)OnMouseRightButtonDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnMouseRightButtonDown(self: Button_19$20,e: MouseButtonEventArgs)OnMouseRightButtonDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnMouseRightButtonDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnMouseRightButtonDown(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnMouseRightButtonUp(self,*args):
"""
OnMouseRightButtonUp(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.UIElement.MouseRightButtonUp routed
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the right mouse button was released.
OnMouseRightButtonUp(self: Window_16$17,e: MouseButtonEventArgs)OnMouseRightButtonUp(self: Label_17$18,e: MouseButtonEventArgs)OnMouseRightButtonUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnMouseRightButtonUp(self: Button_19$20,e: MouseButtonEventArgs)OnMouseRightButtonUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnMouseRightButtonUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnMouseRightButtonUp(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnMouseUp(self,*args):
"""
OnMouseUp(self: UIElement,e: MouseButtonEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.MouseUp routed event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the mouse button was released.
OnMouseUp(self: Window_16$17,e: MouseButtonEventArgs)OnMouseUp(self: Label_17$18,e: MouseButtonEventArgs)OnMouseUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnMouseUp(self: Button_19$20,e: MouseButtonEventArgs)OnMouseUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnMouseUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnMouseUp(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnMouseWheel(self,*args):
"""
OnMouseWheel(self: UIElement,e: MouseWheelEventArgs)
   Invoked when an unhandled System.Windows.Input.Mouse.MouseWheel attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
OnMouseWheel(self: Window_16$17,e: MouseWheelEventArgs)OnMouseWheel(self: Label_17$18,e: MouseWheelEventArgs)OnMouseWheel(self: TextBox_18$19,e: MouseWheelEventArgs)OnMouseWheel(self: Button_19$20,e: MouseWheelEventArgs)OnMouseWheel(self: CheckBox_20$21,e: MouseWheelEventArgs)OnMouseWheel(self: Separator_22$23,e: MouseWheelEventArgs)
"""
pass
def OnPreviewDragEnter(self,*args):
"""
OnPreviewDragEnter(self: UIElement,e: DragEventArgs)
   Invoked when an unhandled System.Windows.DragDrop.PreviewDragEnter attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnPreviewDragEnter(self: Window_16$17,e: DragEventArgs)OnPreviewDragEnter(self: Label_17$18,e: DragEventArgs)OnPreviewDragEnter(self: TextBox_18$19,e: DragEventArgs)OnPreviewDragEnter(self: Button_19$20,e: DragEventArgs)OnPreviewDragEnter(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDragEnter(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDragEnter(self: Separator_22$23,e: DragEventArgs)
"""
pass
def OnPreviewDragLeave(self,*args):
"""
OnPreviewDragLeave(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragLeave attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnPreviewDragLeave(self: Window_16$17,e: DragEventArgs)OnPreviewDragLeave(self: Label_17$18,e: DragEventArgs)OnPreviewDragLeave(self: TextBox_18$19,e: DragEventArgs)OnPreviewDragLeave(self: Button_19$20,e: DragEventArgs)OnPreviewDragLeave(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDragLeave(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDragLeave(self: Separator_22$23,e: DragEventArgs)
"""
pass
def OnPreviewDragOver(self,*args):
"""
OnPreviewDragOver(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDragOver attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnPreviewDragOver(self: Window_16$17,e: DragEventArgs)OnPreviewDragOver(self: Label_17$18,e: DragEventArgs)OnPreviewDragOver(self: TextBox_18$19,e: DragEventArgs)OnPreviewDragOver(self: Button_19$20,e: DragEventArgs)OnPreviewDragOver(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDragOver(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDragOver(self: Separator_22$23,e: DragEventArgs)
"""
pass
def OnPreviewDrop(self,*args):
"""
OnPreviewDrop(self: UIElement,e: DragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewDrop attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.DragEventArgs that contains the event data.
OnPreviewDrop(self: Window_16$17,e: DragEventArgs)OnPreviewDrop(self: Label_17$18,e: DragEventArgs)OnPreviewDrop(self: TextBox_18$19,e: DragEventArgs)OnPreviewDrop(self: Button_19$20,e: DragEventArgs)OnPreviewDrop(self: CheckBox_20$21,e: DragEventArgs)OnPreviewDrop(self: ComboBox_21$22,e: DragEventArgs)OnPreviewDrop(self: Separator_22$23,e: DragEventArgs)
"""
pass
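# --- Editor's illustrative sketch (not part of the generated stubs) ---
# The Preview* drag events tunnel from the root toward the target, so a
# control can vet a drop before its children (or its own built-in drag
# handling) see it. ``FileDropBox`` is hypothetical and assumes AllowDrop
# has been set to True on the element.
#
#     from System.Windows import DataFormats, DragDropEffects
#     from System.Windows.Controls import TextBox
#
#     class FileDropBox(TextBox):
#         def OnPreviewDragOver(self, e):
#             ok = e.Data.GetDataPresent(DataFormats.FileDrop)
#             none_effect = getattr(DragDropEffects, 'None')  # keyword clash
#             e.Effects = DragDropEffects.Copy if ok else none_effect
#             e.Handled = True
#         def OnPreviewDrop(self, e):
#             for path in e.Data.GetData(DataFormats.FileDrop):
#                 self.AppendText(path + '\n')
#             e.Handled = True
# ----------------------------------------------------------------------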
def OnPreviewGiveFeedback(self,*args):
"""
OnPreviewGiveFeedback(self: UIElement,e: GiveFeedbackEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewGiveFeedback attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.GiveFeedbackEventArgs that contains the event data.
OnPreviewGiveFeedback(self: Window_16$17,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: Label_17$18,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: TextBox_18$19,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: Button_19$20,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: CheckBox_20$21,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: ComboBox_21$22,e: GiveFeedbackEventArgs)OnPreviewGiveFeedback(self: Separator_22$23,e: GiveFeedbackEventArgs)
"""
pass
def OnPreviewGotKeyboardFocus(self,*args):
"""
OnPreviewGotKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewGotKeyboardFocus
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event
data.
OnPreviewGotKeyboardFocus(self: Window_16$17,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: Label_17$18,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: TextBox_18$19,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: Button_19$20,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: CheckBox_20$21,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: ComboBox_21$22,e: KeyboardFocusChangedEventArgs)OnPreviewGotKeyboardFocus(self: Separator_22$23,e: KeyboardFocusChangedEventArgs)
"""
pass
def OnPreviewKeyDown(self,*args):
"""
OnPreviewKeyDown(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyDown attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
OnPreviewKeyDown(self: Window_16$17,e: KeyEventArgs)OnPreviewKeyDown(self: Label_17$18,e: KeyEventArgs)OnPreviewKeyDown(self: TextBox_18$19,e: KeyEventArgs)OnPreviewKeyDown(self: Button_19$20,e: KeyEventArgs)OnPreviewKeyDown(self: CheckBox_20$21,e: KeyEventArgs)OnPreviewKeyDown(self: ComboBox_21$22,e: KeyEventArgs)OnPreviewKeyDown(self: Separator_22$23,e: KeyEventArgs)
"""
pass
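# --- Editor's illustrative sketch (not part of the generated stubs) ---
# Preview (tunneling) key events run before the bubbling KeyDown, so
# setting ``e.Handled`` here suppresses the ordinary handlers entirely.
# ``EscapeAwareBox`` is hypothetical; Key is the real WPF enum.
#
#     from System.Windows.Controls import TextBox
#     from System.Windows.Input import Key
#
#     class EscapeAwareBox(TextBox):
#         def OnPreviewKeyDown(self, e):
#             if e.Key == Key.Escape:
#                 self.Clear()
#                 e.Handled = True   # the bubbling KeyDown never fires
#             else:
#                 super(EscapeAwareBox, self).OnPreviewKeyDown(e)
# ----------------------------------------------------------------------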
def OnPreviewKeyUp(self,*args):
"""
OnPreviewKeyUp(self: UIElement,e: KeyEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewKeyUp attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.KeyEventArgs that contains the event data.
OnPreviewKeyUp(self: Window_16$17,e: KeyEventArgs)OnPreviewKeyUp(self: Label_17$18,e: KeyEventArgs)OnPreviewKeyUp(self: TextBox_18$19,e: KeyEventArgs)OnPreviewKeyUp(self: Button_19$20,e: KeyEventArgs)OnPreviewKeyUp(self: CheckBox_20$21,e: KeyEventArgs)OnPreviewKeyUp(self: ComboBox_21$22,e: KeyEventArgs)OnPreviewKeyUp(self: Separator_22$23,e: KeyEventArgs)
"""
pass
def OnPreviewLostKeyboardFocus(self,*args):
"""
OnPreviewLostKeyboardFocus(self: UIElement,e: KeyboardFocusChangedEventArgs)
Invoked when an unhandled System.Windows.Input.Keyboard.PreviewLostKeyboardFocus attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.KeyboardFocusChangedEventArgs that contains the event
data.
OnPreviewLostKeyboardFocus(self: Window_16$17,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: Label_17$18,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: TextBox_18$19,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: Button_19$20,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: CheckBox_20$21,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: ComboBox_21$22,e: KeyboardFocusChangedEventArgs)OnPreviewLostKeyboardFocus(self: Separator_22$23,e: KeyboardFocusChangedEventArgs)
"""
pass
def OnPreviewMouseDoubleClick(self,*args):
"""
OnPreviewMouseDoubleClick(self: Control,e: MouseButtonEventArgs)
Raises the System.Windows.Controls.Control.PreviewMouseDoubleClick routed event.
e: The event data.
OnPreviewMouseDoubleClick(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseDoubleClick(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnPreviewMouseDown(self,*args):
"""
OnPreviewMouseDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseDown attached
routed event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that one or more mouse buttons were pressed.
OnPreviewMouseDown(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseDown(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseDown(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseDown(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnPreviewMouseLeftButtonDown(self,*args):
"""
OnPreviewMouseLeftButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonDown
routed event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the left mouse button was pressed.
OnPreviewMouseLeftButtonDown(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonDown(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnPreviewMouseLeftButtonUp(self,*args):
"""
OnPreviewMouseLeftButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseLeftButtonUp
routed event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the left mouse button was released.
OnPreviewMouseLeftButtonUp(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseLeftButtonUp(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnPreviewMouseMove(self,*args):
"""
OnPreviewMouseMove(self: UIElement,e: MouseEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseMove attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseEventArgs that contains the event data.
OnPreviewMouseMove(self: Window_16$17,e: MouseEventArgs)OnPreviewMouseMove(self: Label_17$18,e: MouseEventArgs)OnPreviewMouseMove(self: TextBox_18$19,e: MouseEventArgs)OnPreviewMouseMove(self: Button_19$20,e: MouseEventArgs)OnPreviewMouseMove(self: CheckBox_20$21,e: MouseEventArgs)OnPreviewMouseMove(self: ComboBox_21$22,e: MouseEventArgs)OnPreviewMouseMove(self: Separator_22$23,e: MouseEventArgs)
"""
pass
def OnPreviewMouseRightButtonDown(self,*args):
"""
OnPreviewMouseRightButtonDown(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonDown
routed event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the right mouse button was pressed.
OnPreviewMouseRightButtonDown(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseRightButtonDown(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnPreviewMouseRightButtonUp(self,*args):
"""
OnPreviewMouseRightButtonUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.UIElement.PreviewMouseRightButtonUp
routed event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that the right mouse button was released.
OnPreviewMouseRightButtonUp(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseRightButtonUp(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnPreviewMouseUp(self,*args):
"""
OnPreviewMouseUp(self: UIElement,e: MouseButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseUp attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseButtonEventArgs that contains the event data. The
event data reports that one or more mouse buttons were released.
OnPreviewMouseUp(self: Window_16$17,e: MouseButtonEventArgs)OnPreviewMouseUp(self: Label_17$18,e: MouseButtonEventArgs)OnPreviewMouseUp(self: TextBox_18$19,e: MouseButtonEventArgs)OnPreviewMouseUp(self: Button_19$20,e: MouseButtonEventArgs)OnPreviewMouseUp(self: CheckBox_20$21,e: MouseButtonEventArgs)OnPreviewMouseUp(self: ComboBox_21$22,e: MouseButtonEventArgs)OnPreviewMouseUp(self: Separator_22$23,e: MouseButtonEventArgs)
"""
pass
def OnPreviewMouseWheel(self,*args):
"""
OnPreviewMouseWheel(self: UIElement,e: MouseWheelEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.PreviewMouseWheel attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.MouseWheelEventArgs that contains the event data.
OnPreviewMouseWheel(self: Window_16$17,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: Label_17$18,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: TextBox_18$19,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: Button_19$20,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: CheckBox_20$21,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: ComboBox_21$22,e: MouseWheelEventArgs)OnPreviewMouseWheel(self: Separator_22$23,e: MouseWheelEventArgs)
"""
pass
def OnPreviewQueryContinueDrag(self,*args):
"""
OnPreviewQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.PreviewQueryContinueDrag
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
OnPreviewQueryContinueDrag(self: Window_16$17,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: Label_17$18,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: TextBox_18$19,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: Button_19$20,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: CheckBox_20$21,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: ComboBox_21$22,e: QueryContinueDragEventArgs)OnPreviewQueryContinueDrag(self: Separator_22$23,e: QueryContinueDragEventArgs)
"""
pass
def OnPreviewStylusButtonDown(self,*args):
"""
OnPreviewStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonDown
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
OnPreviewStylusButtonDown(self: Window_16$17,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: Label_17$18,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: TextBox_18$19,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: Button_19$20,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: CheckBox_20$21,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: ComboBox_21$22,e: StylusButtonEventArgs)OnPreviewStylusButtonDown(self: Separator_22$23,e: StylusButtonEventArgs)
"""
pass
def OnPreviewStylusButtonUp(self,*args):
"""
OnPreviewStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusButtonUp
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
OnPreviewStylusButtonUp(self: Window_16$17,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: Label_17$18,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: TextBox_18$19,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: Button_19$20,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: CheckBox_20$21,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: ComboBox_21$22,e: StylusButtonEventArgs)OnPreviewStylusButtonUp(self: Separator_22$23,e: StylusButtonEventArgs)
"""
pass
def OnPreviewStylusDown(self,*args):
"""
OnPreviewStylusDown(self: UIElement,e: StylusDownEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusDown
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
OnPreviewStylusDown(self: Window_16$17,e: StylusDownEventArgs)OnPreviewStylusDown(self: Label_17$18,e: StylusDownEventArgs)OnPreviewStylusDown(self: TextBox_18$19,e: StylusDownEventArgs)OnPreviewStylusDown(self: Button_19$20,e: StylusDownEventArgs)OnPreviewStylusDown(self: CheckBox_20$21,e: StylusDownEventArgs)OnPreviewStylusDown(self: ComboBox_21$22,e: StylusDownEventArgs)OnPreviewStylusDown(self: Separator_22$23,e: StylusDownEventArgs)
"""
pass
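# --- Editor's illustrative sketch (not part of the generated stubs) ---
# ``StylusDownEventArgs`` adds ``TapCount`` on top of the points exposed
# by ``GetStylusPoints``; each sampled point carries a normalized
# ``PressureFactor``. ``InkPad`` is hypothetical.
#
#     from System.Windows.Controls import Canvas
#
#     class InkPad(Canvas):
#         def OnPreviewStylusDown(self, e):
#             print('taps:', e.TapCount)
#             for pt in e.GetStylusPoints(self):
#                 print('pressure:', pt.PressureFactor)
#             super(InkPad, self).OnPreviewStylusDown(e)
# ----------------------------------------------------------------------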
def OnPreviewStylusInAirMove(self,*args):
"""
OnPreviewStylusInAirMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInAirMove
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnPreviewStylusInAirMove(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusInAirMove(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusInAirMove(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusInAirMove(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusInAirMove(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusInAirMove(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusInAirMove(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnPreviewStylusInRange(self,*args):
"""
OnPreviewStylusInRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusInRange
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnPreviewStylusInRange(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusInRange(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusInRange(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusInRange(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusInRange(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusInRange(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusInRange(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnPreviewStylusMove(self,*args):
"""
OnPreviewStylusMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusMove
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnPreviewStylusMove(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusMove(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusMove(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusMove(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusMove(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusMove(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusMove(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnPreviewStylusOutOfRange(self,*args):
"""
OnPreviewStylusOutOfRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusOutOfRange
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnPreviewStylusOutOfRange(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusOutOfRange(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnPreviewStylusSystemGesture(self,*args):
"""
OnPreviewStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
Invoked when an unhandled
System.Windows.Input.Stylus.PreviewStylusSystemGesture attached event reaches
an element in its route that is derived from this class. Implement this method
to add class handling for this event.
e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event
data.
OnPreviewStylusSystemGesture(self: Window_16$17,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: Label_17$18,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: TextBox_18$19,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: Button_19$20,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: CheckBox_20$21,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: ComboBox_21$22,e: StylusSystemGestureEventArgs)OnPreviewStylusSystemGesture(self: Separator_22$23,e: StylusSystemGestureEventArgs)
"""
pass
def OnPreviewStylusUp(self,*args):
"""
OnPreviewStylusUp(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.PreviewStylusUp attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnPreviewStylusUp(self: Window_16$17,e: StylusEventArgs)OnPreviewStylusUp(self: Label_17$18,e: StylusEventArgs)OnPreviewStylusUp(self: TextBox_18$19,e: StylusEventArgs)OnPreviewStylusUp(self: Button_19$20,e: StylusEventArgs)OnPreviewStylusUp(self: CheckBox_20$21,e: StylusEventArgs)OnPreviewStylusUp(self: ComboBox_21$22,e: StylusEventArgs)OnPreviewStylusUp(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnPreviewTextInput(self,*args):
"""
OnPreviewTextInput(self: UIElement,e: TextCompositionEventArgs)
Invoked when an unhandled
System.Windows.Input.TextCompositionManager.PreviewTextInput attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
OnPreviewTextInput(self: Window_16$17,e: TextCompositionEventArgs)OnPreviewTextInput(self: Label_17$18,e: TextCompositionEventArgs)OnPreviewTextInput(self: TextBox_18$19,e: TextCompositionEventArgs)OnPreviewTextInput(self: Button_19$20,e: TextCompositionEventArgs)OnPreviewTextInput(self: CheckBox_20$21,e: TextCompositionEventArgs)OnPreviewTextInput(self: ComboBox_21$22,e: TextCompositionEventArgs)OnPreviewTextInput(self: Separator_22$23,e: TextCompositionEventArgs)
"""
pass
def OnPreviewTouchDown(self,*args):
"""
OnPreviewTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchDown
routed event that occurs when a touch presses this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnPreviewTouchDown(self: Window_16$17,e: TouchEventArgs)OnPreviewTouchDown(self: Label_17$18,e: TouchEventArgs)OnPreviewTouchDown(self: TextBox_18$19,e: TouchEventArgs)OnPreviewTouchDown(self: Button_19$20,e: TouchEventArgs)OnPreviewTouchDown(self: CheckBox_20$21,e: TouchEventArgs)OnPreviewTouchDown(self: ComboBox_21$22,e: TouchEventArgs)OnPreviewTouchDown(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnPreviewTouchMove(self,*args):
"""
OnPreviewTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchMove
routed event that occurs when a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnPreviewTouchMove(self: Window_16$17,e: TouchEventArgs)OnPreviewTouchMove(self: Label_17$18,e: TouchEventArgs)OnPreviewTouchMove(self: TextBox_18$19,e: TouchEventArgs)OnPreviewTouchMove(self: Button_19$20,e: TouchEventArgs)OnPreviewTouchMove(self: CheckBox_20$21,e: TouchEventArgs)OnPreviewTouchMove(self: ComboBox_21$22,e: TouchEventArgs)OnPreviewTouchMove(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnPreviewTouchUp(self,*args):
"""
OnPreviewTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.PreviewTouchUp routed
event that occurs when a touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnPreviewTouchUp(self: Window_16$17,e: TouchEventArgs)OnPreviewTouchUp(self: Label_17$18,e: TouchEventArgs)OnPreviewTouchUp(self: TextBox_18$19,e: TouchEventArgs)OnPreviewTouchUp(self: Button_19$20,e: TouchEventArgs)OnPreviewTouchUp(self: CheckBox_20$21,e: TouchEventArgs)OnPreviewTouchUp(self: ComboBox_21$22,e: TouchEventArgs)OnPreviewTouchUp(self: Separator_22$23,e: TouchEventArgs)
"""
pass
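# --- Editor's illustrative sketch (not part of the generated stubs) ---
# Capturing the TouchDevice on the preview down keeps subsequent touch
# events routed here even if the contact drifts off the element;
# ``GetTouchPoint`` reports positions relative to a chosen element.
# ``TouchPad`` is hypothetical.
#
#     from System.Windows.Controls import Canvas
#
#     class TouchPad(Canvas):
#         def OnPreviewTouchDown(self, e):
#             e.TouchDevice.Capture(self)
#         def OnPreviewTouchUp(self, e):
#             pos = e.GetTouchPoint(self).Position
#             print('released at', pos.X, pos.Y)
#             self.ReleaseTouchCapture(e.TouchDevice)
# ----------------------------------------------------------------------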
def OnPropertyChanged(self,*args):
"""
OnPropertyChanged(self: FrameworkElement,e: DependencyPropertyChangedEventArgs)
Invoked whenever the effective value of any dependency property on this
System.Windows.FrameworkElement has been updated. The specific dependency
property that changed is reported in the arguments parameter. Overrides
System.Windows.DependencyObject.OnPropertyChanged(System.Windows.DependencyPrope
rtyChangedEventArgs).
e: The event data that describes the property that changed,as well as old and new
values.
OnPropertyChanged(self: Window_16$17,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Label_17$18,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: TextBox_18$19,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Button_19$20,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: CheckBox_20$21,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: ComboBox_21$22,e: DependencyPropertyChangedEventArgs)OnPropertyChanged(self: Separator_22$23,e: DependencyPropertyChangedEventArgs)
"""
pass
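# --- Editor's illustrative sketch (not part of the generated stubs) ---
# Overrides of OnPropertyChanged must forward to the base implementation
# (ideally first), since WPF drives layout and framework services from it.
# ``WatchfulLabel`` is hypothetical; Property, OldValue and NewValue are
# the real members of DependencyPropertyChangedEventArgs.
#
#     from System.Windows.Controls import Label
#
#     class WatchfulLabel(Label):
#         def OnPropertyChanged(self, e):
#             super(WatchfulLabel, self).OnPropertyChanged(e)
#             if e.Property.Name == 'Content':
#                 print('Content:', e.OldValue, '->', e.NewValue)
# ----------------------------------------------------------------------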
def OnQueryContinueDrag(self,*args):
"""
OnQueryContinueDrag(self: UIElement,e: QueryContinueDragEventArgs)
Invoked when an unhandled System.Windows.DragDrop.QueryContinueDrag attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.QueryContinueDragEventArgs that contains the event data.
OnQueryContinueDrag(self: Window_16$17,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: Label_17$18,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: TextBox_18$19,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: Button_19$20,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: CheckBox_20$21,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: ComboBox_21$22,e: QueryContinueDragEventArgs)OnQueryContinueDrag(self: Separator_22$23,e: QueryContinueDragEventArgs)
"""
pass
def OnQueryCursor(self,*args):
"""
OnQueryCursor(self: UIElement,e: QueryCursorEventArgs)
Invoked when an unhandled System.Windows.Input.Mouse.QueryCursor attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.QueryCursorEventArgs that contains the event data.
OnQueryCursor(self: Window_16$17,e: QueryCursorEventArgs)OnQueryCursor(self: Label_17$18,e: QueryCursorEventArgs)OnQueryCursor(self: TextBox_18$19,e: QueryCursorEventArgs)OnQueryCursor(self: Button_19$20,e: QueryCursorEventArgs)OnQueryCursor(self: CheckBox_20$21,e: QueryCursorEventArgs)OnQueryCursor(self: ComboBox_21$22,e: QueryCursorEventArgs)OnQueryCursor(self: Separator_22$23,e: QueryCursorEventArgs)
"""
pass
def OnRender(self,*args):
"""
OnRender(self: UIElement,drawingContext: DrawingContext)
When overridden in a derived class,participates in rendering operations that
are directed by the layout system. The rendering instructions for this element
are not used directly when this method is invoked,and are instead preserved
for later asynchronous use by layout and drawing.
drawingContext: The drawing instructions for a specific element. This context is provided to
the layout system.
OnRender(self: Window_16$17,drawingContext: DrawingContext)OnRender(self: Label_17$18,drawingContext: DrawingContext)OnRender(self: TextBox_18$19,drawingContext: DrawingContext)OnRender(self: Button_19$20,drawingContext: DrawingContext)OnRender(self: CheckBox_20$21,drawingContext: DrawingContext)OnRender(self: ComboBox_21$22,drawingContext: DrawingContext)OnRender(self: Separator_22$23,drawingContext: DrawingContext)
"""
pass
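# --- Editor's illustrative sketch (not part of the generated stubs) ---
# OnRender only records instructions into the DrawingContext; WPF replays
# them asynchronously, and ``InvalidateVisual()`` schedules a fresh pass.
# ``Swatch`` is hypothetical; DrawRectangle accepts a null (None) pen.
#
#     from System.Windows import Rect
#     from System.Windows.Controls import Control
#     from System.Windows.Media import Brushes
#
#     class Swatch(Control):
#         def OnRender(self, dc):
#             dc.DrawRectangle(Brushes.SteelBlue, None, Rect(self.RenderSize))
# ----------------------------------------------------------------------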
def OnRenderSizeChanged(self,*args):
"""
OnRenderSizeChanged(self: FrameworkElement,sizeInfo: SizeChangedInfo)
Raises the System.Windows.FrameworkElement.SizeChanged event,using the
specified information as part of the eventual event data.
sizeInfo: Details of the old and new size involved in the change.
OnRenderSizeChanged(self: Window_16$17,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: Label_17$18,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: TextBox_18$19,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: Button_19$20,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: CheckBox_20$21,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: ComboBox_21$22,sizeInfo: SizeChangedInfo)OnRenderSizeChanged(self: Separator_22$23,sizeInfo: SizeChangedInfo)
"""
pass
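# --- Editor's illustrative sketch (not part of the generated stubs) ---
# ``SizeChangedInfo`` says which axis actually changed, so handlers can
# skip needless work; forward to the base so SizeChanged is still raised.
# ``AspectBox`` is hypothetical.
#
#     from System.Windows.Controls import Control
#
#     class AspectBox(Control):
#         def OnRenderSizeChanged(self, sizeInfo):
#             super(AspectBox, self).OnRenderSizeChanged(sizeInfo)
#             if sizeInfo.WidthChanged:
#                 print('new width:', sizeInfo.NewSize.Width)
# ----------------------------------------------------------------------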
def OnStyleChanged(self,*args):
"""
OnStyleChanged(self: FrameworkElement,oldStyle: Style,newStyle: Style)
Invoked when the style in use on this element changes,which will invalidate
the layout.
oldStyle: The old style.
newStyle: The new style.
OnStyleChanged(self: Window_16$17,oldStyle: Style,newStyle: Style)OnStyleChanged(self: Label_17$18,oldStyle: Style,newStyle: Style)OnStyleChanged(self: TextBox_18$19,oldStyle: Style,newStyle: Style)OnStyleChanged(self: Button_19$20,oldStyle: Style,newStyle: Style)OnStyleChanged(self: CheckBox_20$21,oldStyle: Style,newStyle: Style)OnStyleChanged(self: ComboBox_21$22,oldStyle: Style,newStyle: Style)OnStyleChanged(self: Separator_22$23,oldStyle: Style,newStyle: Style)
"""
pass
def OnStylusButtonDown(self,*args):
"""
OnStylusButtonDown(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonDown attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
OnStylusButtonDown(self: Window_16$17,e: StylusButtonEventArgs)OnStylusButtonDown(self: Label_17$18,e: StylusButtonEventArgs)OnStylusButtonDown(self: TextBox_18$19,e: StylusButtonEventArgs)OnStylusButtonDown(self: Button_19$20,e: StylusButtonEventArgs)OnStylusButtonDown(self: CheckBox_20$21,e: StylusButtonEventArgs)OnStylusButtonDown(self: ComboBox_21$22,e: StylusButtonEventArgs)OnStylusButtonDown(self: Separator_22$23,e: StylusButtonEventArgs)
"""
pass
def OnStylusButtonUp(self,*args):
"""
OnStylusButtonUp(self: UIElement,e: StylusButtonEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusButtonUp attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusButtonEventArgs that contains the event data.
OnStylusButtonUp(self: Window_16$17,e: StylusButtonEventArgs)OnStylusButtonUp(self: Label_17$18,e: StylusButtonEventArgs)OnStylusButtonUp(self: TextBox_18$19,e: StylusButtonEventArgs)OnStylusButtonUp(self: Button_19$20,e: StylusButtonEventArgs)OnStylusButtonUp(self: CheckBox_20$21,e: StylusButtonEventArgs)OnStylusButtonUp(self: ComboBox_21$22,e: StylusButtonEventArgs)OnStylusButtonUp(self: Separator_22$23,e: StylusButtonEventArgs)
"""
pass
def OnStylusDown(self,*args):
"""
OnStylusDown(self: UIElement,e: StylusDownEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusDown attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.StylusDownEventArgs that contains the event data.
OnStylusDown(self: Window_16$17,e: StylusDownEventArgs)OnStylusDown(self: Label_17$18,e: StylusDownEventArgs)OnStylusDown(self: TextBox_18$19,e: StylusDownEventArgs)OnStylusDown(self: Button_19$20,e: StylusDownEventArgs)OnStylusDown(self: CheckBox_20$21,e: StylusDownEventArgs)OnStylusDown(self: ComboBox_21$22,e: StylusDownEventArgs)OnStylusDown(self: Separator_22$23,e: StylusDownEventArgs)
"""
pass
def OnStylusEnter(self,*args):
"""
OnStylusEnter(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusEnter attached
event is raised by this element. Implement this method to add class handling
for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusEnter(self: Window_16$17,e: StylusEventArgs)OnStylusEnter(self: Label_17$18,e: StylusEventArgs)OnStylusEnter(self: TextBox_18$19,e: StylusEventArgs)OnStylusEnter(self: Button_19$20,e: StylusEventArgs)OnStylusEnter(self: CheckBox_20$21,e: StylusEventArgs)OnStylusEnter(self: ComboBox_21$22,e: StylusEventArgs)OnStylusEnter(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusInAirMove(self,*args):
"""
OnStylusInAirMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusInAirMove attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusInAirMove(self: Window_16$17,e: StylusEventArgs)OnStylusInAirMove(self: Label_17$18,e: StylusEventArgs)OnStylusInAirMove(self: TextBox_18$19,e: StylusEventArgs)OnStylusInAirMove(self: Button_19$20,e: StylusEventArgs)OnStylusInAirMove(self: CheckBox_20$21,e: StylusEventArgs)OnStylusInAirMove(self: ComboBox_21$22,e: StylusEventArgs)OnStylusInAirMove(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusInRange(self,*args):
"""
OnStylusInRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusInRange attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusInRange(self: Window_16$17,e: StylusEventArgs)OnStylusInRange(self: Label_17$18,e: StylusEventArgs)OnStylusInRange(self: TextBox_18$19,e: StylusEventArgs)OnStylusInRange(self: Button_19$20,e: StylusEventArgs)OnStylusInRange(self: CheckBox_20$21,e: StylusEventArgs)OnStylusInRange(self: ComboBox_21$22,e: StylusEventArgs)OnStylusInRange(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusLeave(self,*args):
"""
OnStylusLeave(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusLeave attached
event is raised by this element. Implement this method to add class handling
for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusLeave(self: Window_16$17,e: StylusEventArgs)OnStylusLeave(self: Label_17$18,e: StylusEventArgs)OnStylusLeave(self: TextBox_18$19,e: StylusEventArgs)OnStylusLeave(self: Button_19$20,e: StylusEventArgs)OnStylusLeave(self: CheckBox_20$21,e: StylusEventArgs)OnStylusLeave(self: ComboBox_21$22,e: StylusEventArgs)OnStylusLeave(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusMove(self,*args):
"""
OnStylusMove(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusMove attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusMove(self: Window_16$17,e: StylusEventArgs)OnStylusMove(self: Label_17$18,e: StylusEventArgs)OnStylusMove(self: TextBox_18$19,e: StylusEventArgs)OnStylusMove(self: Button_19$20,e: StylusEventArgs)OnStylusMove(self: CheckBox_20$21,e: StylusEventArgs)OnStylusMove(self: ComboBox_21$22,e: StylusEventArgs)OnStylusMove(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusOutOfRange(self,*args):
"""
OnStylusOutOfRange(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusOutOfRange attached
event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusOutOfRange(self: Window_16$17,e: StylusEventArgs)OnStylusOutOfRange(self: Label_17$18,e: StylusEventArgs)OnStylusOutOfRange(self: TextBox_18$19,e: StylusEventArgs)OnStylusOutOfRange(self: Button_19$20,e: StylusEventArgs)OnStylusOutOfRange(self: CheckBox_20$21,e: StylusEventArgs)OnStylusOutOfRange(self: ComboBox_21$22,e: StylusEventArgs)OnStylusOutOfRange(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnStylusSystemGesture(self,*args):
"""
OnStylusSystemGesture(self: UIElement,e: StylusSystemGestureEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusSystemGesture
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.StylusSystemGestureEventArgs that contains the event
data.
OnStylusSystemGesture(self: Window_16$17,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: Label_17$18,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: TextBox_18$19,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: Button_19$20,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: CheckBox_20$21,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: ComboBox_21$22,e: StylusSystemGestureEventArgs)OnStylusSystemGesture(self: Separator_22$23,e: StylusSystemGestureEventArgs)
"""
pass
def OnStylusUp(self,*args):
"""
OnStylusUp(self: UIElement,e: StylusEventArgs)
Invoked when an unhandled System.Windows.Input.Stylus.StylusUp attached event
reaches an element in its route that is derived from this class. Implement this
method to add class handling for this event.
e: The System.Windows.Input.StylusEventArgs that contains the event data.
OnStylusUp(self: Window_16$17,e: StylusEventArgs)OnStylusUp(self: Label_17$18,e: StylusEventArgs)OnStylusUp(self: TextBox_18$19,e: StylusEventArgs)OnStylusUp(self: Button_19$20,e: StylusEventArgs)OnStylusUp(self: CheckBox_20$21,e: StylusEventArgs)OnStylusUp(self: ComboBox_21$22,e: StylusEventArgs)OnStylusUp(self: Separator_22$23,e: StylusEventArgs)
"""
pass
def OnTemplateChanged(self,*args):
"""
OnTemplateChanged(self: Control,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)
Called whenever the control's template changes.
oldTemplate: The old template.
newTemplate: The new template.
OnTemplateChanged(self: Window_16$17,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: Label_17$18,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: TextBox_18$19,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: Button_19$20,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: CheckBox_20$21,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: ComboBox_21$22,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)OnTemplateChanged(self: Separator_22$23,oldTemplate: ControlTemplate,newTemplate: ControlTemplate)
"""
pass
def OnTextInput(self,*args):
"""
OnTextInput(self: UIElement,e: TextCompositionEventArgs)
Invoked when an unhandled System.Windows.Input.TextCompositionManager.TextInput
attached event reaches an element in its route that is derived from this class.
Implement this method to add class handling for this event.
e: The System.Windows.Input.TextCompositionEventArgs that contains the event data.
OnTextInput(self: Window_16$17,e: TextCompositionEventArgs)OnTextInput(self: Label_17$18,e: TextCompositionEventArgs)OnTextInput(self: TextBox_18$19,e: TextCompositionEventArgs)OnTextInput(self: Button_19$20,e: TextCompositionEventArgs)OnTextInput(self: CheckBox_20$21,e: TextCompositionEventArgs)OnTextInput(self: ComboBox_21$22,e: TextCompositionEventArgs)OnTextInput(self: Separator_22$23,e: TextCompositionEventArgs)
"""
pass
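# --- Editor's illustrative sketch (not part of the generated stubs) ---
# ``TextCompositionEventArgs.Text`` carries the composed string. Note that
# input controls such as TextBox consume the bubbling TextInput internally,
# so filtering there is normally done in OnPreviewTextInput instead.
# ``DigitsOnlyBox`` is hypothetical and assumes keyboard focus.
#
#     from System.Windows.Controls import TextBox
#
#     class DigitsOnlyBox(TextBox):
#         def OnPreviewTextInput(self, e):
#             if not e.Text.isdigit():
#                 e.Handled = True   # drop non-numeric composition
#             else:
#                 super(DigitsOnlyBox, self).OnPreviewTextInput(e)
# ----------------------------------------------------------------------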
def OnToolTipClosing(self,*args):
"""
OnToolTipClosing(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever an unhandled System.Windows.FrameworkElement.ToolTipClosing
routed event reaches this class in its route. Implement this method to add
class handling for this event.
e: Provides data about the event.
OnToolTipClosing(self: Window_16$17,e: ToolTipEventArgs)OnToolTipClosing(self: Label_17$18,e: ToolTipEventArgs)OnToolTipClosing(self: TextBox_18$19,e: ToolTipEventArgs)OnToolTipClosing(self: Button_19$20,e: ToolTipEventArgs)OnToolTipClosing(self: CheckBox_20$21,e: ToolTipEventArgs)OnToolTipClosing(self: ComboBox_21$22,e: ToolTipEventArgs)OnToolTipClosing(self: Separator_22$23,e: ToolTipEventArgs)
"""
pass
def OnToolTipOpening(self,*args):
"""
OnToolTipOpening(self: FrameworkElement,e: ToolTipEventArgs)
Invoked whenever the System.Windows.FrameworkElement.ToolTipOpening routed
event reaches this class in its route. Implement this method to add class
handling for this event.
e: Provides data about the event.
OnToolTipOpening(self: Window_16$17,e: ToolTipEventArgs)OnToolTipOpening(self: Label_17$18,e: ToolTipEventArgs)OnToolTipOpening(self: TextBox_18$19,e: ToolTipEventArgs)OnToolTipOpening(self: Button_19$20,e: ToolTipEventArgs)OnToolTipOpening(self: CheckBox_20$21,e: ToolTipEventArgs)OnToolTipOpening(self: ComboBox_21$22,e: ToolTipEventArgs)OnToolTipOpening(self: Separator_22$23,e: ToolTipEventArgs)
"""
pass
def OnTouchDown(self,*args):
"""
OnTouchDown(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchDown routed event
that occurs when a touch presses inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchDown(self: Window_16$17,e: TouchEventArgs)OnTouchDown(self: Label_17$18,e: TouchEventArgs)OnTouchDown(self: TextBox_18$19,e: TouchEventArgs)OnTouchDown(self: Button_19$20,e: TouchEventArgs)OnTouchDown(self: CheckBox_20$21,e: TouchEventArgs)OnTouchDown(self: ComboBox_21$22,e: TouchEventArgs)OnTouchDown(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchEnter(self,*args):
"""
OnTouchEnter(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchEnter routed
event that occurs when a touch moves from outside to inside the bounds of this
element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchEnter(self: Window_16$17,e: TouchEventArgs)OnTouchEnter(self: Label_17$18,e: TouchEventArgs)OnTouchEnter(self: TextBox_18$19,e: TouchEventArgs)OnTouchEnter(self: Button_19$20,e: TouchEventArgs)OnTouchEnter(self: CheckBox_20$21,e: TouchEventArgs)OnTouchEnter(self: ComboBox_21$22,e: TouchEventArgs)OnTouchEnter(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchLeave(self,*args):
"""
OnTouchLeave(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchLeave routed
event that occurs when a touch moves from inside to outside the bounds of this
System.Windows.UIElement.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchLeave(self: Window_16$17,e: TouchEventArgs)OnTouchLeave(self: Label_17$18,e: TouchEventArgs)OnTouchLeave(self: TextBox_18$19,e: TouchEventArgs)OnTouchLeave(self: Button_19$20,e: TouchEventArgs)OnTouchLeave(self: CheckBox_20$21,e: TouchEventArgs)OnTouchLeave(self: ComboBox_21$22,e: TouchEventArgs)OnTouchLeave(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchMove(self,*args):
"""
OnTouchMove(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchMove routed event
that occurs when a touch moves while inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchMove(self: Window_16$17,e: TouchEventArgs)OnTouchMove(self: Label_17$18,e: TouchEventArgs)OnTouchMove(self: TextBox_18$19,e: TouchEventArgs)OnTouchMove(self: Button_19$20,e: TouchEventArgs)OnTouchMove(self: CheckBox_20$21,e: TouchEventArgs)OnTouchMove(self: ComboBox_21$22,e: TouchEventArgs)OnTouchMove(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnTouchUp(self,*args):
"""
OnTouchUp(self: UIElement,e: TouchEventArgs)
Provides class handling for the System.Windows.UIElement.TouchUp routed event
that occurs when a touch is released inside this element.
e: A System.Windows.Input.TouchEventArgs that contains the event data.
OnTouchUp(self: Window_16$17,e: TouchEventArgs)OnTouchUp(self: Label_17$18,e: TouchEventArgs)OnTouchUp(self: TextBox_18$19,e: TouchEventArgs)OnTouchUp(self: Button_19$20,e: TouchEventArgs)OnTouchUp(self: CheckBox_20$21,e: TouchEventArgs)OnTouchUp(self: ComboBox_21$22,e: TouchEventArgs)OnTouchUp(self: Separator_22$23,e: TouchEventArgs)
"""
pass
def OnVisualChildrenChanged(self,*args):
"""
OnVisualChildrenChanged(self: Visual,visualAdded: DependencyObject,visualRemoved: DependencyObject)
Called when the System.Windows.Media.VisualCollection of the visual object is
modified.
visualAdded: The System.Windows.Media.Visual that was added to the collection
visualRemoved: The System.Windows.Media.Visual that was removed from the collection
OnVisualChildrenChanged(self: Window_16$17,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Label_17$18,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: TextBox_18$19,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Button_19$20,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: CheckBox_20$21,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: ComboBox_21$22,visualAdded: DependencyObject,visualRemoved: DependencyObject)OnVisualChildrenChanged(self: Separator_22$23,visualAdded: DependencyObject,visualRemoved: DependencyObject)
"""
pass
def OnVisualParentChanged(self,*args):
"""
OnVisualParentChanged(self: FrameworkElement,oldParent: DependencyObject)
Invoked when the parent of this element in the visual tree is changed.
Overrides
System.Windows.UIElement.OnVisualParentChanged(System.Windows.DependencyObject).
oldParent: The old parent element. May be null to indicate that the element did not have a
visual parent previously.
OnVisualParentChanged(self: Window_16$17,oldParent: DependencyObject)OnVisualParentChanged(self: Label_17$18,oldParent: DependencyObject)OnVisualParentChanged(self: TextBox_18$19,oldParent: DependencyObject)OnVisualParentChanged(self: Button_19$20,oldParent: DependencyObject)OnVisualParentChanged(self: CheckBox_20$21,oldParent: DependencyObject)OnVisualParentChanged(self: ComboBox_21$22,oldParent: DependencyObject)OnVisualParentChanged(self: Separator_22$23,oldParent: DependencyObject)
"""
pass
def ParentLayoutInvalidated(self,*args):
"""
ParentLayoutInvalidated(self: FrameworkElement,child: UIElement)
Supports incremental layout implementations in specialized subclasses of
System.Windows.FrameworkElement.
System.Windows.FrameworkElement.ParentLayoutInvalidated(System.Windows.UIElement
) is invoked when a child element has invalidated a property that is marked in
metadata as affecting the parent's measure or arrange passes during layout.
child: The child element reporting the change.
ParentLayoutInvalidated(self: Window_16$17,child: UIElement)ParentLayoutInvalidated(self: Label_17$18,child: UIElement)ParentLayoutInvalidated(self: TextBox_18$19,child: UIElement)ParentLayoutInvalidated(self: Button_19$20,child: UIElement)ParentLayoutInvalidated(self: CheckBox_20$21,child: UIElement)ParentLayoutInvalidated(self: ComboBox_21$22,child: UIElement)ParentLayoutInvalidated(self: Separator_22$23,child: UIElement)
"""
pass
def RemoveLogicalChild(self,*args):
"""
RemoveLogicalChild(self: FrameworkElement,child: object)
Removes the provided object from this element's logical tree.
System.Windows.FrameworkElement updates the affected logical tree parent
pointers to keep in sync with this deletion.
child: The element to remove.
RemoveLogicalChild(self: Window_16$17,child: object)RemoveLogicalChild(self: Label_17$18,child: object)RemoveLogicalChild(self: TextBox_18$19,child: object)RemoveLogicalChild(self: Button_19$20,child: object)RemoveLogicalChild(self: CheckBox_20$21,child: object)RemoveLogicalChild(self: ComboBox_21$22,child: object)RemoveLogicalChild(self: Separator_22$23,child: object)
"""
pass
def RemoveVisualChild(self,*args):
"""
RemoveVisualChild(self: Visual,child: Visual)
Removes the parent-child relationship between two visuals.
child: The child visual object to remove from the parent visual.
RemoveVisualChild(self: Window_16$17,child: Window_16$17)RemoveVisualChild(self: Label_17$18,child: Label_17$18)RemoveVisualChild(self: TextBox_18$19,child: TextBox_18$19)RemoveVisualChild(self: Button_19$20,child: Button_19$20)RemoveVisualChild(self: CheckBox_20$21,child: CheckBox_20$21)RemoveVisualChild(self: ComboBox_21$22,child: ComboBox_21$22)RemoveVisualChild(self: Separator_22$23,child: Separator_22$23)
"""
pass
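# --- Editor's illustrative sketch (not part of the generated stubs) ---
# AddVisualChild/RemoveVisualChild only maintain the parent-child links;
# a host must also expose the child through GetVisualChild and report a
# matching VisualChildrenCount or the visual tree cannot be walked.
# ``SingleVisualHost`` is hypothetical, and the exact spelling for
# overriding the protected VisualChildrenCount property can vary across
# IronPython versions.
#
#     from System.Windows import FrameworkElement
#
#     class SingleVisualHost(FrameworkElement):
#         def __init__(self, child):
#             self._child = child
#             self.AddVisualChild(child)      # pairs with RemoveVisualChild
#         def GetVisualChild(self, index):
#             return self._child              # index is always 0 here
# ----------------------------------------------------------------------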
def ShouldSerializeProperty(self,*args):
"""
ShouldSerializeProperty(self: DependencyObject,dp: DependencyProperty) -> bool
Returns a value that indicates whether serialization processes should serialize
the value for the provided dependency property.
dp: The identifier for the dependency property that should be serialized.
Returns: true if the dependency property that is supplied should be value-serialized;
otherwise,false.
ShouldSerializeProperty(self: Window_16$17,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: Label_17$18,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: TextBox_18$19,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: Button_19$20,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: CheckBox_20$21,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: ComboBox_21$22,dp: DependencyProperty) -> bool
ShouldSerializeProperty(self: Separator_22$23,dp: DependencyProperty) -> bool
"""
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __str__(self,*args):
pass
DefaultStyleKey=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the key to use to reference the style for this control,when theme styles are used or defined.
"""
HandlesScrolling=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that indicates whether a control supports scrolling.
"""
HasEffectiveKeyboardFocus=property(lambda self: object(),lambda self,v: None,lambda self: None)
InheritanceBehavior=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the scope limits for property value inheritance,resource key lookup,and RelativeSource FindAncestor lookup.
"""
IsEnabledCore=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a value that becomes the return value of System.Windows.UIElement.IsEnabled in derived classes.
"""
LogicalChildren=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets an enumerator to the logical child elements of the System.Windows.Controls.ControlTemplate.
"""
StylusPlugIns=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets a collection of all stylus plug-in (customization) objects associated with this element.
"""
VisualBitmapEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffect value for the System.Windows.Media.Visual.
"""
VisualBitmapEffectInput=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Effects.BitmapEffectInput value for the System.Windows.Media.Visual.
"""
VisualBitmapScalingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.BitmapScalingMode for the System.Windows.Media.Visual.
"""
VisualCacheMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a cached representation of the System.Windows.Media.Visual.
"""
VisualChildrenCount=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the number of visual child elements within this element.
"""
VisualClearTypeHint=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.ClearTypeHint that determines how ClearType is rendered in the System.Windows.Media.Visual.
"""
VisualClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the clip region of the System.Windows.Media.Visual as a System.Windows.Media.Geometry value.
"""
VisualEdgeMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the edge mode of the System.Windows.Media.Visual as an System.Windows.Media.EdgeMode value.
"""
VisualEffect=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the bitmap effect to apply to the System.Windows.Media.Visual.
"""
VisualOffset=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the offset value of the visual object.
"""
VisualOpacity=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the opacity of the System.Windows.Media.Visual.
"""
VisualOpacityMask=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Brush value that represents the opacity mask of the System.Windows.Media.Visual.
"""
VisualParent=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the visual tree parent of the visual object.
"""
VisualScrollableAreaClip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a clipped scrollable area for the System.Windows.Media.Visual.
"""
VisualTextHintingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextHintingMode of the System.Windows.Media.Visual.
"""
VisualTextRenderingMode=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.TextRenderingMode of the System.Windows.Media.Visual.
"""
VisualTransform=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the System.Windows.Media.Transform value for the System.Windows.Media.Visual.
"""
VisualXSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the x-coordinate (vertical) guideline collection.
"""
VisualYSnappingGuidelines=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets the y-coordinate (horizontal) guideline collection.
"""
| 64.773585 | 745 | 0.787874 |
acf4d26d0ab71397dc7bf1bb5f13653ea5e1910e | 197 | py | Python | tasks/fl/fl_user.py | wongyuanneng/FYP-Attacks-against-FL | 9c267237aad19d6edb409a007922714cbf91a9c0 | [
"MIT"
] | 2 | 2022-01-27T07:46:34.000Z | 2022-01-27T07:46:46.000Z | tasks/fl/fl_user.py | wongyuanneng/FYP-Attacks-against-FL | 9c267237aad19d6edb409a007922714cbf91a9c0 | [
"MIT"
] | null | null | null | tasks/fl/fl_user.py | wongyuanneng/FYP-Attacks-against-FL | 9c267237aad19d6edb409a007922714cbf91a9c0 | [
"MIT"
] | null | null | null | from dataclasses import dataclass
from torch.utils.data.dataloader import DataLoader
@dataclass
class FLUser:
user_id: int = 0
compromised: bool = False
train_loader: DataLoader = None | 24.625 | 50 | 0.766497 |
acf4d2a287564bc9f32bfaa59918d16ff3dac754 | 5,829 | py | Python | lib/praw/models/auth.py | KODeKarnage/script.module.praw | 70e208aa0f582df5180a033794ac1aa705b7d21f | [
"BSD-2-Clause"
] | null | null | null | lib/praw/models/auth.py | KODeKarnage/script.module.praw | 70e208aa0f582df5180a033794ac1aa705b7d21f | [
"BSD-2-Clause"
] | null | null | null | lib/praw/models/auth.py | KODeKarnage/script.module.praw | 70e208aa0f582df5180a033794ac1aa705b7d21f | [
"BSD-2-Clause"
] | null | null | null | """Provide the Auth class."""
# workaround to allow both praw and prawcore in the same module
import os
import sys
cfd = os.path.dirname(os.path.abspath(__file__))
cfd = os.path.dirname(cfd)
sys.path.append(os.path.join(cfd, 'prawcore'))
# /workaround
from prawcore import (Authorizer, ImplicitAuthorizer, TrustedAuthenticator,
UntrustedAuthenticator, session)
from .base import PRAWBase
from ..exceptions import ClientException
class Auth(PRAWBase):
"""Auth provides an interface to Reddit's authorization."""
@property
def limits(self):
"""Return a dictionary containing the rate limit info.
The keys are:
:remaining: The number of requests remaining to be made in the
current rate limit window.
:reset_timestamp: A unix timestamp providing an upper bound on when the
rate limit counters will reset.
:used: The number of requests made in the current rate limit
window.
All values are initially ``None`` as these values are set in response
to issued requests.
The ``reset_timestamp`` value is an upper bound as the real timestamp
is computed on Reddit's end in preparation for sending the
response. This value may change slightly within a given window due to
slight changes in response times and rounding.
"""
data = self._reddit._core._rate_limiter
return {'remaining': data.remaining,
'reset_timestamp': data.reset_timestamp, 'used': data.used}
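# A minimal usage sketch (hypothetical names; assumes an authenticated
# ``reddit`` instance whose prior requests have populated the rate limiter):
#
#     limits = reddit.auth.limits
#     if limits['remaining'] is not None and limits['remaining'] < 10:
#         time.sleep(max(0, limits['reset_timestamp'] - time.time()))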
def authorize(self, code):
"""Complete the web authorization flow and return the refresh token.
:param code: The code obtained through the request to the redirect uri.
:returns: The obtained refresh token, if available, otherwise ``None``.
The session's active authorization will be updated upon success.
"""
authenticator = self._reddit._read_only_core._authorizer._authenticator
if not isinstance(authenticator, TrustedAuthenticator) or \
self._reddit.config.username:
raise ClientException('authorize can only be used with web apps.')
authorizer = Authorizer(authenticator)
authorizer.authorize(code)
authorized_session = session(authorizer)
self._reddit._core = self._reddit._authorized_core = authorized_session
return authorizer.refresh_token
def implicit(self, access_token, expires_in, scope):
"""Set the active authorization to be an implicit authorization.
:param access_token: The access_token obtained from Reddit's callback.
:param expires_in: The number of seconds the ``access_token`` is valid
for. The origin of this value was returned from Reddit's callback.
You may need to subtract an offset before passing in this number to
account for a delay between when Reddit prepared the response, and
when you make this function call.
:param scope: A space-delimited string of Reddit OAuth2 scope names as
returned from Reddit's callback.
Raises :class:`.ClientException` if :class:`.Reddit` was initialized for
a non-installed application type.
"""
authenticator = self._reddit._read_only_core._authorizer._authenticator
if not isinstance(authenticator, UntrustedAuthenticator):
raise ClientException('implicit can only be used with installed '
'apps.')
implicit_session = session(ImplicitAuthorizer(
authenticator, access_token, expires_in, scope))
self._reddit._core = self._reddit._authorized_core = implicit_session
def scopes(self):
"""Return a set of scopes included in the current authorization.
For read-only authorizations this should return ``{'*'}``.
"""
authorizer = self._reddit._core._authorizer
if not authorizer.is_valid():
authorizer.refresh()
return authorizer.scopes
def url(self, scopes, state, duration='permanent', implicit=False):
"""Return the URL used out-of-band to grant access to your application.
:param scopes: A list of OAuth scopes to request authorization for.
:param state: A string that will be reflected in the callback to
``redirect_uri``. This value should be temporarily unique to the
client for whom the URL is generated.
:param duration: Either ``permanent`` or ``temporary`` (default:
permanent). ``temporary`` authorizations generate access tokens
that last only 1 hour. ``permanent`` authorizations additionally
generate a refresh token that can be indefinitely used to generate
new hour-long access tokens. This value is ignored when
``implicit=True``.
:param implicit: For **installed** applications, this value can be set
to use the implicit, rather than the code flow. When True, the
``duration`` argument has no effect as only temporary tokens can be
retrieved.
"""
authenticator = self._reddit._read_only_core._authorizer._authenticator
if authenticator.redirect_uri is self._reddit.config.CONFIG_NOT_SET:
raise ClientException('redirect_uri must be provided')
if isinstance(authenticator, UntrustedAuthenticator):
return authenticator.authorize_url(
'temporary' if implicit else duration, scopes, state,
implicit=implicit)
elif implicit:
raise ClientException('implicit can only be set for installed '
'applications')
return authenticator.authorize_url(duration, scopes, state)
| 44.496183 | 79 | 0.671299 |
acf4d2deea0874d84d9c7791c7ac2e934219feea | 6,384 | py | Python | tests/database_querying.py | brightway-lca/brightway2-data | 34296248c777fa5023338bb32754d3d3426276cc | [
"BSD-3-Clause"
] | null | null | null | tests/database_querying.py | brightway-lca/brightway2-data | 34296248c777fa5023338bb32754d3d3426276cc | [
"BSD-3-Clause"
] | 28 | 2020-03-03T09:15:38.000Z | 2022-02-25T13:55:49.000Z | tests/database_querying.py | brightway-lca/brightway2-data-copy2 | a1a4bdf681054d52ae8a0cf0a6a76d1e2bb135da | [
"BSD-3-Clause"
] | 6 | 2020-03-03T10:23:44.000Z | 2021-12-21T08:45:03.000Z | from bw2data import projects
from bw2data.backends import Activity as PWActivity
from bw2data.backends import ActivityDataset
from bw2data.backends import Exchange as PWExchange
from bw2data.backends import ExchangeDataset
from bw2data.backends.utils import convert_backend
from bw2data.database import DatabaseChooser
from bw2data.errors import (
InvalidExchange,
MissingIntermediateData,
UnknownObject,
UntypedExchange,
ValidityError,
)
from bw2data.meta import databases, geomapping, methods
from bw2data.tests import BW2DataTest
class DatabaseQuerysetTest(BW2DataTest):
def extra_setup(self):
self.db = DatabaseChooser("Order!")
self.db.write(
{
("Order!", "first"): {
"name": "a",
"location": "delaware",
"reference product": "widget",
},
("Order!", "second"): {
"name": "b",
"location": "carolina",
"reference product": "wiggle",
},
("Order!", "third"): {
"name": "c",
"location": "baseball",
"reference product": "lollipop",
},
("Order!", "fourth"): {
"name": "d",
"location": "alabama",
"reference product": "widget",
},
}
)
def test_setup_clean(self):
self.assertEqual(len(databases), 1)
self.assertTrue("Order!" in databases)
self.assertEqual(list(methods), [])
self.assertEqual(len(geomapping), 5) # GLO
self.assertTrue("GLO" in geomapping)
self.assertTrue("carolina" in geomapping)
self.assertEqual(len(projects), 1) # Default project
self.assertTrue("default" in projects)
def test_random_with_global_filters(self):
self.db.filters = {"product": "lollipop"}
for _ in range(10):
self.assertEqual(self.db.random()["name"], "c")
def test_random_with_local_filters(self):
for _ in range(10):
self.assertEqual(
self.db.random(filters={"product": "lollipop"})["name"], "c"
)
def test_random_with_local_and_global_filters(self):
db = DatabaseChooser("Newt")
db.write(
{
("Newt", "first"): {
"name": "a",
"location": "delaware",
"reference product": "widget",
},
("Newt", "second"): {
"name": "b",
"location": "delaware",
"reference product": "wiggle",
},
("Newt", "third"): {
"name": "c",
"location": "alabama",
"reference product": "widget",
},
("Newt", "fourth"): {
"name": "d",
"location": "alabama",
"reference product": "wiggle",
},
}
)
self.assertTrue(len({db.random()["name"] for _ in range(10)}) > 1)
db.filters = {"product": "widget"}
for _ in range(10):
self.assertEqual(
self.db.random(filters={"location": "delaware"})["name"], "a"
)
def test_contains_respects_filters(self):
self.db.filters = {"product": "lollipop"}
self.assertFalse(("Order!", "fourth") in self.db)
def test_get_ignores_filters(self):
self.db.filters = {"product": "giggles"}
self.assertEqual(self.db.get("fourth")["name"], "d")
def test_filter(self):
self.db.filters = {"product": "widget"}
self.assertEqual(len([x for x in self.db]), 2)
def test_order_by(self):
self.db.order_by = "name"
self.assertEqual([x["name"] for x in self.db], ["a", "b", "c", "d"])
def test_order_by_bad_field(self):
with self.assertRaises(AssertionError):
self.db.order_by = "poopy"
def test_filter_bad_field(self):
with self.assertRaises(AssertionError):
self.db.filters = {"poopy": "yuck"}
def test_filter_not_dict(self):
with self.assertRaises(AssertionError):
self.db.filters = "poopy"
def test_reset_order_by(self):
self.db.order_by = "name"
self.db.order_by = None
as_lists = [[x["name"] for x in self.db] for _ in range(10)]
first_elements = {x[0] for x in as_lists}
self.assertTrue(len(first_elements) > 1)
def test_reset_filters(self):
self.db.filters = {"product": "widget"}
self.assertEqual(len([x for x in self.db]), 2)
self.db.filters = None
self.assertEqual(len([x for x in self.db]), 4)
def test_len_respects_filters(self):
self.db.filters = {"product": "widget"}
self.assertEqual(len(self.db), 2)
def test_make_searchable_unknown_object(self):
db = DatabaseChooser("mysterious")
with self.assertRaises(UnknownObject):
db.make_searchable()
def test_convert_same_backend(self):
database = DatabaseChooser("a database")
database.write(
{
("a database", "foo"): {
"exchanges": [
{
"input": ("a database", "foo"),
"amount": 1,
"type": "production",
}
],
"location": "bar",
"name": "baz",
},
}
)
self.assertFalse(convert_backend("a database", "sqlite"))
def test_convert_backend(self):
self.maxDiff = None
database = DatabaseChooser("a database")
database.write(
{
("a database", "foo"): {
"exchanges": [
{
"input": ("a database", "foo"),
"amount": 1,
"type": "production",
}
],
"location": "bar",
"name": "baz",
},
}
)
| 33.957447 | 77 | 0.488878 |
acf4d2ecdf0b43c7c51850d1399aa5669521d632 | 866 | py | Python | var/spack/repos/builtin/packages/tbl2asn/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360 | 2017-11-06T08:47:01.000Z | 2022-03-31T14:45:33.000Z | var/spack/repos/builtin/packages/tbl2asn/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838 | 2017-11-04T07:49:45.000Z | 2022-03-31T23:38:39.000Z | var/spack/repos/builtin/packages/tbl2asn/package.py | LiamBindle/spack | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793 | 2017-11-04T07:45:50.000Z | 2022-03-30T14:31:53.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from os import chmod
from spack import *
class Tbl2asn(Package):
"""Tbl2asn is a command-line program that automates the creation of
sequence records for submission to GenBank."""
homepage = "https://www.ncbi.nlm.nih.gov/genbank/tbl2asn2/"
version('2020-03-01', sha256='7cc1119d3cfcbbffdbd4ecf33cef8bbdd44fc5625c72976bee08b1157625377e')
def url_for_version(self, ver):
return "https://ftp.ncbi.nih.gov/toolbox/ncbi_tools/converters/by_program/tbl2asn/linux.tbl2asn.gz"
def install(self, spec, prefix):
mkdirp(prefix.bin)
install('../linux.tbl2asn', prefix.bin.tbl2asn)
chmod(prefix.bin.tbl2asn, 0o775)
| 33.307692 | 107 | 0.729792 |
acf4d393e46e23e5a61321224bf19e9834427862 | 12,033 | py | Python | nuage_neutron/plugins/nuage_ml2/securitygroup.py | krissterckx/nuage-openstack-neutron | ff0ddd995328c502a0b903a2f2bc30ebbb3e9ddb | [
"Apache-2.0"
] | null | null | null | nuage_neutron/plugins/nuage_ml2/securitygroup.py | krissterckx/nuage-openstack-neutron | ff0ddd995328c502a0b903a2f2bc30ebbb3e9ddb | [
"Apache-2.0"
] | null | null | null | nuage_neutron/plugins/nuage_ml2/securitygroup.py | krissterckx/nuage-openstack-neutron | ff0ddd995328c502a0b903a2f2bc30ebbb3e9ddb | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 NOKIA
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron._i18n import _
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.db.common_db_mixin import CommonDbMixin
from neutron.extensions import securitygroup as ext_sg
from neutron_lib import exceptions as n_exc
from neutron_lib.plugins import directory
from oslo_log import helpers as log_helpers
from oslo_log import log as logging
from oslo_utils import excutils
from nuage_neutron.plugins.common import base_plugin
from nuage_neutron.plugins.common import constants
from nuage_neutron.plugins.common import exceptions as nuage_exc
from nuage_neutron.plugins.common.time_tracker import TimeTracker
from nuage_neutron.plugins.common import utils as nuage_utils
from nuage_neutron.vsdclient.common import cms_id_helper
from nuage_neutron.vsdclient import restproxy
LOG = logging.getLogger(__name__)
class NuageSecurityGroup(base_plugin.BaseNuagePlugin,
CommonDbMixin):
def __init__(self):
super(NuageSecurityGroup, self).__init__()
self._l2_plugin = None
@property
def core_plugin(self):
if self._l2_plugin is None:
self._l2_plugin = directory.get_plugin()
return self._l2_plugin
def register(self):
self.nuage_callbacks.subscribe(self.post_port_create,
resources.PORT, constants.AFTER_CREATE)
self.nuage_callbacks.subscribe(self.post_port_update,
resources.PORT, constants.AFTER_UPDATE)
self.nuage_callbacks.subscribe(self.post_port_delete,
resources.PORT, constants.AFTER_DELETE)
registry.subscribe(self.pre_delete_security_group,
resources.SECURITY_GROUP,
events.BEFORE_DELETE)
registry.subscribe(self.pre_create_security_group_rule,
resources.SECURITY_GROUP_RULE,
events.BEFORE_CREATE)
registry.subscribe(self.post_create_security_group_rule,
resources.SECURITY_GROUP_RULE,
events.AFTER_CREATE)
registry.subscribe(self.pre_delete_security_group_rule,
resources.SECURITY_GROUP_RULE,
events.BEFORE_DELETE)
@nuage_utils.handle_nuage_api_error
@log_helpers.log_method_call
@TimeTracker.tracked
def pre_delete_security_group(self, resource, event, trigger, **kwargs):
self.vsdclient.delete_nuage_secgroup(kwargs['security_group_id'])
@nuage_utils.handle_nuage_api_error
@log_helpers.log_method_call
@TimeTracker.tracked
def pre_create_security_group_rule(self, resource, event, trigger,
**kwargs):
self.vsdclient.validate_nuage_sg_rule_definition(
kwargs['security_group_rule'])
@nuage_utils.handle_nuage_api_error
@log_helpers.log_method_call
@TimeTracker.tracked
def post_create_security_group_rule(self, resource, event, trigger,
**kwargs):
remote_sg = None
context = kwargs['context']
sg_rule = kwargs['security_group_rule']
sg_id = sg_rule['security_group_id']
if sg_rule.get('remote_group_id'):
remote_sg = self.core_plugin.get_security_group(
context, sg_rule.get('remote_group_id'))
try:
nuage_policygroup = self.vsdclient.get_sg_policygroup_mapping(
sg_id)
if nuage_policygroup:
sg_params = {
'sg_id': sg_id,
'neutron_sg_rule': sg_rule,
'policygroup': nuage_policygroup
}
if remote_sg:
sg_params['remote_group_name'] = remote_sg['name']
self.vsdclient.create_nuage_sgrule(sg_params)
except Exception:
with excutils.save_and_reraise_exception():
self.core_plugin.delete_security_group_rule(context,
sg_rule['id'])
@nuage_utils.handle_nuage_api_error
@log_helpers.log_method_call
@TimeTracker.tracked
def pre_delete_security_group_rule(self, resource, event, trigger,
**kwargs):
context = kwargs['context']
sg_rule_id = kwargs['security_group_rule_id']
local_sg_rule = self.core_plugin.get_security_group_rule(context, sg_rule_id)
self.vsdclient.delete_nuage_sgrule([local_sg_rule])
@TimeTracker.tracked
def post_port_create(self, resource, event, trigger, **kwargs):
context = kwargs['context']
port = kwargs['port']
subnet_mapping = kwargs['subnet_mapping']
if subnet_mapping['nuage_managed_subnet']:
return
vsd_subnet = self.vsdclient.get_nuage_subnet_by_id(subnet_mapping)
if port[ext_sg.SECURITYGROUPS]:
self._process_port_security_group(context,
port,
kwargs['vport'],
port[ext_sg.SECURITYGROUPS],
vsd_subnet)
@TimeTracker.tracked
def post_port_update(self, resource, event, trigger, **kwargs):
update_sg = True
context = kwargs['context']
updated_port = kwargs['updated_port']
original_port = kwargs['original_port']
rollbacks = kwargs['rollbacks']
subnet_mapping = kwargs['subnet_mapping']
if subnet_mapping['nuage_managed_subnet']:
return
new_sg = (set(updated_port.get(ext_sg.SECURITYGROUPS)) if
updated_port.get(ext_sg.SECURITYGROUPS) else set())
orig_sg = (set(original_port.get(ext_sg.SECURITYGROUPS)) if
original_port.get(ext_sg.SECURITYGROUPS) else set())
if not new_sg and new_sg == orig_sg:
update_sg = False
if update_sg:
vsd_subnet = self.vsdclient.get_nuage_subnet_by_id(subnet_mapping)
self._process_port_security_group(context,
updated_port,
kwargs['vport'],
new_sg,
vsd_subnet)
rollbacks.append((self._process_port_security_group,
[context, updated_port, kwargs['vport'],
original_port[ext_sg.SECURITYGROUPS],
vsd_subnet],
{}))
deleted_sg_ids = (set(original_port[ext_sg.SECURITYGROUPS]) -
set(updated_port[ext_sg.SECURITYGROUPS]))
self.vsdclient.check_unused_policygroups(deleted_sg_ids)
@TimeTracker.tracked
def post_port_delete(self, resource, event, trigger, **kwargs):
port = kwargs['port']
subnet_mapping = kwargs['subnet_mapping']
if subnet_mapping['nuage_managed_subnet']:
return
securitygroups = port.get(ext_sg.SECURITYGROUPS, [])
successful = False
attempt = 1
while not successful:
try:
self.vsdclient.check_unused_policygroups(securitygroups)
successful = True
except restproxy.RESTProxyError as e:
msg = e.msg.lower()
if (e.code not in (404, 409) and 'policygroup' not in msg and
'policy group' not in msg):
raise
elif attempt < 3:
attempt += 1
else:
raise
@log_helpers.log_method_call
def _process_port_security_group(self, context, port, vport, sg_ids,
vsd_subnet):
if len(sg_ids) > 6:
msg = (_("Exceeds maximum num of security groups on a port "
"supported on nuage VSP"))
raise nuage_exc.NuageBadRequest(msg=msg)
if not port.get('fixed_ips'):
return
successful = False
attempt = 1
max_attempts = 3
while not successful:
try:
policygroup_ids = []
for sg_id in sg_ids:
vsd_policygroup = self._find_or_create_policygroup(
context, sg_id, vsd_subnet)
policygroup_ids.append(vsd_policygroup['ID'])
self.vsdclient.update_vport_policygroups(vport['ID'],
policygroup_ids)
successful = True
except restproxy.RESTProxyError as e:
msg = e.msg.lower()
if (e.code not in (404, 409) and 'policygroup' not in msg and
'policy group' not in msg):
raise
elif attempt < max_attempts:
attempt += 1
else:
LOG.debug("Retry failed %s times.", max_attempts)
raise
def _find_or_create_policygroup(self, context, security_group_id,
vsd_subnet):
external_id = cms_id_helper.get_vsd_external_id(security_group_id)
if vsd_subnet['type'] == constants.L2DOMAIN:
policygroups = self.vsdclient.get_nuage_l2domain_policy_groups(
vsd_subnet['ID'],
externalID=external_id)
else:
domain_id = self.vsdclient.get_router_by_domain_subnet_id(
vsd_subnet['ID'])
policygroups = self.vsdclient.get_nuage_domain_policy_groups(
domain_id,
externalID=external_id)
if len(policygroups) > 1:
msg = _("Found multiple policygroups with externalID %s")
raise n_exc.Conflict(msg=msg % external_id)
elif len(policygroups) == 1:
return policygroups[0]
else:
return self._create_policygroup(context, security_group_id,
vsd_subnet)
def _create_policygroup(self, context, security_group_id, vsd_subnet):
security_group = self.core_plugin.get_security_group(context,
security_group_id)
# pop rules, make empty policygroup first
security_group_rules = security_group.pop('security_group_rules')
policy_group = self.vsdclient.create_security_group(vsd_subnet,
security_group)
# Before creating rules, we might have to create other policygroups first:
# if a rule uses remote_group_id, its rules must reference that other PG.
for rule in security_group_rules:
remote_sg_id = rule.get('remote_group_id')
if remote_sg_id:
self._find_or_create_policygroup(context,
remote_sg_id,
vsd_subnet)
self.vsdclient.create_security_group_rules(policy_group,
security_group_rules)
return policy_group
| 43.916058 | 79 | 0.587883 |
acf4d51dda5fed5bdaed73c154b821cf48debb4a | 3,377 | py | Python | services/parser-database/tests/test_endpoints.py | jaimehisao/major-tom | e991616deb6fc6d86ff3752c4b2136578b3d11f3 | [
"Apache-2.0"
] | 6 | 2020-07-15T18:28:38.000Z | 2021-04-20T13:39:26.000Z | services/parser-database/tests/test_endpoints.py | jaimehisao/major-tom | e991616deb6fc6d86ff3752c4b2136578b3d11f3 | [
"Apache-2.0"
] | 61 | 2020-07-20T21:52:59.000Z | 2021-06-18T16:53:34.000Z | services/parser-database/tests/test_endpoints.py | jaimehisao/major-tom | e991616deb6fc6d86ff3752c4b2136578b3d11f3 | [
"Apache-2.0"
] | 10 | 2020-07-17T21:44:09.000Z | 2021-04-26T22:53:33.000Z | from unittest.mock import MagicMock
import random
import json
import retriever
class MockRetrieverResponse:
@staticmethod
def get_document():
return None
def load_data(endpoint, *args, **kwargs):
split_text = endpoint.split()
keywords = [
split_text[random.randint(0, len(split_text) - 1)],
split_text[random.randint(0, len(split_text) - 1)],
]
return keywords
def test_parse_endpoint(app, client, monkeypatch, mocker):
def mock_retriever(*args, **kwargs):
return MockRetrieverResponse()
mock_keywords = MagicMock(name="get_keywords")
mock_keywords.get.side_effect = load_data
mocker.patch("connector.get_keywords", new=mock_keywords)
monkeypatch.setattr(retriever, "get_document", mock_retriever)
res = client.post("/parse")
assert res.status_code == 200
def test_if_article_119_was_parsed(app, client):
app.config['JSON_AS_ASCII'] = False
res = client.get("/articles/119")
data = json.loads(res.get_data(as_text=True))
must_match = {
'id': '119',
'number': 119,
'content': "Las escuelas deberán contar con lugares especiales y debidamente señalizados \n\npara que los vehículos de transporte escolar efectúen el ascenso y descenso de los escolares, sin que \n\nafecte u obstaculice la circulación en la vía pública. En caso de que el lugar de ascenso y descenso \n\nde escolares, ocasione conflictos viales, o ponga en riesgo la integridad física de los mismos, dichos \n\nlugares serán ubicados en las inmediaciones de los planteles donde técnicamente sea factible a \n\npropuesta de las escuelas y previa autorización de la Autoridad Municipal, observando de manera \n\nprimordial lo necesario para garantizar la seguridad de los escolares.", # noqa: E501
'wordCount': 100
}
assert data == must_match
def test_if_article_25_was_parsed(app, client):
app.config['JSON_AS_ASCII'] = False
res = client.get("/articles/25")
data = json.loads(res.get_data(as_text=True))
must_match = {
'id': '25',
'number': 25,
'content': "Queda prohibido que los vehículos que circulen en la vía pública porten los accesorios \n\no artículos siguientes: \n\nI. \n\nII. \n\nFaros encendidos o reflejantes de colores diferentes al blanco o ámbar en la parte delantera; \n\nFaros encendidos o reflejantes de colores diferentes al rojo o ámbar en la parte posterior; con \n\nexcepción solamente de las luces de reversa y de placa; \n\n \n \n \n \n \n \n\fIII. \n\nDispositivos de rodamiento con superficie metálica que haga contacto con el pavimento. Esto \n\nincluye cadenas sobre las llantas; \n\nIV. \n\nRadios que utilicen la frecuencia de la Dependencia de Tránsito correspondiente o cualquier \n\notro cuerpo de seguridad; \n\nV. \n\nPiezas del vehículo que no estén debidamente sujetas de tal forma que puedan desprenderse \n\nconstituyendo un peligro; \n\nVI. \n\nSirena o aparatos que emitan sonidos semejantes a ella, torreta y/o luces estroboscópicas de \n\ncualquier color con excepción de los vehículos oficiales, de emergencia o especiales; \n\nArtículos u objetos que impidan u obstaculicen la visibilidad del conductor; y \n\nMofles directos, rotos o que emitan un ruido excesivo. \n\nVII. \n\nVIII.", # noqa: E501
'wordCount': 165
}
assert data == must_match
| 55.360656 | 1,209 | 0.73438 |
acf4d544a6d00687d7026eaad132ec0d50e5f25b | 155 | py | Python | Codewars/8kyu/find-the-nth-digit-of-a-number/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codewars/8kyu/find-the-nth-digit-of-a-number/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | Codewars/8kyu/find-the-nth-digit-of-a-number/Python/solution1.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | [
"MIT"
] | null | null | null | # Python - 3.6.0
def find_digit(num, nth):
if nth <= 0:
return -1
num = str(abs(num))
return 0 if nth > len(num) else int(num[-nth])
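# Worked examples (informal; behaviour follows from the code above):
#   find_digit(5673, 4)  -> 5   (4th digit counted from the right)
#   find_digit(-2825, 3) -> 8   (the sign is ignored via abs)
#   find_digit(456, 4)   -> 0   (nth beyond the number's length)
#   find_digit(24, 0)    -> -1  (nth must be positive)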
| 17.222222 | 50 | 0.541935 |
acf4d58e3d092e0231eaef50f2d1f835fc50a193 | 3,424 | py | Python | dapr/clients/http/client.py | wmeints/python-sdk | 33e7d48ffbed9b175ff095d18247f328e990d4ca | [
"MIT"
] | 1 | 2021-04-05T11:29:16.000Z | 2021-04-05T11:29:16.000Z | dapr/clients/http/client.py | Chemdevil/python-sdk | b62d3b75478359a637d1e3ae162a9d21c8029bf6 | [
"MIT"
] | 12 | 2021-09-14T05:13:26.000Z | 2022-03-25T05:16:24.000Z | dapr/clients/http/client.py | Chemdevil/python-sdk | b62d3b75478359a637d1e3ae162a9d21c8029bf6 | [
"MIT"
] | 1 | 2022-02-23T12:44:51.000Z | 2022-02-23T12:44:51.000Z | # -*- coding: utf-8 -*-
"""
Copyright (c) Microsoft Corporation and Dapr Contributors.
Licensed under the MIT License.
"""
import aiohttp
from typing import Callable, Mapping, Dict, Optional, Union, Tuple, TYPE_CHECKING
if TYPE_CHECKING:
from dapr.serializers import Serializer
from dapr.conf import settings
from dapr.clients.base import DEFAULT_JSON_CONTENT_TYPE
from dapr.clients.exceptions import DaprInternalError, ERROR_CODE_DOES_NOT_EXIST, ERROR_CODE_UNKNOWN
CONTENT_TYPE_HEADER = 'content-type'
DAPR_API_TOKEN_HEADER = 'dapr-api-token'
class DaprHttpClient:
"""A Dapr Http API client"""
def __init__(self,
message_serializer: 'Serializer',
timeout: Optional[int] = 60,
headers_callback: Optional[Callable[[], Dict[str, str]]] = None):
"""Invokes Dapr over HTTP.
Args:
message_serializer (Serializer): Dapr serializer.
timeout (int, optional): Timeout in seconds, defaults to 60.
headers_callback (Callable[[], Dict[str, str]], optional): Generates headers for each request.
"""
self._timeout = aiohttp.ClientTimeout(total=timeout)
self._serializer = message_serializer
self._headers_callback = headers_callback
def get_api_url(self) -> str:
return 'http://{}:{}/{}'.format(
settings.DAPR_RUNTIME_HOST,
settings.DAPR_HTTP_PORT,
settings.DAPR_API_VERSION)
async def send_bytes(
self, method: str, url: str,
data: Optional[bytes],
headers: Dict[str, Union[bytes, str]] = {},
query_params: Optional[Mapping] = None
) -> Tuple[bytes, aiohttp.ClientResponse]:
# Copy so the shared mutable default ``headers={}`` is never mutated.
headers_map = dict(headers)
if not headers_map.get(CONTENT_TYPE_HEADER):
headers_map[CONTENT_TYPE_HEADER] = DEFAULT_JSON_CONTENT_TYPE
if settings.DAPR_API_TOKEN is not None:
headers_map[DAPR_API_TOKEN_HEADER] = settings.DAPR_API_TOKEN
if self._headers_callback is not None:
trace_headers = self._headers_callback()
headers_map.update(trace_headers)
r = None
async with aiohttp.ClientSession(timeout=self._timeout) as session:
r = await session.request(
method=method,
url=url,
data=data,
headers=headers_map,
params=query_params)
if r.status >= 200 and r.status < 300:
return await r.read(), r
raise (await self.convert_to_error(r))
async def convert_to_error(self, response) -> DaprInternalError:
error_info = None
try:
error_body = await response.read()
if (error_body is None or len(error_body) == 0) and response.status == 404:
return DaprInternalError("Not Found", ERROR_CODE_DOES_NOT_EXIST)
error_info = self._serializer.deserialize(error_body)
except Exception:
return DaprInternalError(f'Unknown Dapr Error. HTTP status code: {response.status}')
if error_info and isinstance(error_info, dict):
message = error_info.get('message')
error_code = error_info.get('errorCode') or ERROR_CODE_UNKNOWN
return DaprInternalError(message, error_code)
return DaprInternalError(f'Unknown Dapr Error. HTTP status code: {response.status}')
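# A minimal asyncio usage sketch (assumes a running Dapr sidecar and a
# serializer implementing dapr.serializers.Serializer; the endpoint and
# payload below are illustrative only):
#
#     async def save_state(client: DaprHttpClient):
#         url = f'{client.get_api_url()}/state/statestore'
#         body = b'[{"key": "k1", "value": "v1"}]'
#         data, response = await client.send_bytes('POST', url, body)
#         return response.status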
| 36.817204 | 100 | 0.64486 |
acf4d5b291cde4fc8b3876746ec282f907b68bdc | 1,520 | py | Python | colocate/plot.py | ocefpaf/ohw19-project-co_locators | a5eaa540ba81be05eda04f76e8be1f39af1a13f1 | [
"MIT"
] | null | null | null | colocate/plot.py | ocefpaf/ohw19-project-co_locators | a5eaa540ba81be05eda04f76e8be1f39af1a13f1 | [
"MIT"
] | null | null | null | colocate/plot.py | ocefpaf/ohw19-project-co_locators | a5eaa540ba81be05eda04f76e8be1f39af1a13f1 | [
"MIT"
] | null | null | null | import pandas as pd
import hvplot.pandas
import cmocean as cmo
import datashader as ds
import cartopy.crs as ccrs
import matplotlib.pyplot as plt
import geoviews as gv
import holoviews as hv
# df_coords = pd.read_csv('/Users/santanay/code/OHW19/ohw19-project-co_locators/coordinates.zip', index_col=0)
import cartopy.io.shapereader as shpreader
def plot(df_coords):
data_coords = gv.Dataset(df_coords, kdims=['longitude (degrees_east)',
'latitude (degrees_north)',
'Dataset ID','dataset_count'])
points = data_coords.to(gv.Points, ['longitude (degrees_east)',
'latitude (degrees_north)'],
['Dataset ID','dataset_count'])
tiles = gv.tile_sources.EsriOceanBase
figure = tiles * points.opts(size=5, color='dataset_count', cmap='viridis',
tools=['hover'], width=600, height=600, global_extent=True)
# shpfilename = shpreader.natural_earth(resolution='10m',
# category='physical',
# name='coastline')
# figure = df_coords.hvplot.scatter(
# 'longitude (degrees_east)','latitude (degrees_north)', s=5, c='Dataset ID',
# projection=ccrs.PlateCarree(),
# width=600, height=540, cmap=cmo.cm.tempo,
# datashade=True
# ) * gv.Shape.from_shapefile(shpfilename, crs=ccrs.PlateCarree())
return figure
| 44.705882 | 109 | 0.596711 |
acf4d7225d4f32794035ea92efb6dedce9c88321 | 237 | py | Python | Taller_Diccionarios/Ejercicio_2.py | Mariajosedibo19/Talleres_de_Algoritmos | db8f1eecc345be1877d9d7a62a3fa8cf3af2df7d | [
"MIT"
] | null | null | null | Taller_Diccionarios/Ejercicio_2.py | Mariajosedibo19/Talleres_de_Algoritmos | db8f1eecc345be1877d9d7a62a3fa8cf3af2df7d | [
"MIT"
] | null | null | null | Taller_Diccionarios/Ejercicio_2.py | Mariajosedibo19/Talleres_de_Algoritmos | db8f1eecc345be1877d9d7a62a3fa8cf3af2df7d | [
"MIT"
] | null | null | null |
diccionario={'Mikel': 3, 'Ane': 8, 'Amaia': 12, 'Unai': 5, 'Jon': 8, 'Ainhoa': 7,
'Maite': 5}
lista = []
for value in diccionario.items():
a = value[1]
lista.append(a)
b = set(lista)
print(list(b))
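# Expected output: the distinct grades, e.g. [3, 5, 7, 8, 12]
# (set iteration order is not guaranteed, so the list order may vary).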
| 11.85 | 81 | 0.50211 |
acf4d8b4b7b2a276bae0adfab8f9282fb8ba7fc6 | 1,774 | py | Python | tests/apps/call_for_proposals/test_cli.py | alysivji/github-adapter | 5e3543f41f189fbe4a50d64e3d6734dc765579b4 | [
"MIT"
] | 55 | 2019-05-05T01:20:58.000Z | 2022-01-10T18:03:05.000Z | tests/apps/call_for_proposals/test_cli.py | alysivji/github-adapter | 5e3543f41f189fbe4a50d64e3d6734dc765579b4 | [
"MIT"
] | 222 | 2019-05-03T16:31:26.000Z | 2021-08-28T23:49:03.000Z | tests/apps/call_for_proposals/test_cli.py | busy-beaver-dev/busy-beaver | 5e3543f41f189fbe4a50d64e3d6734dc765579b4 | [
"MIT"
] | 19 | 2019-04-27T19:49:32.000Z | 2020-06-30T19:52:09.000Z | import pytest
from busy_beaver.apps.call_for_proposals.cli import OpenCFPPost, post_upcoming_cfps
from tests._utilities import FakeSlackClient
MODULE_TO_TEST = "busy_beaver.apps.call_for_proposals.cli"
@pytest.fixture
def patched_slack(patcher):
obj = FakeSlackClient()
return patcher(MODULE_TO_TEST, namespace="SlackClient", replacement=obj)
@pytest.mark.unit
@pytest.mark.vcr
def test_post_upcoming_cfps(mocker, runner, factory, patched_slack):
# Arrange
config = factory.CallForProposalsConfiguration(enabled=True)
# Act
runner.invoke(post_upcoming_cfps)
# Assert
assert patched_slack.mock.call_count == 2
slack_adapter_initialize_args = patched_slack.mock.call_args_list[0]
args, kwargs = slack_adapter_initialize_args
assert config.slack_installation.bot_access_token in args
post_message_args = patched_slack.mock.call_args_list[-1]
args, kwargs = post_message_args
assert "blocks" in kwargs
assert len(kwargs["blocks"]) == 5
@pytest.mark.unit
@pytest.mark.vcr
def test_post_upcoming_cfps_enabled(mocker, runner, factory, patched_slack):
"""Only post for configurations that are enabled
Should have 2 calls:
- when initialized
- when post_message is called
"""
# Arrange
factory.CallForProposalsConfiguration(enabled=False)
factory.CallForProposalsConfiguration(enabled=True)
# Act
runner.invoke(post_upcoming_cfps)
# Assert
assert patched_slack.mock.call_count == 2
@pytest.mark.unit
@pytest.mark.vcr
def test_post_no_open_cfps_found():
"""When there are no open CFPs, let the user know)"""
# Act
result = OpenCFPPost._generate_conference_text(conference_cfps=[])
# Assert
assert "No upcoming CFPs found" in result
| 26.878788 | 83 | 0.751409 |
acf4d8deadb6666a1ce832be1c555a1b2389c145 | 5,652 | py | Python | django/utils/translation/__init__.py | hafeez3000/django | 08e1175ac8d683b692ec3c67dc31df149f07dc8f | [
"BSD-3-Clause"
] | null | null | null | django/utils/translation/__init__.py | hafeez3000/django | 08e1175ac8d683b692ec3c67dc31df149f07dc8f | [
"BSD-3-Clause"
] | null | null | null | django/utils/translation/__init__.py | hafeez3000/django | 08e1175ac8d683b692ec3c67dc31df149f07dc8f | [
"BSD-3-Clause"
] | null | null | null | """
Internationalization support.
"""
import warnings
from os import path
from django.utils.encoding import force_unicode
from django.utils.functional import lazy
from django.utils.importlib import import_module
__all__ = [
'activate', 'deactivate', 'override', 'deactivate_all',
'get_language', 'get_language_from_request',
'get_language_info', 'get_language_bidi',
'check_for_language', 'to_locale', 'templatize', 'string_concat',
'get_date_formats', 'get_partial_date_formats',
'gettext', 'gettext_lazy', 'gettext_noop',
'ugettext', 'ugettext_lazy', 'ugettext_noop',
'ngettext', 'ngettext_lazy',
'ungettext', 'ungettext_lazy',
'pgettext', 'pgettext_lazy',
'npgettext', 'npgettext_lazy',
]
# Here be dragons, so a short explanation of the logic won't hurt:
# We are trying to solve two problems: (1) access settings, in particular
# settings.USE_I18N, as late as possible, so that modules can be imported
# without having to first configure Django, and (2) if some other code creates
# a reference to one of these functions, don't break that reference when we
# replace the functions with their real counterparts (once we do access the
# settings).
class Trans(object):
"""
The purpose of this class is to store the actual translation function upon
receiving the first call to that function. After this is done, changes to
USE_I18N will have no effect on which function is served upon request. If
your tests rely on changing USE_I18N, you can delete all the functions
from _trans.__dict__.
Note that storing the function with setattr will have a noticeable
performance effect, as access to the function goes the normal path,
instead of using __getattr__.
"""
def __getattr__(self, real_name):
from django.conf import settings
if settings.USE_I18N:
from django.utils.translation import trans_real as trans
# Make sure the project's locale dir isn't in LOCALE_PATHS
if settings.SETTINGS_MODULE is not None:
parts = settings.SETTINGS_MODULE.split('.')
project = import_module(parts[0])
project_locale_path = path.normpath(
path.join(path.dirname(project.__file__), 'locale'))
normalized_locale_paths = [path.normpath(locale_path)
for locale_path in settings.LOCALE_PATHS]
if (path.isdir(project_locale_path) and
not project_locale_path in normalized_locale_paths):
warnings.warn("Translations in the project directory "
"aren't supported anymore. Use the "
"LOCALE_PATHS setting instead.",
DeprecationWarning)
else:
from django.utils.translation import trans_null as trans
setattr(self, real_name, getattr(trans, real_name))
return getattr(trans, real_name)
_trans = Trans()
# The Trans class is no longer needed, so remove it from the namespace.
del Trans
def gettext_noop(message):
return _trans.gettext_noop(message)
ugettext_noop = gettext_noop
def gettext(message):
return _trans.gettext(message)
def ngettext(singular, plural, number):
return _trans.ngettext(singular, plural, number)
def ugettext(message):
return _trans.ugettext(message)
def ungettext(singular, plural, number):
return _trans.ungettext(singular, plural, number)
def pgettext(context, message):
return _trans.pgettext(context, message)
def npgettext(context, singular, plural, number):
return _trans.npgettext(context, singular, plural, number)
ngettext_lazy = lazy(ngettext, str)
gettext_lazy = lazy(gettext, str)
ungettext_lazy = lazy(ungettext, unicode)
ugettext_lazy = lazy(ugettext, unicode)
pgettext_lazy = lazy(pgettext, unicode)
npgettext_lazy = lazy(npgettext, unicode)
def activate(language):
return _trans.activate(language)
def deactivate():
return _trans.deactivate()
class override(object):
def __init__(self, language, deactivate=False):
self.language = language
self.deactivate = deactivate
self.old_language = get_language()
def __enter__(self):
activate(self.language)
def __exit__(self, exc_type, exc_value, traceback):
if self.deactivate:
deactivate()
else:
activate(self.old_language)
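# A short usage sketch for the ``override`` helper above (assuming the 'fr'
# locale is available in the project):
#
#     with override('fr'):
#         greeting = ugettext('Hello')   # translated using 'fr'
#     # the previously active language is restored on exit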
def get_language():
return _trans.get_language()
def get_language_bidi():
return _trans.get_language_bidi()
def get_date_formats():
return _trans.get_date_formats()
def get_partial_date_formats():
return _trans.get_partial_date_formats()
def check_for_language(lang_code):
return _trans.check_for_language(lang_code)
def to_locale(language):
return _trans.to_locale(language)
def get_language_from_request(request):
return _trans.get_language_from_request(request)
def get_language_from_path(path):
return _trans.get_language_from_path(path)
def templatize(src, origin=None):
return _trans.templatize(src, origin)
def deactivate_all():
return _trans.deactivate_all()
def _string_concat(*strings):
"""
Lazy variant of string concatenation, needed for translations that are
constructed from multiple parts.
"""
return u''.join([force_unicode(s) for s in strings])
string_concat = lazy(_string_concat, unicode)
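# Example: build a translatable message from lazily translated parts; the
# join only happens when the result is coerced to unicode (illustrative):
#
#     message = string_concat(ugettext_lazy('Hello'), u' ', ugettext_lazy('world'))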
def get_language_info(lang_code):
from django.conf.locale import LANG_INFO
try:
return LANG_INFO[lang_code]
except KeyError:
raise KeyError("Unknown language code %r." % lang_code)
| 33.247059 | 78 | 0.704883 |
acf4dbe36a107de312243eb9c322f63dd2ea1daf | 261 | py | Python | scholariumat/framework/models.py | valuehack/scholariumat | 47c13f3429b95b9ad5ca59b45cf971895260bb5c | [
"MIT"
] | null | null | null | scholariumat/framework/models.py | valuehack/scholariumat | 47c13f3429b95b9ad5ca59b45cf971895260bb5c | [
"MIT"
] | 232 | 2018-06-30T11:40:52.000Z | 2020-04-29T23:55:41.000Z | scholariumat/framework/models.py | valuehack/scholariumat | 47c13f3429b95b9ad5ca59b45cf971895260bb5c | [
"MIT"
] | 3 | 2018-05-31T12:57:03.000Z | 2020-02-27T16:25:44.000Z | from django.db import models
class Announcement(models.Model):
message = models.TextField(blank=False)
def __str__(self):
return self.message
class Meta:
verbose_name = 'Ankündigung'
verbose_name_plural = "Ankündigungen"
| 20.076923 | 45 | 0.685824 |
acf4dc5e237525541278a25b4810c1b94d564700 | 3,565 | py | Python | test/onnx/model_defs/squeezenet.py | jeejakp12/pytorch | 043cf1f9c746b4dda2c404ba6c76c6ccad5e2cbe | [
"Intel"
] | 2 | 2020-03-13T06:57:49.000Z | 2020-05-17T04:18:14.000Z | test/onnx/model_defs/squeezenet.py | jeejakp12/pytorch | 043cf1f9c746b4dda2c404ba6c76c6ccad5e2cbe | [
"Intel"
] | null | null | null | test/onnx/model_defs/squeezenet.py | jeejakp12/pytorch | 043cf1f9c746b4dda2c404ba6c76c6ccad5e2cbe | [
"Intel"
] | 1 | 2022-03-26T14:42:50.000Z | 2022-03-26T14:42:50.000Z | import torch
import torch.nn as nn
import torch.nn.init as init
class Fire(nn.Module):
def __init__(self, inplanes, squeeze_planes, expand1x1_planes, expand3x3_planes):
super(Fire, self).__init__()
self.inplanes = inplanes
self.squeeze = nn.Conv2d(inplanes, squeeze_planes, kernel_size=1)
self.squeeze_activation = nn.ReLU(inplace=True)
self.expand1x1 = nn.Conv2d(squeeze_planes, expand1x1_planes, kernel_size=1)
self.expand1x1_activation = nn.ReLU(inplace=True)
self.expand3x3 = nn.Conv2d(
squeeze_planes, expand3x3_planes, kernel_size=3, padding=1
)
self.expand3x3_activation = nn.ReLU(inplace=True)
def forward(self, x):
x = self.squeeze_activation(self.squeeze(x))
return torch.cat(
[
self.expand1x1_activation(self.expand1x1(x)),
self.expand3x3_activation(self.expand3x3(x)),
],
1,
)
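# Note: a Fire module therefore emits expand1x1_planes + expand3x3_planes
# channels; e.g. Fire(96, 16, 64, 64) maps 96 input channels to 128 output
# channels, which is why the next Fire below takes 128 as its inplanes.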
class SqueezeNet(nn.Module):
def __init__(self, version=1.0, num_classes=1000, ceil_mode=False):
super(SqueezeNet, self).__init__()
if version not in [1.0, 1.1]:
raise ValueError(
"Unsupported SqueezeNet version {version}:"
"1.0 or 1.1 expected".format(version=version)
)
self.num_classes = num_classes
if version == 1.0:
self.features = nn.Sequential(
nn.Conv2d(3, 96, kernel_size=7, stride=2),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=ceil_mode),
Fire(96, 16, 64, 64),
Fire(128, 16, 64, 64),
Fire(128, 32, 128, 128),
nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=ceil_mode),
Fire(256, 32, 128, 128),
Fire(256, 48, 192, 192),
Fire(384, 48, 192, 192),
Fire(384, 64, 256, 256),
nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=ceil_mode),
Fire(512, 64, 256, 256),
)
else:
self.features = nn.Sequential(
nn.Conv2d(3, 64, kernel_size=3, stride=2),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=ceil_mode),
Fire(64, 16, 64, 64),
Fire(128, 16, 64, 64),
nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=ceil_mode),
Fire(128, 32, 128, 128),
Fire(256, 32, 128, 128),
nn.MaxPool2d(kernel_size=3, stride=2, ceil_mode=ceil_mode),
Fire(256, 48, 192, 192),
Fire(384, 48, 192, 192),
Fire(384, 64, 256, 256),
Fire(512, 64, 256, 256),
)
# Final convolution is initialized differently from the rest
final_conv = nn.Conv2d(512, self.num_classes, kernel_size=1)
self.classifier = nn.Sequential(
nn.Dropout(p=0.5), final_conv, nn.ReLU(inplace=True), nn.AvgPool2d(13)
)
for m in self.modules():
if isinstance(m, nn.Conv2d):
if m is final_conv:
init.normal_(m.weight.data, mean=0.0, std=0.01)
else:
init.kaiming_uniform_(m.weight.data)
if m.bias is not None:
m.bias.data.zero_()
def forward(self, x):
x = self.features(x)
x = self.classifier(x)
return x.view(x.size(0), self.num_classes)
| 39.611111 | 85 | 0.543899 |
acf4de65fe52fc412eed03ac739a9e84a93edfa3 | 172 | py | Python | autonomous_systems_project/agents/__init__.py | alessandropacielli/autonomous_systems_project | ae429099409356db5cdd19597af871f239300ffb | [
"MIT"
] | null | null | null | autonomous_systems_project/agents/__init__.py | alessandropacielli/autonomous_systems_project | ae429099409356db5cdd19597af871f239300ffb | [
"MIT"
] | null | null | null | autonomous_systems_project/agents/__init__.py | alessandropacielli/autonomous_systems_project | ae429099409356db5cdd19597af871f239300ffb | [
"MIT"
] | null | null | null | from autonomous_systems_project.agents.actor_critic import *
from autonomous_systems_project.agents.double_dqn import *
from autonomous_systems_project.agents.dqn import *
| 43 | 60 | 0.877907 |
acf4dfdc8277f8e1d4d4fc0d9a6753465f96fa36 | 5,392 | py | Python | bonobo/config/services.py | winsmith/bonobo | 6fb9f52bec43a23feac2db968dd4315d75d69910 | [
"Apache-2.0"
] | 1 | 2017-11-13T22:29:27.000Z | 2017-11-13T22:29:27.000Z | bonobo/config/services.py | winsmith/bonobo | 6fb9f52bec43a23feac2db968dd4315d75d69910 | [
"Apache-2.0"
] | null | null | null | bonobo/config/services.py | winsmith/bonobo | 6fb9f52bec43a23feac2db968dd4315d75d69910 | [
"Apache-2.0"
] | null | null | null | import re
import threading
import types
from contextlib import ContextDecorator
from bonobo.config.options import Option
from bonobo.errors import MissingServiceImplementationError
_service_name_re = re.compile(r"^[^\d\W]\w*(?:\.[^\d\W]\w*)*$", re.UNICODE)
def validate_service_name(name):
if not _service_name_re.match(name):
raise ValueError('Invalid service name {!r}.'.format(name))
return name
class Service(Option):
"""
A Service is a special kind of option defining a dependency to something that will be resolved at runtime, using an
identifier. For example, you can create a Configurable that has a "database" Service in its attribute, meaning that
you'll define which database to use, by name, when creating the instance of this class, then provide an
implementation when running the graph using a strategy.
Example::
import bonobo
class QueryExtractor(bonobo.Configurable):
database = bonobo.Service(default='sqlalchemy.engine.default')
graph = bonobo.Graph(
QueryExtractor(database='sqlalchemy.engine.secondary'),
*more_transformations,
)
if __name__ == '__main__':
engine = create_engine('... dsn ...')
bonobo.run(graph, services={
'sqlalchemy.engine.secondary': engine
})
The main goal is not to tie transformations to actual dependencies, so the same can be run in different contexts
(stages like preprod, prod, or tenants like client1, client2, or anything you want).
.. attribute:: name
Service name will be used to retrieve the implementation at runtime.
"""
def __init__(self, name, __doc__=None):
super().__init__(str, required=False, default=name, __doc__=__doc__)
def __set__(self, inst, value):
inst._options_values[self.name] = validate_service_name(value)
def resolve(self, inst, services):
try:
name = getattr(inst, self.name)
except AttributeError:
name = self.name
return services.get(name)
class Container(dict):
def __new__(cls, *args, **kwargs):
if len(args) == 1:
assert not len(kwargs), 'only one usage at a time, my dear.'
if not (args[0]):
return super().__new__(cls)
if isinstance(args[0], cls):
# Reuse the existing container instance rather than returning the class.
return args[0]
return super().__new__(cls, *args, **kwargs)
def args_for(self, mixed):
try:
options = dict(mixed.__options__)
except AttributeError:
options = {}
return tuple(option.resolve(mixed, self) for name, option in options.items() if isinstance(option, Service))
def get(self, name, default=None):
if not name in self:
if default:
return default
raise MissingServiceImplementationError(
'Cannot resolve service {!r} using provided service collection.'.format(name)
)
value = super().get(name)
# XXX this is not documented and can lead to errors.
if isinstance(value, types.LambdaType):
value = value(self)
return value
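# Illustration of the callable behaviour noted above (sketch): a callable
# value is invoked with the container itself, enabling lazily built services
# (``create_engine`` below is illustrative, not part of this module):
#
#     services = Container()
#     services['database'] = lambda c: create_engine('sqlite://')
#     services.get('database')  # calls the lambda, returns the engine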
def create_container(services=None, factory=Container):
"""
Create a container with reasonable default service implementations for common, standard-named services.
Services:
- `fs` defaults to a fs2 instance based on current working directory
- `http` defaults to the requests module
:param services: optional mapping of service names to implementations used to seed the container.
:return: Container
"""
container = factory(services) if services else factory()
if not 'fs' in container:
import bonobo
container.setdefault('fs', bonobo.open_fs())
if not 'http' in container:
import requests
container.setdefault('http', requests)
return container
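# A hedged sketch of how create_container composes defaults with overrides
# (``my_session`` is an illustrative user-provided object):
#
#     services = create_container({'http': my_session})
#     services.get('http')  # -> my_session (the requests default is unused)
#     services.get('fs')    # -> filesystem rooted at the current directory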
class Exclusive(ContextDecorator):
"""
Decorator and context manager used to require exclusive usage of an object, most probably a service. It's useful
for example if call order matters on a service implementation (think of an http api that requires a nonce or version
parameter ...).
Usage:
>>> def handler(some_service):
... with Exclusive(some_service):
... some_service.call_1()
... some_service.call_2()
... some_service.call_3()
This will ensure that nobody else is using the same service while in the "with" block, using a lock primitive to
ensure that.
"""
_locks = {}
def __init__(self, wrapped):
self._wrapped = wrapped
def get_lock(self):
_id = id(self._wrapped)
if not _id in Exclusive._locks:
Exclusive._locks[_id] = threading.RLock()
return Exclusive._locks[_id]
def __enter__(self):
self.get_lock().acquire()
return self._wrapped
def __exit__(self, *exc):
self.get_lock().release()
def requires(*service_names):
def decorate(mixed):
try:
options = mixed.__options__
except AttributeError:
mixed.__options__ = options = {}
for service_name in service_names:
service = Service(service_name)
service.name = service_name
options[service_name] = service
return mixed
return decorate
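# Hedged usage sketch for the ``requires`` decorator above: it attaches
# Service options to a plain callable so Container.args_for can resolve them
# (the ``fetch`` signature below is illustrative):
#
#     @requires('http')
#     def fetch(url, *, http):
#         return http.get(url)
#
#     args = create_container().args_for(fetch)   # -> (requests,)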
| 31.16763 | 120 | 0.630193 |
acf4e0f439f5d085a338043ad6421db59ead84ba | 1,189 | py | Python | start_lightsheet.py | Dispertech/DisperScripts | 774eeac64c8bc92a48f15bcff0976926cec942bf | [
"MIT"
] | null | null | null | start_lightsheet.py | Dispertech/DisperScripts | 774eeac64c8bc92a48f15bcff0976926cec942bf | [
"MIT"
] | null | null | null | start_lightsheet.py | Dispertech/DisperScripts | 774eeac64c8bc92a48f15bcff0976926cec942bf | [
"MIT"
] | null | null | null | # ##############################################################################
# Copyright (c) 2021 Aquiles Carattino, Dispertech B.V. #
# start_lightsheet.py is part of disperscripts #
# This file is released under an MIT license. #
# See LICENSE.MD for more information. #
# ##############################################################################
import time
import yaml
from PyQt5.QtWidgets import QApplication
from LightSheet.models.experiment import LightSheetExperiment
from LightSheet.view.microscope_window import LightsheetWindow
from experimentor.lib.log import log_to_screen, get_logger
if __name__ == "__main__":
logger = get_logger()
handler = log_to_screen(logger=logger)
experiment = LightSheetExperiment()
experiment.load_configuration('lightsheet.yml', yaml.UnsafeLoader)
executor = experiment.initialize()
while executor.running():
time.sleep(.1)
app = QApplication([])
microscope_window = LightsheetWindow(experiment)
microscope_window.show()
app.exec()
experiment.finalize()
| 38.354839 | 80 | 0.57275 |
acf4e1be866eb6f33ceefc2a58346954320d8429 | 2,519 | py | Python | examples/ian_carl_parameters/runfft.py | LBJ-Wade/phenom | 8f0fdc14099dac09cb2eef36d825e577340a8421 | [
"MIT"
] | null | null | null | examples/ian_carl_parameters/runfft.py | LBJ-Wade/phenom | 8f0fdc14099dac09cb2eef36d825e577340a8421 | [
"MIT"
] | null | null | null | examples/ian_carl_parameters/runfft.py | LBJ-Wade/phenom | 8f0fdc14099dac09cb2eef36d825e577340a8421 | [
"MIT"
] | null | null | null | import matplotlib
matplotlib.use('MacOSX')
import matplotlib.pyplot as plt
import phenom
import numpy as np
import lal
import lalsimulation as lalsim
import myfft as fft
from helpers import *
# t, hp, hc = CallTDWaveform(approx="SEOBNRv2")
# t, hp, hc = CallTDWaveform(approx="IMRPhenomD")
# t, hp, hc = CallTDWaveform(approx="IMRPhenomPv2", chi1x=0.5, iota=np.pi/3., eta=0.16)
# t, hp, hc = CallTDWaveform(approx="SEOBNRv3", chi1x=0.5, iota=np.pi/3., eta=0.16)
m1=80.4782639
m2=16.384655
M, eta = phenom.M_eta_m1_m2(m1, m2)
t, hp, hc = CallTDWaveform(approx="IMRPhenomPv2",
M=M, eta=eta,
chi1x=0.5)
# plt.figure()
# plt.plot(t - t[peakindex(hp)], np.real(hp), label='IMRPhenomD')
# plt.legend(loc='best')
# plt.show()
ptaper_lower = phenom.planck_taper(t, t[0], t[0] + 1.)
hp *= ptaper_lower
f, hptilde = fft.fft(t, hp)
# plt.figure()
# mask = f > 0
# plt.plot( f[mask], np.abs(hptilde[mask]), label='TDtoFD' )
# plt.xscale('log')
# plt.yscale('log')
# plt.show()
# ifft and compare
###
#
phenompv3 = phenom.Waveform(approximant="IMRPhenomPv3")
from copy import copy
phenpv3_1 = copy(phenompv3)
phenpv3_1.input_params['m1']=80.4782639
phenpv3_1.input_params['m2']=16.384655
# phenpv3_1.input_params['chi1x']=0.062809065
# phenpv3_1.input_params['chi1y']=0.528722703
# phenpv3_1.input_params['chi1z']=-0.77006942
# phenpv3_1.input_params['chi2x']=-0.102698207
# phenpv3_1.input_params['chi2y']=-0.0977499112
# phenpv3_1.input_params['chi2z']=-0.0815029368
# phenpv3_1.input_params['inclination']=2.85646439
phenpv3_1.input_params['chi1x']=0.5
phenpv3_1.input_params['chi1y']=0.
phenpv3_1.input_params['chi1z']=0.
phenpv3_1.input_params['chi2x']=0.
phenpv3_1.input_params['chi2y']=0.
phenpv3_1.input_params['chi2z']=0.
phenpv3_1.input_params['inclination']=0.
phenpv3_1.input_params['f_min']=10.
phenpv3_1.input_params['delta_f']=1.0/256.
#phenomp_v3 waveform generator
phenpv3_1.phenompv3(phenpv3_1.input_params)
f = phenpv3_1.flist_Hz
hptilde = phenpv3_1.hptilde
ptaper_lower = phenom.planck_taper(f, f[0], f[0] + 1.)
hptilde *= ptaper_lower
# plt.figure()
# mask = f > 0
# plt.plot( f[mask], np.abs(hptilde[mask]), label='phenpv3' )
# plt.xscale('log')
# plt.yscale('log')
# plt.show()
#
###
tnew, hpnew = fft.myifft( f, hptilde, f[0], 5. )
plt.figure()
plt.plot(tnew - tnew[peakindex(hpnew)], np.real(hpnew), label='FDtoTD')
# plt.plot(t - t[peakindex(hp)], np.real(hp), label='LALSIM')
plt.legend(loc='best')
plt.xlim(-2,2)
plt.show()
| 24.696078 | 87 | 0.695117 |
acf4e20eba51ade749dda24e3682f3d9338262a6 | 369 | py | Python | backend/api/models/mixins/EffectiveDates.py | amichard/tfrs | ed3973016cc5c2ae48999d550a23b41a5ddad807 | [
"Apache-2.0"
] | 18 | 2017-05-10T21:55:11.000Z | 2021-03-01T16:41:32.000Z | backend/api/models/mixins/EffectiveDates.py | amichard/tfrs | ed3973016cc5c2ae48999d550a23b41a5ddad807 | [
"Apache-2.0"
] | 1,167 | 2017-03-04T00:18:43.000Z | 2022-03-03T22:31:51.000Z | backend/api/models/mixins/EffectiveDates.py | amichard/tfrs | ed3973016cc5c2ae48999d550a23b41a5ddad807 | [
"Apache-2.0"
] | 48 | 2017-03-09T17:19:39.000Z | 2022-02-24T16:38:17.000Z | from django.db import models
class EffectiveDates(models.Model):
effective_date = models.DateField(
blank=True, null=True, db_comment='The calendar date the value became valid.')
expiration_date = models.DateField(
blank=True, null=True, db_comment='The calendar date the value is no longer valid.')
class Meta:
abstract = True
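# Hedged usage sketch: any concrete model can mix in the date bounds, e.g.
#
#     class FuelCode(EffectiveDates):
#         name = models.CharField(max_length=100)
#
# FuelCode is illustrative; the abstract mixin itself creates no table.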
| 26.357143 | 92 | 0.704607 |
acf4e29d85c2cc6934735aa60786851cde7b20f7 | 248 | py | Python | tf0/tf_10.py | lcl1026504480/mfpython | c1b1689a42488129299e31152764c535eb8e66e0 | [
"MIT"
] | null | null | null | tf0/tf_10.py | lcl1026504480/mfpython | c1b1689a42488129299e31152764c535eb8e66e0 | [
"MIT"
] | null | null | null | tf0/tf_10.py | lcl1026504480/mfpython | c1b1689a42488129299e31152764c535eb8e66e0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Oct 5 19:56:09 2020
@author: lenovouser
"""
import tensorflow as tf
a=tf.constant([[2,2]])
b=tf.constant([[1],
[1]])
c=tf.matmul(a,b)
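# (1x2) @ (2x1) -> (1x1): c evaluates to [[2*1 + 2*1]] == [[4]].
# Note: tf.Session is the TensorFlow 1.x graph-mode API.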
with tf.Session() as sess:
print(sess.run(c)) | 17.714286 | 35 | 0.568548 |
acf4e2b582ea3797ec2204138be8231ebbf6a6c6 | 8,883 | py | Python | creation/bundles.py | jim-bo/silp2 | 1186a84b2570af0e4ed305ddfff8f931e012eadf | [
"MIT"
] | 1 | 2018-01-29T05:00:43.000Z | 2018-01-29T05:00:43.000Z | creation/bundles.py | jim-bo/silp2 | 1186a84b2570af0e4ed305ddfff8f931e012eadf | [
"MIT"
] | 1 | 2016-01-31T13:13:10.000Z | 2016-02-02T14:16:05.000Z | creation/bundles.py | jim-bo/silp2 | 1186a84b2570af0e4ed305ddfff8f931e012eadf | [
"MIT"
] | null | null | null | #!/usr/bin/python
'''
creates bundle graph from filtered multigraph
'''
### imports ###
import sys
import os
import logging
import networkx as nx
import numpy as np
import scipy.stats as stats
import cPickle
import helpers.io as io
import helpers.misc as misc
### definitions ###
### functions ###
def compress_edges(MG, p, q):
''' compresses the edges '''
# check for types.
bcnts = [0, 0, 0, 0]
for z in MG[p][q]:
bcnts[MG[p][q][z]['state']] += 1
# build numpy arrays for each distance type.
bdists = list()
for i in range(4):
bdists.append(np.zeros(bcnts[i], dtype=np.float))
# populate array with distances.
bidxs = [0, 0, 0, 0]
for z in MG[p][q]:
state = MG[p][q][z]['state']
dist = MG[p][q][z]['dist']
bdists[state][bidxs[state]] = dist
bidxs[state] += 1
# compute bundle info.
devs = list()
means = list()
mins = list()
maxs = list()
for i in range(4):
if bdists[i].shape[0] <= 0:
devs.append(-1)
means.append(-1)
mins.append(-1)
maxs.append(-1)
else:
devs.append(np.std(bdists[i]))
means.append(np.mean(bdists[i]))
mins.append(bdists[i].min())
maxs.append(bdists[i].max())
# return summaries.
return bcnts, bdists, devs, means, mins, maxs
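# Illustrative example (hedged; the numbers are made up): for an edge p--q
# carrying three parallel links, two in state 0 at distances 100 and 120 and
# one in state 2 at distance 80, compress_edges returns
#   bcnts  = [2, 0, 1, 0]
#   bdists = [array([100., 120.]), array([]), array([80.]), array([])]
# with devs/means/mins/maxs holding the per-state std/mean/min/max and -1
# for the empty states 1 and 3.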
def _load_reps(file_path):
''' loads repeat info from cpickle'''
# no weights.
if file_path == None:
return dict()
# try dictionary emthod.
if os.path.isdir(file_path) == True:
reps = dict()
for f in os.listdir(file_path):
n = f.replace(".npy","")
try:
reps[n] = np.load("%s/%s" % (file_path, f))
except:
continue
return reps
# get weights.
try:
with open(file_path) as fin:
return cPickle.load(fin)
except:
logging.warning("unable to load repeat pickle, ignoring weights")
return dict()
def create_bundles(paths, args):
""" creates bundles
Parameters
----------
paths.edge_file : string
args.bundle_size : int
args.pthresh : int
args.bup : int
"""
# load repeat annotations.
repcnts = _load_reps(args.rep_file)
# load the multi graph.
MG = nx.read_gpickle(paths.edge_file)
# create bundle graph.
BG = nx.Graph()
# add nodes.
for n in MG.nodes():
BG.add_node(n, MG.node[n])
# build set of adjacencies.
adjset = set()
for p, nbrs in MG.adjacency_iter():
for q in nbrs:
adjset.add(tuple(sorted([p,q])))
# compute bundles from adjacencies.
zerod = 0
zcnt = 0
ztot = len(adjset)
for p, q in adjset:
#logging.info("progress: %d of %d" % (zcnt, ztot))
zcnt += 1
# sanity check.
if MG.node[p]['cov'] == 0.0 or MG.node[q]['cov'] == 0.0:
logging.error("how can this happen?")
sys.exit()
# bundle size check.
bsize = len(MG[p][q])
if bsize < args.bundle_size:
continue
# group by insert size.
groups = dict()
std_devs = dict()
for z in MG[p][q]:
ins_size = MG[p][q][z]['ins_size']
if ins_size not in groups:
groups[ins_size] = list()
std_devs[ins_size] = MG[p][q][z]['std_dev']
groups[ins_size].append(z)
# loop over groups.
for ins_size in groups:
# compress info.
bcnts, bdists, devs, means, mins, maxs = compress_edges(MG, p, q)
# compute weights.
cov = 1 - abs(MG.node[p]['cov'] - MG.node[q]['cov']) / (MG.node[p]['cov'] + MG.node[q]['cov'])
# swap bdists for python lists.
for i in range(len(bdists)):
bdists[i] = list(bdists[i])
# add start stop info.
poses1 = list()
poses2 = list()
for z in MG[p][q]:
tmp = MG[p][q][z]
poses1.append((tmp['left1'], tmp['right1']))
poses2.append((tmp['left2'], tmp['right2']))
# create bundle.
if BG.has_edge(p, q):
logging.error("can't have multiple insert sizes between same node")
sys.exit(1)
# zero out negative distances.
avgs = [np.average(bdists[i]) for i in range(4)]
for i in range(4):
            if np.isnan(avgs[i]):  # note: `avgs[i] == np.nan` is always False
bcnts[i] = 0.0
if avgs[i] < -2 * args.bundle_size:
bcnts[i] = 0.0
zerod += 1
# don't add it if no support.
if np.sum(bcnts) == 0:
continue
#BG.add_edge(p, q, bcnts=bcnts, bdists=bdists, devs=devs, means=means, mins=mins, maxs=maxs, ins_size=ins_size, std_dev=std_devs[ins_size], poses1=poses1, poses2=poses2)
BG.add_edge(p, q, bcnts=bcnts, bdists=bdists, ins_size=ins_size, std_dev=std_devs[ins_size], cov=cov)
# start the slimming.
logging.info("starting repeat based slimming")
# do repeat mods.
track_upped = 0
track_remed = 0
track_ogedg = len(BG.edges())
idxs = np.zeros(1)
if repcnts != dict():
# create repeat distrib.
repavgs = np.zeros(len(repcnts), dtype=np.dtype([('name','S256'),('avg',np.float)]))
i = 0
for name in repcnts:
# save the name.
repavgs[i]['name'] = name
# skip no repeat info.
if name not in repcnts or repcnts[name] == None:
repavgs[i]['avg'] = 0
i += 1
continue
# take the average over ins_size + 6 (std_dev)
d = args.ins_size + (6 * args.std_dev)
if repcnts[name].shape[0] < d:
repavgs[i]['avg'] = np.average(repcnts[name])
else:
r = range(0,d)+range(len(repcnts[name])-d,len(repcnts[name]))
repavgs[i]['avg'] = np.average(repcnts[name][r])
i += 1
# compute the cutoff threshold.
score = stats.scoreatpercentile(repavgs[:]['avg'], args.pthresh)
idxs = repavgs[:]['avg'] > score
# look at each bundle and see if the repeats necessitates attention.
for p, q in BG.edges():
# get index of pairs.
idp = np.where(repavgs[:]['name'] == p)[0]
idq = np.where(repavgs[:]['name'] == q)[0]
# skip if both not high.
if idxs[idp] == False and idxs[idq] == False:
continue
# get score.
scp = repavgs[idp]['avg']
scq = repavgs[idq]['avg']
# check if this bundle needs attention.
if max(scp, scq) > score:
track_upped += 1
                # it gets its minimum bundle size upped.
for i in range(len(BG[p][q]['bcnts'])):
# clear if it doesn't meet criteria.
if BG[p][q]['bcnts'][i] < args.bundle_size + args.bup:
BG[p][q]['bcnts'][i] = 0
# remove bundle if no support.
if np.sum(BG[p][q]['bcnts']) == 0:
track_remed += 1
BG.remove_edge(p,q)
else:
logging.info('no repeat information supplied')
# add repeat weights.
for p, q in BG.edges():
# create weight.
BG[p][q]['u'] = [0.0] * 4
# sum weights.
for z in MG[p][q]:
left1 = MG[p][q][z]['left1']
left2 = MG[p][q][z]['left2']
right1 = MG[p][q][z]['right1']
right2 = MG[p][q][z]['right2']
            try:
                # (left1, right1) spans contig p and (left2, right2) spans
                # contig q, matching how poses1/poses2 are built above.
                cntl = np.sum(repcnts[p][left1:right1])
                cntr = np.sum(repcnts[q][left2:right2])
                propl = 1.0 - (float(cntl) / float(right1 - left1))
                propr = 1.0 - (float(cntr) / float(right2 - left2))
except:
continue
# add average.
p_k = (propl + propr) / 2.0
# add it.
BG[p][q]['u'][MG[p][q][z]['state']] += p_k
# note the modifications due to filtering.
logging.info("contigs with repeat regions in %.2f threshold: %i of %i" % (args.pthresh, np.sum(idxs), len(idxs)))
logging.info("bundles effected by repeats: %i of %i" % (track_upped, track_ogedg))
logging.info("bundles removed by repeats: %i of %i" % (track_remed, track_ogedg))
logging.info("bundles removed by neg dist: %i" % (zerod))
logging.info("total bundles: %i" % (len(BG.edges())))
# write to disk.
nx.write_gpickle(BG, paths.bundle_file)
| 29.808725 | 181 | 0.501745 |
acf4e36373bf887fbe0299382fae383e0bce4c96 | 1,008 | py | Python | var/spack/repos/builtin/packages/cddlib/package.py | RemoteConnectionManager/spack | f2967b6c16effd26ce007cf86cadbb645c574f50 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 3 | 2019-06-27T13:26:50.000Z | 2019-07-01T16:24:54.000Z | var/spack/repos/builtin/packages/cddlib/package.py | openbiox/spack | bb6ec7fb40c14b37e094a860e3625af53f633174 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 75 | 2016-07-27T11:43:00.000Z | 2020-12-08T15:56:53.000Z | var/spack/repos/builtin/packages/cddlib/package.py | openbiox/spack | bb6ec7fb40c14b37e094a860e3625af53f633174 | [
"ECL-2.0",
"Apache-2.0",
"MIT"
] | 8 | 2015-10-16T13:51:49.000Z | 2021-10-18T13:58:03.000Z | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Cddlib(AutotoolsPackage):
"""The C-library cddlib is a C implementation of the Double Description
Method of Motzkin et al. for generating all vertices (i.e. extreme points)
and extreme rays of a general convex polyhedron in R^d given by a system
of linear inequalities"""
homepage = "https://www.inf.ethz.ch/personal/fukudak/cdd_home/"
url = "ftp://ftp.math.ethz.ch/users/fukudak/cdd/cddlib-094h.tar.gz"
version('0.94h', '1467d270860bbcb26d3ebae424690e7c')
# Note: It should be possible to build cddlib also without gmp
depends_on("gmp")
depends_on("libtool", type="build")
def url_for_version(self, version):
url = "ftp://ftp.math.ethz.ch/users/fukudak/cdd/cddlib-{0}.tar.gz"
return url.format(version.joined)
| 34.758621 | 78 | 0.714286 |
acf4e40e5504ae7c8b6ac02bbaa5e7c233ae7fdb | 391 | py | Python | build/odom_to_trajectory/catkin_generated/pkg.develspace.context.pc.py | farzingkh/Sensor-Fusion-EKF | 28b5c51a1c30b19ba73984cf926ea834f2b318ca | [
"MIT"
] | 1 | 2021-11-09T10:03:51.000Z | 2021-11-09T10:03:51.000Z | build/odom_to_trajectory/catkin_generated/pkg.develspace.context.pc.py | farzingkh/Sensor-Fusion-EKF | 28b5c51a1c30b19ba73984cf926ea834f2b318ca | [
"MIT"
] | 1 | 2021-11-03T02:55:21.000Z | 2021-11-03T02:55:21.000Z | build/odom_to_trajectory/catkin_generated/pkg.develspace.context.pc.py | farzingkh/EKF-Localization | 28b5c51a1c30b19ba73984cf926ea834f2b318ca | [
"MIT"
] | null | null | null | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "odom_to_trajectory"
PROJECT_SPACE_DIR = "/home/workspace/Sensor-Fusion-EKF/devel"
PROJECT_VERSION = "0.0.0"
| 43.444444 | 68 | 0.713555 |
acf4e468b9a87a920e3a42ad26484d6d0b30f8ea | 8,252 | py | Python | moreutils/moreutils.py | the-krak3n/Fixator10-Cogs | 38af6499634a4f4ea200fa9173ccfc9a6bbd14d4 | [
"MIT"
] | 1 | 2020-09-25T21:54:01.000Z | 2020-09-25T21:54:01.000Z | moreutils/moreutils.py | the-krak3n/Fixator10-Cogs | 38af6499634a4f4ea200fa9173ccfc9a6bbd14d4 | [
"MIT"
] | null | null | null | moreutils/moreutils.py | the-krak3n/Fixator10-Cogs | 38af6499634a4f4ea200fa9173ccfc9a6bbd14d4 | [
"MIT"
] | null | null | null | import colorsys
import datetime
import random
import aiohttp
import discord
from redbot.core import checks, commands
from redbot.core.i18n import Translator, cog_i18n
from redbot.core.utils import chat_formatting as chat
from tabulate import tabulate
try:
from redbot import json # support of Draper's branch
except ImportError:
import json
T_ = Translator("MoreUtils", __file__)
_ = lambda s: s
DISCORD_STATUS_NAMES = {
"none": _("OK"),
"minor": _("Minor problems"),
"major": _("Major problems"),
"critical": _("Critical problems"),
}
_ = T_
# credits to https://stackoverflow.com/questions/14088375/how-can-i-convert-rgb-to-cmyk-and-vice-versa-in-python
def rgb_to_cmyk(r, g, b):
rgb_scale = 255
cmyk_scale = 100
if (r == 0) and (g == 0) and (b == 0):
# black
return 0, 0, 0, cmyk_scale
# rgb [0,255] -> cmy [0,1]
c = 1 - (r / float(rgb_scale))
m = 1 - (g / float(rgb_scale))
y = 1 - (b / float(rgb_scale))
# extract out k [0,1]
min_cmy = min(c, m, y)
c = (c - min_cmy) / (1 - min_cmy)
m = (m - min_cmy) / (1 - min_cmy)
y = (y - min_cmy) / (1 - min_cmy)
k = min_cmy
# rescale to the range [0,cmyk_scale]
return c * cmyk_scale, m * cmyk_scale, y * cmyk_scale, k * cmyk_scale
# credits to https://www.geeksforgeeks.org/program-change-rgb-color-model-hsv-color-model/
# logic from http://www.niwa.nu/2013/05/math-behind-colorspace-conversions-rgb-hsl/
def rgb_to_hsv(r, g, b):
# R, G, B values are divided by 255
# to change the range from 0..255 to 0..1:
r, g, b = r / 255.0, g / 255.0, b / 255.0
# h, s, v = hue, saturation, value
cmax = max(r, g, b)
cmin = min(r, g, b)
diff = cmax - cmin
    # if cmax and cmin are equal then h = 0
if cmax == cmin:
h = 0
# if cmax equal r then compute h
elif cmax == r:
h = (60 * ((g - b) / diff) + 360) % 360
# if cmax equal g then compute h
elif cmax == g:
h = (60 * ((b - r) / diff) + 120) % 360
# if cmax equal b then compute h
elif cmax == b:
h = (60 * ((r - g) / diff) + 240) % 360
# if cmax equal zero
s = 0 if cmax == 0 else (diff / cmax) * 100
# compute v
v = cmax * 100
return h, s, v
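# Hedged sanity checks for the two converters above, using pure red:
#   rgb_to_cmyk(255, 0, 0) -> (0.0, 100.0, 100.0, 0.0)
#   rgb_to_hsv(255, 0, 0)  -> (0.0, 100.0, 100.0)  # hue 0 deg, full S and V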
def bool_emojify(bool_var: bool) -> str:
return "✅" if bool_var else "❌"
@cog_i18n(_)
class MoreUtils(commands.Cog):
"""Some (maybe) useful utils."""
__version__ = "2.0.19"
# noinspection PyMissingConstructor
def __init__(self, bot):
self.bot = bot
self.session = aiohttp.ClientSession(json_serialize=json.dumps)
def cog_unload(self):
self.bot.loop.create_task(self.session.close())
async def red_delete_data_for_user(self, **kwargs):
return
@commands.command(name="thetime")
async def _thetime(self, ctx):
"""Displays the current time of the server."""
await ctx.send(datetime.datetime.now().strftime(_("%d.%m.%Y %H:%M:%S %Z")))
@commands.command(aliases=["HEX", "hex", "colour"])
@checks.bot_has_permissions(embed_links=True)
@commands.max_concurrency(1, commands.BucketType.user)
async def color(self, ctx, *, color: discord.Color):
"""Shows some info about provided color."""
colorrgb = color.to_rgb()
rgb_coords = [x / 255 for x in colorrgb]
colorhsv = rgb_to_hsv(*colorrgb)
h, l, s = colorsys.rgb_to_hls(*rgb_coords)
colorhls = (colorhsv[0], l * 100, s * 100)
coloryiq = colorsys.rgb_to_yiq(*rgb_coords)
colorcmyk = rgb_to_cmyk(*colorrgb)
colors_text = (
"`HEX :` {}\n"
"`RGB :` {}\n"
"`CMYK:` {}\n"
"`HSV :` {}\n"
"`HLS :` {}\n"
"`YIQ :` {}\n"
"`Int :` {}".format(
str(color),
colorrgb,
tuple(map(lambda x: isinstance(x, float) and round(x, 2) or x, colorcmyk)),
tuple(map(lambda x: isinstance(x, float) and round(x, 2) or x, colorhsv)),
tuple(map(lambda x: isinstance(x, float) and round(x, 2) or x, colorhls)),
tuple(map(lambda x: isinstance(x, float) and round(x, 2) or x, coloryiq)),
color.value,
)
)
em = discord.Embed(
title=str(color),
description=_("`Name:` Loading...\n") + colors_text,
url=f"http://www.color-hex.com/color/{str(color)[1:]}",
colour=color,
timestamp=ctx.message.created_at,
)
# CAUTION: That can fail soon
em.set_thumbnail(url=f"https://api.alexflipnote.dev/color/image/{str(color)[1:]}")
em.set_image(url=f"https://api.alexflipnote.dev/color/image/gradient/{str(color)[1:]}")
m = await ctx.send(embed=em)
async with self.session.get(
"https://www.thecolorapi.com/id", params={"hex": str(color)[1:]}
) as data:
color_response = await data.json(loads=json.loads)
em.description = (
_("`Name:` {} ({})\n").format(
color_response.get("name", {}).get("value", "?"),
color_response.get("name", {}).get("closest_named_hex", "?"),
)
+ colors_text
)
await m.edit(embed=em)
@commands.guild_only()
@commands.command()
async def someone(self, ctx, *, text: str = None):
"""Help I've fallen and I need @someone.
Discord 2018 April Fools."""
smilies = [
"¯\\_(ツ)_/¯",
"(∩ ͡° ͜ʖ ͡°)⊃━☆゚. o ・ 。゚",
"(∩ ͡° ͜ʖ ͡°)⊃━✿✿✿✿✿✿",
"༼ つ ◕_◕ ༽つ",
"(◕‿◕✿)",
"(⁄ ⁄•⁄ω⁄•⁄ ⁄)",
"(╯°□°)╯︵ ┻━┻",
"ಠ_ಠ",
"¯\\(°_o)/¯",
"(✿ ͡◕ ᴗ◕)つ━━✫・o。",
"ヽ༼ ಠ益ಠ ༽ノ",
]
smile = random.choice(smilies)
member = random.choice(ctx.channel.members)
await ctx.send(
"**@someone** {} ***{}*** {}".format(
smile,
chat.escape(member.display_name, mass_mentions=True),
chat.escape(text, mass_mentions=True) if text else "",
)
)
@commands.command(pass_context=True)
@commands.cooldown(1, 10, commands.BucketType.user)
@commands.max_concurrency(1, commands.BucketType.user)
async def discordstatus(self, ctx):
"""Get current discord status from discordstatus.com"""
async with ctx.typing():
try:
async with self.session.get(
"https://srhpyqt94yxb.statuspage.io/api/v2/summary.json"
) as data:
response = await data.json(loads=json.loads)
except Exception as e:
await ctx.send(
chat.error(
_("Unable to get data from https://discordstatus.com: {}").format(e)
)
)
return
status = response["status"]
components = response["components"]
if await ctx.embed_requested():
embed = discord.Embed(
title=_("Discord Status"),
description=_(
DISCORD_STATUS_NAMES.get(status["indicator"], status["indicator"])
),
timestamp=datetime.datetime.fromisoformat(response["page"]["updated_at"])
.astimezone(datetime.timezone.utc)
.replace(tzinfo=None), # make naive
color=await ctx.embed_color(),
url="https://discordstatus.com",
)
for component in components:
embed.add_field(
name=component["name"],
value=component["status"].capitalize().replace("_", " "),
)
await ctx.send(embed=embed)
else:
await ctx.send(
f"{_(DISCORD_STATUS_NAMES.get(status['indicator'], status['indicator']))}\n"
f"{chat.box(tabulate([(c['name'], c['status'].capitalize().replace('_', ' ')) for c in components]))}"
)
| 34.240664 | 122 | 0.523873 |
acf4e5141a6c31e7a42a259b31ae06708594204d | 667 | py | Python | ACME/math/fibonacci.py | mauriziokovacic/ACME | 2615b66dd4addfd5c03d9d91a24c7da414294308 | [
"MIT"
] | 3 | 2019-10-23T23:10:55.000Z | 2021-09-01T07:30:14.000Z | ACME/math/fibonacci.py | mauriziokovacic/ACME-Python | 2615b66dd4addfd5c03d9d91a24c7da414294308 | [
"MIT"
] | null | null | null | ACME/math/fibonacci.py | mauriziokovacic/ACME-Python | 2615b66dd4addfd5c03d9d91a24c7da414294308 | [
"MIT"
] | 1 | 2020-07-11T11:35:43.000Z | 2020-07-11T11:35:43.000Z | import math
def n_bonacci(N, n):
"""
Computes the n-bonacci number for the given input
Parameters
----------
    N : int
        the multiplier applied to the (n-1)-th term of the series
    n : int
        the index of the term to compute
Returns
-------
int
the n-bonacci number for the given input
"""
if n <= 1:
return n
    return N * n_bonacci(N, n - 1) + n_bonacci(N, n - 2)
def fibonacci(n):
"""
Computes the fibonacci number for the given input
Parameters
----------
n : int
        the index of the Fibonacci number to compute
Returns
-------
int
the fibonacci number
"""
    return n_bonacci(1, n)
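# Illustrative values (hedged):
#   fibonacci(6) == 8            # 0, 1, 1, 2, 3, 5, 8
#   n_bonacci(2, 4) == 12        # N=2 gives the Pell numbers 0, 1, 2, 5, 12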
| 15.880952 | 53 | 0.541229 |
acf4e56090dc65de244b464008bd1286a5e263c4 | 696 | py | Python | pypipeline/cellio/standardio/__init__.py | JohannesVerherstraeten/pypipeline | 1ccb2a2567dc3159123f6cacf6b25b6c6b1fbff2 | [
"Apache-2.0"
] | 3 | 2022-02-20T18:45:11.000Z | 2022-02-20T19:09:40.000Z | pypipeline/cellio/standardio/__init__.py | JohannesVerherstraeten/pypipeline | 1ccb2a2567dc3159123f6cacf6b25b6c6b1fbff2 | [
"Apache-2.0"
] | null | null | null | pypipeline/cellio/standardio/__init__.py | JohannesVerherstraeten/pypipeline | 1ccb2a2567dc3159123f6cacf6b25b6c6b1fbff2 | [
"Apache-2.0"
] | 1 | 2022-03-07T17:06:00.000Z | 2022-03-07T17:06:00.000Z | # Copyright 2021 Johannes Verherstraeten
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pypipeline.cellio.standardio.input import Input
from pypipeline.cellio.standardio.output import Output
| 40.941176 | 74 | 0.783046 |
acf4e6173d88656c6f1f905f9bb711f99a4e078e | 343 | py | Python | candidates_crossing_test.py | mustafmst/thesis-snake-bot | 329cbdf5b17a8a3344e81c4716e3a6ef3756acc4 | [
"MIT"
] | 4 | 2019-02-13T16:41:11.000Z | 2019-08-19T15:45:41.000Z | candidates_crossing_test.py | mustafmst/thesis-snake-bot | 329cbdf5b17a8a3344e81c4716e3a6ef3756acc4 | [
"MIT"
] | 6 | 2020-01-28T22:37:25.000Z | 2022-02-10T00:15:22.000Z | candidates_crossing_test.py | mustafmst/thesis-snake-bot | 329cbdf5b17a8a3344e81c4716e3a6ef3756acc4 | [
"MIT"
] | 1 | 2020-11-21T16:28:32.000Z | 2020-11-21T16:28:32.000Z | from geneticAI.geneticAlgorithm.candidate import Candidate
from geneticAI.config import RUN_CONFIG
import numpy as np
def main():
a = Candidate(RUN_CONFIG)
b = Candidate(RUN_CONFIG)
a.get_genotype()
b.get_genotype()
c = a.cross_with(b)
w = c.get_genotype()
c.get_score()
if __name__ == '__main__':
main()
| 17.15 | 58 | 0.685131 |
acf4e6ab7d2e6fb77f9868513f63bbfd9ddca080 | 3,122 | py | Python | src/genie/libs/parser/iosxe/tests/ShowWirelessStatsMobility/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxe/tests/ShowWirelessStatsMobility/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxe/tests/ShowWirelessStatsMobility/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"mobility_event_statistics": {
"joined_as": {
"local": 60431,
"foreign": 0,
"export_foreign": 0,
"export_anchor": 0,
},
"delete": {"local": 60316, "remote": 0},
"role_changes": {"local_to_anchor": 0, "anchor_to_local": 0},
"roam_stats": {
"l2_roam_count": 0,
"l3_roam_count": 0,
"flex_client_roam_count": 0,
"inter_wncd_roam_count": 0,
"intra_wncd_roam_count": 154227,
"remote_inter_cntrl_roam_count": 0,
"remote_webauth_pending_roams": 0,
},
"anchor_request": {
"sent": 0,
"grant_received": 0,
"deny_received": 0,
"received": 0,
"grant_sent": 0,
"deny_sent": 0,
},
"handoff_status_received": {
"success": 0,
"group_mismatch": 0,
"client_unknown": 0,
"client_blacklisted": 0,
"ssid_mismatch": 0,
"denied": 0,
"l3_vlan_override": 0,
"unknown_peer": 0,
},
"handoff_status_sent": {
"success": 0,
"group_mismatch": 0,
"client_unknown": 0,
"client_blacklisted": 0,
"ssid_mismatch": 0,
"denied": 0,
"l3_vlan_override": 0,
},
"export_anchor": {
"request_sent": 0,
"response_received": {
"ok": 0,
"deny_generic": 0,
"client_blacklisted": 0,
"client_limit_reached": 0,
"profile_mismatch": 0,
"deny_unknown_reason": 0,
"request_received": 0,
},
"response_sent": {
"ok": 0,
"deny_generic": 0,
"client_blacklisted": 0,
"client_limit_reached": 0,
"profile_mismatch": 0,
},
},
},
"mm_mobility_event_statistics": {
"event_data_allocs": 120747,
"event_data_frees": 120747,
"fsm_set_allocs": 60427,
"fsm_set_frees": 60316,
"timer_allocs": 0,
"timer_frees": 0,
"timer_starts": 0,
"timer_stops": 0,
"invalid_events": 0,
"internal_errors": 0,
"delete_internal_errors": 0,
"roam_internal_errors": 0,
},
"mmif_mobility_event_statistics": {
"event_data_allocs": 354187,
"event_data_frees": 354187,
"invalid_events": 13,
"event_schedule_errors": 0,
"mmif_internal_errors": {
"ipc_failure": 0,
"database_failure": 0,
"invalid_parameters": 0,
"mobility_message_decode_failure": 0,
"fsm_failure": 0,
"client_handoff_success": 0,
"client_handoff_failure": 0,
"anchor_deny": 0,
"remote_delete": 0,
"tunnel_down_delete": 0,
"mbssid_down": 0,
"unknown_failure": 0,
},
},
}
| 30.607843 | 69 | 0.471813 |
acf4e83dee8fc642a5cf093c9f76805c6d854062 | 2,068 | py | Python | util.py | videogorillas/pytorch-unet-dust | 102114f7f66b2f39b697a4244dd5f52eb24a7891 | [
"MIT"
] | null | null | null | util.py | videogorillas/pytorch-unet-dust | 102114f7f66b2f39b697a4244dd5f52eb24a7891 | [
"MIT"
] | null | null | null | util.py | videogorillas/pytorch-unet-dust | 102114f7f66b2f39b697a4244dd5f52eb24a7891 | [
"MIT"
] | null | null | null | import cv2
import torch
import numpy as np
from PIL import Image
def tensor2im(input_image, imtype=np.uint8):
""""Converts a Tensor array into a numpy image array.
Parameters:
input_image (tensor) -- the input image tensor array
imtype (type) -- the desired type of the converted numpy array
"""
if not isinstance(input_image, np.ndarray):
if isinstance(input_image, torch.Tensor): # get the data from a variable
image_tensor = input_image.data
else:
return input_image
tensor0 = None
        if len(image_tensor.shape) == 3:
tensor0 = image_tensor
else:
tensor0 = image_tensor[0]
image_numpy = tensor0.cpu().float().numpy() # convert it into a numpy array
if image_numpy.shape[0] == 1: # grayscale to RGB
image_numpy = np.tile(image_numpy, (3, 1, 1))
# image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 # post-processing: tranpose and scaling
image_numpy = (np.transpose(image_numpy, (1, 2, 0))) * 255.0 # post-processing: tranpose and scaling
else: # if it is a numpy array, do nothing
image_numpy = input_image
return image_numpy.astype(imtype)
def save_image(image_numpy, image_path):
"""Save a numpy image to the disk
Parameters:
image_numpy (numpy array) -- input numpy array
image_path (str) -- the path of the image
"""
cv2.imwrite(image_path, image_numpy)
# image_pil = Image.fromarray(image_numpy)
# image_pil.save(image_path)
# reads image in 16bit per channel mode if possible, returns ndarray (h,w,c) normalized to [0.0..1.0]
def imread16(path: str) -> np.ndarray:
flags = cv2.IMREAD_ANYDEPTH | cv2.IMREAD_ANYCOLOR
src = cv2.imread(path, flags)
assert src.dtype == np.uint16 or src.dtype == np.uint8, "unhandled data type " + str(src.dtype) + " while reading " + path
max_val = 65535. if src.dtype == np.uint16 else 255.
return src.astype("float32") / max_val
| 38.296296 | 128 | 0.641683 |
acf4e896007d4129f116290e4f71001cec49bdae | 3,446 | py | Python | handlers.py | MilkMare/123 | 46dfa216407899c2ea2f9fb6aa2a5e3f4a0ec896 | [
"MIT"
] | null | null | null | handlers.py | MilkMare/123 | 46dfa216407899c2ea2f9fb6aa2a5e3f4a0ec896 | [
"MIT"
] | null | null | null | handlers.py | MilkMare/123 | 46dfa216407899c2ea2f9fb6aa2a5e3f4a0ec896 | [
"MIT"
] | null | null | null | import os
from telegram.ext import CommandHandler, MessageHandler, Filters
from settings import WELCOME_MESSAGE, TELEGRAM_SUPPORT_CHAT_ID, REPLY_TO_THIS_MESSAGE, WRONG_REPLY
def start(update, context):
update.message.reply_text(WELCOME_MESSAGE)
user_info = update.message.from_user.to_dict()
context.bot.send_message(
chat_id=TELEGRAM_SUPPORT_CHAT_ID,
text=f"""
A new user has started the bot
""",
)
def forward_to_chat(update, context):
"""{
'message_id': 5,
'date': 1605106546,
'chat': {'id': 49820636, 'type': 'private', 'username': 'danokhlopkov'},
'text': 'TEST QOO', 'entities': [], 'caption_entities': [], 'photo': [], 'new_chat_members': [], 'new_chat_photo': [], 'delete_chat_photo': False, 'group_chat_created': False, 'supergroup_chat_created': False, 'channel_chat_created': False,
'from': {'id': 49820636, 'username': 'milkmare'}
}"""
forwarded = update.message.forward(chat_id=TELEGRAM_SUPPORT_CHAT_ID)
if not forwarded.forward_from:
context.bot.send_message(
chat_id=TELEGRAM_SUPPORT_CHAT_ID,
reply_to_message_id=forwarded.message_id,
text=f'{update.message.from_user.id}\n{REPLY_TO_THIS_MESSAGE}'
)
def forward_to_user(update, context):
"""{
'message_id': 10, 'date': 1605106662,
'chat': {'id': -484179205, 'type': 'group', 'title': '☎️ SUPPORT CHAT', 'all_members_are_administrators': True},
'reply_to_message': {
'message_id': 9, 'date': 1605106659,
'chat': {'id': -484179205, 'type': 'group', 'title': '☎️ SUPPORT CHAT', 'all_members_are_administrators': True},
'forward_from': {'id': 49820636, 'user_name': 'MilkMare'},
'forward_date': 1605106658,
'text': 'g', 'entities': [], 'caption_entities': [], 'photo': [], 'new_chat_members': [], 'new_chat_photo': [],
'delete_chat_photo': False, 'group_chat_created': False, 'supergroup_chat_created': False, 'channel_chat_created': False,
'from': {'id': 1440913096, 'username': 'milkmare'}
},
'text': 'ggg', 'entities': [], 'caption_entities': [], 'photo': [], 'new_chat_members': [], 'new_chat_photo': [], 'delete_chat_photo': False,
'group_chat_created': False, 'supergroup_chat_created': False, 'channel_chat_created': False,
'from': {'id': 49820636, 'first_name': 'Daniil', 'username': 'milkmare'}
}"""
user_id = None
if update.message.reply_to_message.forward_from:
user_id = update.message.reply_to_message.forward_from.id
elif REPLY_TO_THIS_MESSAGE in update.message.reply_to_message.text:
try:
user_id = int(update.message.reply_to_message.text.split('\n')[0])
except ValueError:
user_id = None
if user_id:
context.bot.copy_message(
message_id=update.message.message_id,
chat_id=user_id,
from_chat_id=update.message.chat_id
)
else:
context.bot.send_message(
chat_id=TELEGRAM_SUPPORT_CHAT_ID,
text=WRONG_REPLY
)
def setup_dispatcher(dp):
dp.add_handler(CommandHandler('start', start))
dp.add_handler(MessageHandler(Filters.chat_type.private, forward_to_chat))
dp.add_handler(MessageHandler(Filters.chat(TELEGRAM_SUPPORT_CHAT_ID) & Filters.reply, forward_to_user))
return dp
| 43.075 | 249 | 0.644515 |
acf4eaae516ed0b8d1931a14595da0128b2e6c7f | 12,242 | py | Python | se_code/helper.py | turian/PerceptualAudio | 14db8ee27580ebe6ceaea9d5355863ffc603c0ae | [
"MIT"
] | 264 | 2020-01-13T18:52:09.000Z | 2022-03-28T13:50:37.000Z | se_code/helper.py | turian/PerceptualAudio | 14db8ee27580ebe6ceaea9d5355863ffc603c0ae | [
"MIT"
] | 24 | 2020-01-15T10:00:22.000Z | 2022-02-09T23:35:19.000Z | se_code/helper.py | turian/PerceptualAudio | 14db8ee27580ebe6ceaea9d5355863ffc603c0ae | [
"MIT"
] | 33 | 2020-01-14T04:44:22.000Z | 2022-01-04T04:04:44.000Z | import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
from sklearn.preprocessing import normalize
# LEAKY RELU UNIT
def lrelu(x):
return tf.maximum(0.2*x,x)
# GENERATE DILATED LAYER FROM 1D SIGNAL
def signal_to_dilated(signal, dilation, n_channels):
shape = tf.shape(signal)
pad_elements = dilation - 1 - (shape[2] + dilation - 1) % dilation
dilated = tf.pad(signal, [[0, 0], [0, 0], [0, pad_elements], [0, 0]])
dilated = tf.reshape(dilated, [shape[0],-1,dilation,n_channels])
return tf.transpose(dilated, perm=[0,2,1,3]), pad_elements
# COLLAPSE DILATED LAYER TO 1D SIGNAL
def dilated_to_signal(dilated, pad_elements, n_channels):
shape = tf.shape(dilated)
signal = tf.transpose(dilated, perm=[0,2,1,3])
signal = tf.reshape(signal, [shape[0],1,-1,n_channels])
return signal[:,:,:shape[1]*shape[2]-pad_elements,:]
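# Shape walkthrough (hedged): for a signal of shape [batch, 1, length, C] and
# dilation d, signal_to_dilated pads length up to a multiple of d and returns
# [batch, d, padded_length / d, C]; dilated_to_signal undoes the transpose and
# reshape and trims off the pad_elements it is told about.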
# ADAPTIVE BATCH NORMALIZATION LAYER
def nm(x):
w0=tf.Variable(1.0,name='w0')
w1=tf.Variable(0.0,name='w1')
return w0*x+w1*slim.batch_norm(x)
# IDENTITY INITIALIZATION OF CONV LAYERS
def identity_initializer():
def _initializer(shape, dtype=tf.float32, partition_info=None):
array = np.zeros(shape, dtype=float)
cx, cy = shape[0]//2, shape[1]//2
for i in range(np.minimum(shape[2],shape[3])):
array[cx, cy, i, i] = 1
return tf.constant(array, dtype=dtype)
return _initializer
# L1 LOSS FUNCTION
def l1_loss(target,current):
return tf.reduce_mean(tf.abs(target-current))
def l1_loss_all(agg):
return tf.reduce_mean(tf.abs(agg))
def l2_loss_all(agg):
return tf.reduce_mean(tf.square(agg))
def l1_loss_batch(target,current):
return tf.reduce_mean(tf.abs(target-current),axis=[1,2,3])
# L2 LOSS FUNCTION
def l2_loss(target,current):
return tf.reduce_mean(tf.square(target-current))
def l2_loss_unit(target,current):
target=tf.linalg.l2_normalize(target,axis=3)
current = tf.linalg.l2_normalize(current,axis=3)
return tf.reduce_mean(tf.square(target-current))
def frame(data, window_length, hop_length):
"""Convert array into a sequence of successive possibly overlapping frames.
An n-dimensional array of shape (num_samples, ...) is converted into an
(n+1)-D array of shape (num_frames, window_length, ...), where each frame
starts hop_length points after the preceding one.
This is accomplished using stride_tricks, so the original data is not
copied. However, there is no zero-padding, so any incomplete frames at the
end are not included.
Args:
data: np.array of dimension N >= 1.
window_length: Number of samples in each frame.
hop_length: Advance (in samples) between each window.
Returns:
(N+1)-D np.array with as many rows as there are complete frames that can be
extracted.
"""
num_samples = data.shape[0]
num_frames = 1 + int(np.floor((num_samples - window_length) / hop_length))
shape = (num_frames, window_length) + data.shape[1:]
strides = (data.strides[0] * hop_length,) + data.strides
return np.lib.stride_tricks.as_strided(data, shape=shape, strides=strides)
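# Worked example (hedged): frame(np.arange(7), window_length=4, hop_length=2)
# returns the two overlapping frames
#   [[0, 1, 2, 3],
#    [2, 3, 4, 5]]
# and drops sample 6, which does not fill a complete window.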
def periodic_hann(window_length):
"""Calculate a "periodic" Hann window.
The classic Hann window is defined as a raised cosine that starts and
ends on zero, and where every value appears twice, except the middle
point for an odd-length window. Matlab calls this a "symmetric" window
and np.hanning() returns it. However, for Fourier analysis, this
actually represents just over one cycle of a period N-1 cosine, and
thus is not compactly expressed on a length-N Fourier basis. Instead,
it's better to use a raised cosine that ends just before the final
zero value - i.e. a complete cycle of a period-N cosine. Matlab
calls this a "periodic" window. This routine calculates it.
Args:
window_length: The number of points in the returned window.
Returns:
A 1D np.array containing the periodic hann window.
"""
return 0.5 - (0.5 * np.cos(2 * np.pi / window_length *
np.arange(window_length)))
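# Hedged comparison for window_length = 4:
#   periodic_hann(4) -> [0.0, 0.5, 1.0, 0.5]    one full period-4 cosine cycle
#   np.hanning(4)    -> [0.0, 0.75, 0.75, 0.0]  symmetric, period 3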
def stft_magnitude(signal, fft_length,
hop_length=None,
window_length=None):
"""Calculate the short-time Fourier transform magnitude.
Args:
signal: 1D np.array of the input time-domain signal.
fft_length: Size of the FFT to apply.
hop_length: Advance (in samples) between each frame passed to FFT.
window_length: Length of each block of samples to pass to FFT.
Returns:
2D np.array where each row contains the magnitudes of the fft_length/2+1
unique values of the FFT for the corresponding frame of input samples.
"""
frames = frame(signal, window_length, hop_length)
# Apply frame window to each frame. We use a periodic Hann (cosine of period
# window_length) instead of the symmetric Hann of np.hanning (period
# window_length-1).
window = periodic_hann(window_length)
windowed_frames = frames * window
return np.abs(np.fft.rfft(windowed_frames, int(fft_length)))
# Mel spectrum constants and functions.
_MEL_BREAK_FREQUENCY_HERTZ = 700.0
_MEL_HIGH_FREQUENCY_Q = 1127.0
def hertz_to_mel(frequencies_hertz):
"""Convert frequencies to mel scale using HTK formula.
Args:
frequencies_hertz: Scalar or np.array of frequencies in hertz.
Returns:
Object of same size as frequencies_hertz containing corresponding values
on the mel scale.
"""
return _MEL_HIGH_FREQUENCY_Q * np.log(
1.0 + (frequencies_hertz / _MEL_BREAK_FREQUENCY_HERTZ))
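# Worked value (hedged): hertz_to_mel(700.0) = 1127 * ln(2) ~= 781.2 mel,
# i.e. the break frequency maps to one natural-log doubling on the HTK scale.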
def spectrogram_to_mel_matrix(num_mel_bins=64,
num_spectrogram_bins=129,
audio_sample_rate=8000,
lower_edge_hertz=125.0,
upper_edge_hertz=3800.0):
"""Return a matrix that can post-multiply spectrogram rows to make mel.
Returns a np.array matrix A that can be used to post-multiply a matrix S of
spectrogram values (STFT magnitudes) arranged as frames x bins to generate a
"mel spectrogram" M of frames x num_mel_bins. M = S A.
The classic HTK algorithm exploits the complementarity of adjacent mel bands
to multiply each FFT bin by only one mel weight, then add it, with positive
and negative signs, to the two adjacent mel bands to which that bin
contributes. Here, by expressing this operation as a matrix multiply, we go
from num_fft multiplies per frame (plus around 2*num_fft adds) to around
num_fft^2 multiplies and adds. However, because these are all presumably
accomplished in a single call to np.dot(), it's not clear which approach is
faster in Python. The matrix multiplication has the attraction of being more
general and flexible, and much easier to read.
Args:
num_mel_bins: How many bands in the resulting mel spectrum. This is
the number of columns in the output matrix.
num_spectrogram_bins: How many bins there are in the source spectrogram
data, which is understood to be fft_size/2 + 1, i.e. the spectrogram
only contains the nonredundant FFT bins.
audio_sample_rate: Samples per second of the audio at the input to the
spectrogram. We need this to figure out the actual frequencies for
each spectrogram bin, which dictates how they are mapped into mel.
lower_edge_hertz: Lower bound on the frequencies to be included in the mel
spectrum. This corresponds to the lower edge of the lowest triangular
band.
upper_edge_hertz: The desired top edge of the highest frequency band.
Returns:
An np.array with shape (num_spectrogram_bins, num_mel_bins).
Raises:
ValueError: if frequency edges are incorrectly ordered or out of range.
"""
nyquist_hertz = audio_sample_rate / 2.
if lower_edge_hertz < 0.0:
raise ValueError("lower_edge_hertz %.1f must be >= 0" % lower_edge_hertz)
if lower_edge_hertz >= upper_edge_hertz:
raise ValueError("lower_edge_hertz %.1f >= upper_edge_hertz %.1f" %
(lower_edge_hertz, upper_edge_hertz))
if upper_edge_hertz > nyquist_hertz:
raise ValueError("upper_edge_hertz %.1f is greater than Nyquist %.1f" %
(upper_edge_hertz, nyquist_hertz))
spectrogram_bins_hertz = np.linspace(0.0, nyquist_hertz, num_spectrogram_bins)
spectrogram_bins_mel = hertz_to_mel(spectrogram_bins_hertz)
# The i'th mel band (starting from i=1) has center frequency
# band_edges_mel[i], lower edge band_edges_mel[i-1], and higher edge
# band_edges_mel[i+1]. Thus, we need num_mel_bins + 2 values in
# the band_edges_mel arrays.
band_edges_mel = np.linspace(hertz_to_mel(lower_edge_hertz),
hertz_to_mel(upper_edge_hertz), num_mel_bins + 2)
# Matrix to post-multiply feature arrays whose rows are num_spectrogram_bins
# of spectrogram values.
mel_weights_matrix = np.empty((num_spectrogram_bins, num_mel_bins))
for i in range(num_mel_bins):
lower_edge_mel, center_mel, upper_edge_mel = band_edges_mel[i:i + 3]
# Calculate lower and upper slopes for every spectrogram bin.
# Line segments are linear in the *mel* domain, not hertz.
lower_slope = ((spectrogram_bins_mel - lower_edge_mel) /
(center_mel - lower_edge_mel))
upper_slope = ((upper_edge_mel - spectrogram_bins_mel) /
(upper_edge_mel - center_mel))
# .. then intersect them with each other and zero.
mel_weights_matrix[:, i] = np.maximum(0.0, np.minimum(lower_slope,
upper_slope))
# HTK excludes the spectrogram DC bin; make sure it always gets a zero
# coefficient.
mel_weights_matrix[0, :] = 0.0
return mel_weights_matrix
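# Hedged shape check: with fft_length 512 a spectrogram has 512/2 + 1 = 257
# bins, so spectrogram_to_mel_matrix(num_mel_bins=64, num_spectrogram_bins=257)
# returns a (257, 64) matrix, and np.dot(S, A) maps (frames, 257) to
# (frames, 64), as the docstring's M = S A describes.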
def log_mel_spectrogram(data,
audio_sample_rate=44100,
log_offset=0.01,
window_length_secs=0.025,
hop_length_secs=0.010,
**kwargs):
"""Convert waveform to a log magnitude mel-frequency spectrogram.
Args:
data: 1D np.array of waveform data.
audio_sample_rate: The sampling rate of data.
log_offset: Add this to values when taking log to avoid -Infs.
window_length_secs: Duration of each window to analyze.
hop_length_secs: Advance between successive analysis windows.
**kwargs: Additional arguments to pass to spectrogram_to_mel_matrix.
Returns:
2D np.array of (num_frames, num_mel_bins) consisting of log mel filterbank
magnitudes for successive frames.
"""
window_length_samples = int(round(audio_sample_rate * window_length_secs))
hop_length_samples = int(round(audio_sample_rate * hop_length_secs))
fft_length = 2 ** int(np.ceil(np.log(window_length_samples) / np.log(2.0)))
spectrogram = stft_magnitude(
data,
fft_length=fft_length,
hop_length=hop_length_samples,
window_length=window_length_samples)
mel_spectrogram = np.dot(spectrogram, spectrogram_to_mel_matrix(
num_spectrogram_bins=spectrogram.shape[1],
audio_sample_rate=audio_sample_rate, **kwargs))
return np.log(mel_spectrogram + log_offset)
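# Hedged usage sketch: one second of 44.1 kHz audio under the defaults above
# (25 ms windows, 10 ms hops -> 441-sample hop, fft_length 2048) yields
# 98 frames, so the result has shape (98, 64) given
# spectrogram_to_mel_matrix's default num_mel_bins.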
def voc_ap(rec, prec, use_07_metric=True):
""" ap = voc_ap(rec, prec, [use_07_metric])
Compute VOC AP given precision and recall.
If use_07_metric is true, uses the
    VOC 07 11 point method (default: True).
"""
if use_07_metric:
# 11 point metric
ap = 0.
for t in np.arange(0., 1.1, 0.1):
if np.sum(rec >= t) == 0:
p = 0
else:
p = np.max(prec[rec >= t])
ap = ap + p / 11.
else:
# correct AP calculation
# first append sentinel values at the end
mrec = np.concatenate(([0.], rec, [1.]))
mpre = np.concatenate(([0.], prec, [0.]))
# compute the precision envelope
for i in range(mpre.size - 1, 0, -1):
mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
# to calculate area under PR curve, look for points
# where X axis (recall) changes value
i = np.where(mrec[1:] != mrec[:-1])[0]
# and sum (\Delta recall) * prec
ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
return ap | 42.655052 | 80 | 0.691962 |
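# Worked example (hedged), with rec and prec as numpy arrays:
#   rec = np.array([0.5, 1.0]); prec = np.array([1.0, 0.5])
#   use_07_metric=False -> 0.5 * 1.0 + 0.5 * 0.5 = 0.75
#   use_07_metric=True  -> (6 * 1.0 + 5 * 0.5) / 11 ~= 0.7727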
acf4ec069d37045a2be0404eb8083175bb9770e4 | 388 | py | Python | init5/config/asgi.py | iYoQ/init5 | be67fc501376f17f2f6a258c5f8c2002defbc415 | [
"Apache-2.0"
] | null | null | null | init5/config/asgi.py | iYoQ/init5 | be67fc501376f17f2f6a258c5f8c2002defbc415 | [
"Apache-2.0"
] | null | null | null | init5/config/asgi.py | iYoQ/init5 | be67fc501376f17f2f6a258c5f8c2002defbc415 | [
"Apache-2.0"
] | null | null | null | """
ASGI config for init5 project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')
application = get_asgi_application()
| 22.823529 | 78 | 0.783505 |
acf4ed506e3a670ce4233474186fc563ebe3dfe2 | 69,983 | py | Python | tensorflow/python/keras/layers/recurrent_test.py | EricRemmerswaal/tensorflow | 141ff27877579c81a213fa113bd1b474c1749aca | [
"Apache-2.0"
] | 190,993 | 2015-11-09T13:17:30.000Z | 2022-03-31T23:05:27.000Z | tensorflow/python/keras/layers/recurrent_test.py | EricRemmerswaal/tensorflow | 141ff27877579c81a213fa113bd1b474c1749aca | [
"Apache-2.0"
] | 48,461 | 2015-11-09T14:21:11.000Z | 2022-03-31T23:17:33.000Z | tensorflow/python/keras/layers/recurrent_test.py | EricRemmerswaal/tensorflow | 141ff27877579c81a213fa113bd1b474c1749aca | [
"Apache-2.0"
] | 104,981 | 2015-11-09T13:40:17.000Z | 2022-03-31T19:51:54.000Z | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for recurrent layers functionality other than GRU, LSTM, SimpleRNN.
See also: lstm_test.py, gru_test.py, simplernn_test.py.
"""
import collections
from absl.testing import parameterized
import numpy as np
from tensorflow.python import keras
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import keras_parameterized
from tensorflow.python.keras import testing_utils
from tensorflow.python.keras.engine import base_layer_utils
from tensorflow.python.keras.layers import recurrent as rnn_v1
from tensorflow.python.keras.layers import recurrent_v2 as rnn_v2
from tensorflow.python.keras.layers.legacy_rnn import rnn_cell_impl
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import special_math_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.platform import test
from tensorflow.python.training.tracking import util as trackable_util
from tensorflow.python.util import nest
# Used for nested input/output/state RNN test.
NestedInput = collections.namedtuple('NestedInput', ['t1', 't2'])
NestedState = collections.namedtuple('NestedState', ['s1', 's2'])
@keras_parameterized.run_all_keras_modes
class RNNTest(keras_parameterized.TestCase):
def test_minimal_rnn_cell_non_layer(self):
class MinimalRNNCell(object):
def __init__(self, units, input_dim):
self.units = units
self.state_size = units
self.kernel = keras.backend.variable(
np.random.random((input_dim, units)))
def call(self, inputs, states):
prev_output = states[0]
output = keras.backend.dot(inputs, self.kernel) + prev_output
return output, [output]
# Basic test case.
cell = MinimalRNNCell(32, 5)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [MinimalRNNCell(8, 5),
MinimalRNNCell(32, 8),
MinimalRNNCell(32, 32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_minimal_rnn_cell_non_layer_multiple_states(self):
class MinimalRNNCell(object):
def __init__(self, units, input_dim):
self.units = units
self.state_size = (units, units)
self.kernel = keras.backend.variable(
np.random.random((input_dim, units)))
def call(self, inputs, states):
prev_output_1 = states[0]
prev_output_2 = states[1]
output = keras.backend.dot(inputs, self.kernel)
output += prev_output_1
output -= prev_output_2
return output, [output * 2, output * 3]
# Basic test case.
cell = MinimalRNNCell(32, 5)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [MinimalRNNCell(8, 5),
MinimalRNNCell(16, 8),
MinimalRNNCell(32, 16)]
layer = keras.layers.RNN(cells)
self.assertEqual(layer.cell.state_size, ((8, 8), (16, 16), (32, 32)))
self.assertEqual(layer.cell.output_size, 32)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_minimal_rnn_cell_layer(self):
class MinimalRNNCell(keras.layers.Layer):
def __init__(self, units, **kwargs):
self.units = units
self.state_size = units
super(MinimalRNNCell, self).__init__(**kwargs)
def build(self, input_shape):
self.kernel = self.add_weight(shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.built = True
def call(self, inputs, states):
prev_output = states[0]
h = keras.backend.dot(inputs, self.kernel)
output = h + keras.backend.dot(prev_output, self.recurrent_kernel)
return output, [output]
def get_config(self):
config = {'units': self.units}
base_config = super(MinimalRNNCell, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
# Test basic case.
x = keras.Input((None, 5))
cell = MinimalRNNCell(32)
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
with generic_utils.CustomObjectScope({'MinimalRNNCell': MinimalRNNCell}):
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# Test stacking.
cells = [MinimalRNNCell(8),
MinimalRNNCell(12),
MinimalRNNCell(32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacked RNN serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
with generic_utils.CustomObjectScope({'MinimalRNNCell': MinimalRNNCell}):
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
def test_minimal_rnn_cell_abstract_rnn_cell(self):
class MinimalRNNCell(keras.layers.AbstractRNNCell):
def __init__(self, units, **kwargs):
self.units = units
super(MinimalRNNCell, self).__init__(**kwargs)
@property
def state_size(self):
return self.units
def build(self, input_shape):
self.kernel = self.add_weight(shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.built = True
def call(self, inputs, states):
prev_output = states[0]
h = keras.backend.dot(inputs, self.kernel)
output = h + keras.backend.dot(prev_output, self.recurrent_kernel)
return output, output
@property
def output_size(self):
return self.units
cell = MinimalRNNCell(32)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer="rmsprop",
loss="mse",
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [MinimalRNNCell(8),
MinimalRNNCell(16),
MinimalRNNCell(32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_rnn_with_time_major(self):
batch = 10
time_step = 5
embedding_dim = 4
units = 3
# Test basic case.
x = keras.Input((time_step, embedding_dim))
time_major_x = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
layer = keras.layers.SimpleRNN(
units, time_major=True, return_sequences=True)
self.assertEqual(
layer.compute_output_shape((time_step, None,
embedding_dim)).as_list(),
[time_step, None, units])
y = layer(time_major_x)
self.assertEqual(layer.output_shape, (time_step, None, units))
y = keras.layers.Lambda(lambda t: array_ops.transpose(t, [1, 0, 2]))(y)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, units)))
# Test stacking.
x = keras.Input((time_step, embedding_dim))
time_major_x = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
cell_units = [10, 8, 6]
cells = [keras.layers.SimpleRNNCell(cell_units[i]) for i in range(3)]
layer = keras.layers.RNN(cells, time_major=True, return_sequences=True)
y = layer(time_major_x)
self.assertEqual(layer.output_shape, (time_step, None, cell_units[-1]))
y = keras.layers.Lambda(lambda t: array_ops.transpose(t, [1, 0, 2]))(y)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, cell_units[-1])))
# Test masking.
x = keras.Input((time_step, embedding_dim))
time_major = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
mask = keras.layers.Masking()(time_major)
rnn = keras.layers.SimpleRNN(
units, time_major=True, return_sequences=True)(mask)
y = keras.layers.Lambda(lambda t: array_ops.transpose(t, [1, 0, 2]))(rnn)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, units)))
# Test layer output
x = keras.Input((time_step, embedding_dim))
rnn_1 = keras.layers.SimpleRNN(units, return_sequences=True)
y = rnn_1(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, embedding_dim)),
np.zeros((batch, time_step, units)))
x_np = np.random.random((batch, time_step, embedding_dim))
y_np_1 = model.predict(x_np)
time_major = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(x)
rnn_2 = keras.layers.SimpleRNN(
units, time_major=True, return_sequences=True)
y_2 = rnn_2(time_major)
y_2 = keras.layers.Lambda(
lambda t: array_ops.transpose(t, [1, 0, 2]))(y_2)
model_2 = keras.models.Model(x, y_2)
rnn_2.set_weights(rnn_1.get_weights())
y_np_2 = model_2.predict(x_np)
self.assertAllClose(y_np_1, y_np_2, atol=1e-4)
def test_rnn_cell_with_constants_layer(self):
# Test basic case.
x = keras.Input((None, 5))
c = keras.Input((3,))
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, c_np])
weights = model.get_weights()
config = layer.get_config()
custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# test flat list inputs.
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer([x, c])
model = keras.models.Model([x, c], y)
model.set_weights(weights)
y_np_3 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_3, atol=1e-4)
# Test stacking.
cells = [keras.layers.recurrent.GRUCell(8),
RNNCellWithConstants(12, constant_size=3),
RNNCellWithConstants(32, constant_size=3)]
layer = keras.layers.recurrent.RNN(cells)
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test GRUCell reset_after property.
x = keras.Input((None, 5))
c = keras.Input((3,))
cells = [keras.layers.recurrent.GRUCell(32, reset_after=True)]
layer = keras.layers.recurrent.RNN(cells)
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test stacked RNN serialization
x_np = np.random.random((6, 5, 5))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, c_np])
weights = model.get_weights()
config = layer.get_config()
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.recurrent.RNN.from_config(config.copy())
y = layer(x, constants=c)
model = keras.models.Model([x, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
def test_rnn_cell_with_non_keras_constants(self):
# Test basic case.
x = keras.Input((None, 5))
c = array_ops.zeros([6, 3], dtype=dtypes.float32)
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [keras.layers.recurrent.GRUCell(8),
RNNCellWithConstants(12, constant_size=3),
RNNCellWithConstants(32, constant_size=3)]
layer = keras.layers.recurrent.RNN(cells)
y = layer(x, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_rnn_cell_with_constants_layer_passing_initial_state(self):
# Test basic case.
x = keras.Input((None, 5))
c = keras.Input((3,))
s = keras.Input((32,))
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model([x, s, c], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 3))],
np.zeros((6, 32))
)
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
s_np = np.random.random((6, 32))
c_np = np.random.random((6, 3))
y_np = model.predict([x_np, s_np, c_np])
weights = model.get_weights()
config = layer.get_config()
custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model([x, s, c], y)
model.set_weights(weights)
y_np_2 = model.predict([x_np, s_np, c_np])
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# verify that state is used
y_np_2_different_s = model.predict([x_np, s_np + 10., c_np])
with self.assertRaises(AssertionError):
self.assertAllClose(y_np, y_np_2_different_s, atol=1e-4)
# test flat list inputs
with generic_utils.CustomObjectScope(custom_objects):
layer = keras.layers.RNN.from_config(config.copy())
y = layer([x, s, c])
model = keras.models.Model([x, s, c], y)
model.set_weights(weights)
y_np_3 = model.predict([x_np, s_np, c_np])
self.assertAllClose(y_np, y_np_3, atol=1e-4)
def test_rnn_cell_with_non_keras_constants_and_initial_state(self):
# Test basic case.
x = keras.Input((None, 5))
c = array_ops.zeros([6, 3], dtype=dtypes.float32)
s = array_ops.zeros([6, 32], dtype=dtypes.float32)
cell = RNNCellWithConstants(32, constant_size=3)
layer = keras.layers.RNN(cell)
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
# Test stacking.
cells = [keras.layers.recurrent.GRUCell(8),
RNNCellWithConstants(12, constant_size=3),
RNNCellWithConstants(32, constant_size=3)]
layer = keras.layers.recurrent.RNN(cells)
s = [array_ops.zeros([6, 8], dtype=dtypes.float32),
array_ops.zeros([6, 12], dtype=dtypes.float32),
array_ops.zeros([6, 32], dtype=dtypes.float32)]
y = layer(x, initial_state=s, constants=c)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32)))
def test_stacked_rnn_attributes(self):
if context.executing_eagerly():
self.skipTest('reduce_sum is not available in eager mode.')
cells = [keras.layers.LSTMCell(1),
keras.layers.LSTMCell(1)]
layer = keras.layers.RNN(cells)
layer.build((None, None, 1))
# Test weights
self.assertEqual(len(layer.trainable_weights), 6)
cells[0].trainable = False
self.assertEqual(len(layer.trainable_weights), 3)
self.assertEqual(len(layer.non_trainable_weights), 3)
# Test `get_losses_for` and `losses`
x = keras.Input((None, 1))
loss_1 = math_ops.reduce_sum(x)
loss_2 = math_ops.reduce_sum(cells[0].kernel)
cells[0].add_loss(loss_1, inputs=x)
cells[0].add_loss(loss_2)
self.assertEqual(len(layer.losses), 2)
self.assertEqual(layer.get_losses_for(None), [loss_2])
self.assertEqual(layer.get_losses_for(x), [loss_1])
# Test `updates`
cells = [keras.layers.LSTMCell(1),
keras.layers.LSTMCell(1)]
layer = keras.layers.RNN(cells)
x = keras.Input((None, 1))
_ = layer(x)
update_1 = state_ops.assign_add(cells[0].kernel,
x[0, 0, 0] * cells[0].kernel)
update_2 = state_ops.assign_add(cells[0].kernel,
array_ops.ones_like(cells[0].kernel))
# TODO(b/128682878): Remove when RNNCells are __call__'d.
with base_layer_utils.call_context().enter(layer, x, True, None):
cells[0].add_update(update_1, inputs=x)
cells[0].add_update(update_2)
self.assertEqual(len(layer.updates), 2)
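  # Note (added commentary, not in the original test): each LSTMCell owns
  # exactly three variables (kernel, recurrent_kernel, bias), so the two
  # stacked cells account for the 6 trainable weights asserted above, and
  # freezing one cell moves its 3 variables into the non-trainable set.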
def test_rnn_dynamic_trainability(self):
layer_class = keras.layers.SimpleRNN
embedding_dim = 4
units = 3
layer = layer_class(units)
layer.build((None, None, embedding_dim))
self.assertEqual(len(layer.weights), 3)
self.assertEqual(len(layer.trainable_weights), 3)
self.assertEqual(len(layer.non_trainable_weights), 0)
layer.trainable = False
self.assertEqual(len(layer.weights), 3)
self.assertEqual(len(layer.trainable_weights), 0)
self.assertEqual(len(layer.non_trainable_weights), 3)
layer.trainable = True
self.assertEqual(len(layer.weights), 3)
self.assertEqual(len(layer.trainable_weights), 3)
self.assertEqual(len(layer.non_trainable_weights), 0)
@parameterized.parameters(
[keras.layers.SimpleRNN, keras.layers.GRU, keras.layers.LSTM])
def test_rnn_cell_trainability(self, layer_cls):
# https://github.com/tensorflow/tensorflow/issues/32369.
layer = layer_cls(3, trainable=False)
self.assertFalse(layer.cell.trainable)
layer.trainable = True
self.assertTrue(layer.cell.trainable)
def test_state_reuse_with_dropout(self):
layer_class = keras.layers.SimpleRNN
embedding_dim = 4
units = 3
timesteps = 2
num_samples = 2
input1 = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim))
layer = layer_class(units,
return_state=True,
return_sequences=True,
dropout=0.2)
state = layer(input1)[1:]
input2 = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim))
output = layer_class(units)(input2, initial_state=state)
model = keras.Model([input1, input2], output)
inputs = [np.random.random((num_samples, timesteps, embedding_dim)),
np.random.random((num_samples, timesteps, embedding_dim))]
model.predict(inputs)
def test_builtin_and_custom_rnn_cell_serialization(self):
@keras.utils.generic_utils.register_keras_serializable(package='TestOnly')
class CustomRNNCell(keras.layers.Layer):
def __init__(self, units, **kwargs):
self.units = units
self.state_size = units
super(CustomRNNCell, self).__init__(**kwargs)
def build(self, input_shape):
self.kernel = self.add_weight(shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.built = True
def call(self, inputs, states):
prev_output = states[0]
h = keras.backend.dot(inputs, self.kernel)
output = h + keras.backend.dot(prev_output, self.recurrent_kernel)
return output, [output]
def get_config(self):
config = {'units': self.units}
base_config = super(CustomRNNCell, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
for cell_class in [keras.layers.SimpleRNNCell,
keras.layers.GRUCell,
keras.layers.LSTMCell,
CustomRNNCell]:
# Test basic case.
x = keras.Input((None, 5))
cell = cell_class(32)
layer = keras.layers.RNN(cell)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
# Test basic case serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
# Test stacking.
cells = [cell_class(8),
cell_class(12),
cell_class(32)]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
# Test stacked RNN serialization.
x_np = np.random.random((6, 5, 5))
y_np = model.predict(x_np)
weights = model.get_weights()
config = layer.get_config()
layer = keras.layers.RNN.from_config(config)
y = layer(x)
model = keras.models.Model(x, y)
model.set_weights(weights)
y_np_2 = model.predict(x_np)
self.assertAllClose(y_np, y_np_2, atol=1e-4)
@parameterized.named_parameters(
*testing_utils.generate_combinations_with_testcase_name(
layer=[rnn_v1.SimpleRNN, rnn_v1.GRU, rnn_v1.LSTM,
rnn_v2.GRU, rnn_v2.LSTM],
unroll=[True, False]))
def test_rnn_dropout(self, layer, unroll):
rnn_layer = layer(3, dropout=0.1, recurrent_dropout=0.1, unroll=unroll)
if not unroll:
x = keras.Input((None, 5))
else:
x = keras.Input((5, 5))
y = rnn_layer(x)
model = keras.models.Model(x, y)
model.compile(
'sgd',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
x_np = np.random.random((6, 5, 5))
y_np = np.random.random((6, 3))
model.train_on_batch(x_np, y_np)
@parameterized.named_parameters(
*testing_utils.generate_combinations_with_testcase_name(
cell=[keras.layers.SimpleRNNCell, keras.layers.GRUCell,
keras.layers.LSTMCell],
unroll=[True, False]))
def test_stacked_rnn_dropout(self, cell, unroll):
cells = [cell(3, dropout=0.1, recurrent_dropout=0.1),
cell(3, dropout=0.1, recurrent_dropout=0.1)]
layer = keras.layers.RNN(cells, unroll=unroll)
if not unroll:
x = keras.Input((None, 5))
else:
x = keras.Input((5, 5))
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
'sgd',
'mse',
run_eagerly=testing_utils.should_run_eagerly())
x_np = np.random.random((6, 5, 5))
y_np = np.random.random((6, 3))
model.train_on_batch(x_np, y_np)
def test_dropout_mask_reuse(self):
    # The layers are created with recurrent_initializer='zeros', so that the
    # recurrent state won't affect the output. By doing this, we can verify
    # the output and see whether the same mask is applied for each timestep.
layer_1 = keras.layers.SimpleRNN(3,
dropout=0.5,
kernel_initializer='ones',
recurrent_initializer='zeros',
return_sequences=True,
unroll=True)
layer_2 = keras.layers.RNN(
keras.layers.SimpleRNNCell(3,
dropout=0.5,
kernel_initializer='ones',
recurrent_initializer='zeros'),
return_sequences=True,
unroll=True)
layer_3 = keras.layers.RNN(
[keras.layers.SimpleRNNCell(3,
dropout=0.5,
kernel_initializer='ones',
recurrent_initializer='zeros'),
keras.layers.SimpleRNNCell(3,
dropout=0.5,
kernel_initializer='ones',
recurrent_initializer='zeros')
],
return_sequences=True,
unroll=True)
def verify(rnn_layer):
inputs = constant_op.constant(1.0, shape=(6, 2, 5))
out = rnn_layer(inputs, training=True)
if not context.executing_eagerly():
self.evaluate(variables_lib.global_variables_initializer())
batch_1 = self.evaluate(out)
batch_1_t0, batch_1_t1 = batch_1[:, 0, :], batch_1[:, 1, :]
self.assertAllClose(batch_1_t0, batch_1_t1)
      # This simulates the layer being called with multiple batches in eager mode
if context.executing_eagerly():
out2 = rnn_layer(inputs, training=True)
else:
out2 = out
batch_2 = self.evaluate(out2)
batch_2_t0, batch_2_t1 = batch_2[:, 0, :], batch_2[:, 1, :]
self.assertAllClose(batch_2_t0, batch_2_t1)
      # Also validate that a different dropout mask is used between batches.
self.assertNotAllClose(batch_1_t0, batch_2_t0)
self.assertNotAllClose(batch_1_t1, batch_2_t1)
for l in [layer_1, layer_2, layer_3]:
verify(l)
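  # Sketch (added commentary, not part of the original suite): the behavior
  # verified above relies on the cell caching one dropout mask per call,
  # roughly:
  #
  #   cell = keras.layers.SimpleRNNCell(3, dropout=0.5)
  #   mask = cell.get_dropout_mask_for_cell(inputs, training=True)  # cached
  #
  # The cached mask is reused at every timestep of a call and cleared between
  # calls, which is why t0/t1 agree within a batch but differ across batches.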
def test_stacked_rnn_compute_output_shape(self):
cells = [keras.layers.LSTMCell(3),
keras.layers.LSTMCell(6)]
embedding_dim = 4
timesteps = 2
layer = keras.layers.RNN(cells, return_state=True, return_sequences=True)
output_shape = layer.compute_output_shape((None, timesteps, embedding_dim))
expected_output_shape = [(None, timesteps, 6),
(None, 3),
(None, 3),
(None, 6),
(None, 6)]
self.assertEqual(
[tuple(o.as_list()) for o in output_shape],
expected_output_shape)
# Test reverse_state_order = True for stacked cell.
stacked_cell = keras.layers.StackedRNNCells(
cells, reverse_state_order=True)
layer = keras.layers.RNN(
stacked_cell, return_state=True, return_sequences=True)
output_shape = layer.compute_output_shape((None, timesteps, embedding_dim))
expected_output_shape = [(None, timesteps, 6),
(None, 6),
(None, 6),
(None, 3),
(None, 3)]
self.assertEqual(
[tuple(o.as_list()) for o in output_shape],
expected_output_shape)
def test_stacked_rnn_with_training_param(self):
# See https://github.com/tensorflow/tensorflow/issues/32586
class CellWrapper(keras.layers.AbstractRNNCell):
def __init__(self, cell):
super(CellWrapper, self).__init__()
self.cell = cell
@property
def state_size(self):
return self.cell.state_size
@property
def output_size(self):
return self.cell.output_size
def build(self, input_shape):
self.cell.build(input_shape)
self.built = True
def get_initial_state(self, inputs=None, batch_size=None, dtype=None):
return self.cell.get_initial_state(
inputs=inputs, batch_size=batch_size, dtype=dtype)
def call(self, inputs, states, training=None, **kwargs):
assert training is not None
return self.cell(inputs, states=states, training=training)
cell = keras.layers.LSTMCell(32)
cell = CellWrapper(cell)
cell = keras.layers.StackedRNNCells([cell])
rnn = keras.layers.RNN(cell)
inputs = np.ones((8, 4, 16), dtype=np.float32)
rnn(inputs, training=True)
def test_trackable_dependencies(self):
rnn = keras.layers.SimpleRNN
x = np.random.random((2, 2, 2))
y = np.random.random((2, 2))
model = keras.models.Sequential()
model.add(rnn(2))
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.fit(x, y, epochs=1, batch_size=1)
# check whether the model variables are present in the
# trackable list of objects
checkpointed_objects = {id(o) for o in trackable_util.list_objects(model)}
for v in model.variables:
self.assertIn(id(v), checkpointed_objects)
def test_high_dimension_RNN(self):
# Basic test case.
unit_a = 10
unit_b = 20
input_a = 5
input_b = 10
batch = 32
time_step = 4
cell = Minimal2DRNNCell(unit_a, unit_b)
x = keras.Input((None, input_a, input_b))
layer = keras.layers.RNN(cell)
y = layer(x)
self.assertEqual(cell.state_size.as_list(), [unit_a, unit_b])
if not context.executing_eagerly():
init_state = layer.get_initial_state(x)
self.assertEqual(len(init_state), 1)
self.assertEqual(init_state[0].shape.as_list(), [None, unit_a, unit_b])
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, input_a, input_b)),
np.zeros((batch, unit_a, unit_b)))
self.assertEqual(model.output_shape, (None, unit_a, unit_b))
# Test stacking.
cells = [
Minimal2DRNNCell(unit_a, unit_b),
Minimal2DRNNCell(unit_a * 2, unit_b * 2),
Minimal2DRNNCell(unit_a * 4, unit_b * 4)
]
layer = keras.layers.RNN(cells)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, input_a, input_b)),
np.zeros((batch, unit_a * 4, unit_b * 4)))
self.assertEqual(model.output_shape, (None, unit_a * 4, unit_b * 4))
def test_high_dimension_RNN_with_init_state(self):
unit_a = 10
unit_b = 20
input_a = 5
input_b = 10
batch = 32
time_step = 4
# Basic test case.
cell = Minimal2DRNNCell(unit_a, unit_b)
x = keras.Input((None, input_a, input_b))
s = keras.Input((unit_a, unit_b))
layer = keras.layers.RNN(cell)
y = layer(x, initial_state=s)
model = keras.models.Model([x, s], y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch([
np.zeros((batch, time_step, input_a, input_b)),
np.zeros((batch, unit_a, unit_b))
], np.zeros((batch, unit_a, unit_b)))
self.assertEqual(model.output_shape, (None, unit_a, unit_b))
# Bad init state shape.
bad_shape_a = unit_a * 2
bad_shape_b = unit_b * 2
cell = Minimal2DRNNCell(unit_a, unit_b)
x = keras.Input((None, input_a, input_b))
s = keras.Input((bad_shape_a, bad_shape_b))
layer = keras.layers.RNN(cell)
with self.assertRaisesWithPredicateMatch(ValueError,
'however `cell.state_size` is'):
layer(x, initial_state=s)
def test_inconsistent_output_state_size(self):
batch = 32
time_step = 4
state_size = 5
input_size = 6
cell = PlusOneRNNCell(state_size)
x = keras.Input((None, input_size))
layer = keras.layers.RNN(cell)
y = layer(x)
self.assertEqual(cell.state_size, state_size)
if not context.executing_eagerly():
init_state = layer.get_initial_state(x)
self.assertEqual(len(init_state), 1)
self.assertEqual(init_state[0].shape.as_list(), [None, state_size])
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, time_step, input_size)),
np.zeros((batch, input_size)))
self.assertEqual(model.output_shape, (None, input_size))
def test_get_initial_state(self):
cell = keras.layers.SimpleRNNCell(5)
with self.assertRaisesRegex(ValueError,
'batch_size and dtype cannot be None'):
cell.get_initial_state(None, None, None)
if not context.executing_eagerly():
inputs = keras.Input((None, 10))
initial_state = cell.get_initial_state(inputs, None, None)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
batch = array_ops.shape(inputs)[0]
dtype = inputs.dtype
initial_state = cell.get_initial_state(None, batch, dtype)
self.assertEqual(initial_state.shape.as_list(), [None, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
else:
batch = 8
inputs = np.random.random((batch, 10))
initial_state = cell.get_initial_state(inputs, None, None)
self.assertEqual(initial_state.shape.as_list(), [8, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
dtype = inputs.dtype
initial_state = cell.get_initial_state(None, batch, dtype)
self.assertEqual(initial_state.shape.as_list(), [batch, 5])
self.assertEqual(initial_state.dtype, inputs.dtype)
@parameterized.parameters([True, False])
def test_nested_input_output(self, stateful):
batch = 10
t = 5
i1, i2, i3 = 3, 4, 5
o1, o2, o3 = 2, 3, 4
cell = NestedCell(o1, o2, o3)
rnn = keras.layers.RNN(cell, stateful=stateful)
batch_size = batch if stateful else None
input_1 = keras.Input((t, i1), batch_size=batch_size)
input_2 = keras.Input((t, i2, i3), batch_size=batch_size)
outputs = rnn((input_1, input_2))
self.assertEqual(len(outputs), 2)
self.assertEqual(outputs[0].shape.as_list(), [batch_size, o1])
self.assertEqual(outputs[1].shape.as_list(), [batch_size, o2, o3])
model = keras.models.Model((input_1, input_2), outputs)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)), np.zeros((batch, t, i2, i3))],
[np.zeros((batch, o1)), np.zeros((batch, o2, o3))])
self.assertEqual(model.output_shape, [(batch_size, o1),
(batch_size, o2, o3)])
cell = NestedCell(o1, o2, o3, use_tuple=True)
rnn = keras.layers.RNN(cell, stateful=stateful)
input_1 = keras.Input((t, i1), batch_size=batch_size)
input_2 = keras.Input((t, i2, i3), batch_size=batch_size)
outputs = rnn(NestedInput(t1=input_1, t2=input_2))
self.assertEqual(len(outputs), 2)
self.assertEqual(outputs[0].shape.as_list(), [batch_size, o1])
self.assertEqual(outputs[1].shape.as_list(), [batch_size, o2, o3])
model = keras.models.Model([input_1, input_2], outputs)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3))],
[np.zeros((batch, o1)), np.zeros((batch, o2, o3))])
self.assertEqual(model.output_shape, [(batch_size, o1),
(batch_size, o2, o3)])
def test_nested_input_output_with_state(self):
batch = 10
t = 5
i1, i2, i3 = 3, 4, 5
o1, o2, o3 = 2, 3, 4
cell = NestedCell(o1, o2, o3)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
output1, output2, s1, s2 = rnn((input_1, input_2))
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2], [output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
cell = NestedCell(o1, o2, o3, use_tuple=True)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
output1, output2, s1, s2 = rnn(NestedInput(t1=input_1, t2=input_2))
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2], [output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
def test_nest_input_output_with_init_state(self):
batch = 10
t = 5
i1, i2, i3 = 3, 4, 5
o1, o2, o3 = 2, 3, 4
cell = NestedCell(o1, o2, o3)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
init_s1 = keras.Input((o1,))
init_s2 = keras.Input((o2, o3))
output1, output2, s1, s2 = rnn((input_1, input_2),
initial_state=(init_s1, init_s2))
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2, init_s1, init_s2],
[output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3)),
np.zeros((batch, o1)),
np.zeros((batch, o2, o3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
cell = NestedCell(o1, o2, o3, use_tuple=True)
rnn = keras.layers.RNN(cell, return_sequences=True, return_state=True)
input_1 = keras.Input((t, i1))
input_2 = keras.Input((t, i2, i3))
init_s1 = keras.Input((o1,))
init_s2 = keras.Input((o2, o3))
init_state = NestedState(s1=init_s1, s2=init_s2)
output1, output2, s1, s2 = rnn(NestedInput(t1=input_1, t2=input_2),
initial_state=init_state)
self.assertEqual(output1.shape.as_list(), [None, t, o1])
self.assertEqual(output2.shape.as_list(), [None, t, o2, o3])
self.assertEqual(s1.shape.as_list(), [None, o1])
self.assertEqual(s2.shape.as_list(), [None, o2, o3])
model = keras.models.Model([input_1, input_2, init_s1, init_s2],
[output1, output2])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
[np.zeros((batch, t, i1)),
np.zeros((batch, t, i2, i3)),
np.zeros((batch, o1)),
np.zeros((batch, o2, o3))],
[np.zeros((batch, t, o1)),
np.zeros((batch, t, o2, o3))])
self.assertEqual(model.output_shape, [(None, t, o1), (None, t, o2, o3)])
def test_peephole_lstm_cell(self):
def _run_cell(cell_fn, **kwargs):
inputs = array_ops.one_hot([1, 2, 3, 4], 4)
cell = cell_fn(5, **kwargs)
cell.build(inputs.shape)
initial_state = cell.get_initial_state(
inputs=inputs, batch_size=4, dtype=dtypes.float32)
inputs, _ = cell(inputs, initial_state)
output = inputs
if not context.executing_eagerly():
self.evaluate(variables_lib.global_variables_initializer())
output = self.evaluate(output)
return output
random_seed.set_random_seed(12345)
# `recurrent_activation` kwarg is set to sigmoid as that is hardcoded into
# rnn_cell.LSTMCell.
no_peephole_output = _run_cell(
keras.layers.LSTMCell,
kernel_initializer='ones',
recurrent_activation='sigmoid',
implementation=1)
first_implementation_output = _run_cell(
keras.layers.PeepholeLSTMCell,
kernel_initializer='ones',
recurrent_activation='sigmoid',
implementation=1)
second_implementation_output = _run_cell(
keras.layers.PeepholeLSTMCell,
kernel_initializer='ones',
recurrent_activation='sigmoid',
implementation=2)
tf_lstm_cell_output = _run_cell(
rnn_cell_impl.LSTMCell,
use_peepholes=True,
initializer=init_ops.ones_initializer)
self.assertNotAllClose(first_implementation_output, no_peephole_output)
self.assertAllClose(first_implementation_output,
second_implementation_output)
self.assertAllClose(first_implementation_output, tf_lstm_cell_output)
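  # Added commentary (not in the original test): peephole connections give the
  # input, forget and output gates a direct view of the cell state, which is
  # why the peephole runs above must differ from the plain LSTMCell run yet
  # agree with rnn_cell_impl.LSTMCell(use_peepholes=True).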
def test_masking_rnn_with_output_and_states(self):
class Cell(keras.layers.Layer):
def __init__(self):
self.state_size = None
self.output_size = None
super(Cell, self).__init__()
def build(self, input_shape):
self.state_size = input_shape[-1]
self.output_size = input_shape[-1]
def call(self, inputs, states):
return inputs, [s + 1 for s in states]
x = keras.Input((3, 1), name='x')
x_masked = keras.layers.Masking()(x)
s_0 = keras.Input((1,), name='s_0')
y, s = keras.layers.RNN(
Cell(), return_state=True)(x_masked, initial_state=s_0)
model = keras.models.Model([x, s_0], [y, s])
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
# last time step masked
x_np = np.array([[[1.], [2.], [0.]]])
s_0_np = np.array([[10.]])
y_np, s_np = model.predict([x_np, s_0_np])
# 1 is added to initial state two times
self.assertAllClose(s_np, s_0_np + 2)
# Expect last output to be the same as last output before masking
self.assertAllClose(y_np, x_np[:, 1, :])
def test_zero_output_for_masking(self):
for unroll in [True, False]:
cell = keras.layers.SimpleRNNCell(5)
x = keras.Input((5, 5))
mask = keras.layers.Masking()
layer = keras.layers.RNN(
cell, return_sequences=True, zero_output_for_mask=True, unroll=unroll)
masked_input = mask(x)
y = layer(masked_input)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
np_x = np.ones((6, 5, 5))
result_1 = model.predict(np_x)
      # set time steps 4 and 5 of the last record to zero (masked).
np_x[5, 3:] = 0
result_2 = model.predict(np_x)
      # expect result_2 to have the same output, except at time steps 4 and 5
      # of the last record.
result_1[5, 3:] = 0
self.assertAllClose(result_1, result_2)
def test_unroll_single_step(self):
"""Even if the time dimension is only one, we should be able to unroll."""
cell = keras.layers.SimpleRNNCell(5)
x = keras.Input((1, 5))
layer = keras.layers.RNN(cell, return_sequences=True, unroll=True)
y = layer(x)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
np_x = np.ones((6, 1, 5))
result = model.predict(np_x)
self.assertEqual((6, 1, 5), result.shape)
def test_unroll_zero_step(self):
"""If the time dimension is None, we should fail to unroll."""
cell = keras.layers.SimpleRNNCell(5)
x = keras.Input((None, 5))
layer = keras.layers.RNN(cell, return_sequences=True, unroll=True)
with self.assertRaisesRegex(ValueError, 'Cannot unroll a RNN.*'):
layer(x)
def test_full_input_spec(self):
# See https://github.com/tensorflow/tensorflow/issues/25985
inputs = keras.layers.Input(batch_shape=(1, 1, 1))
state_h = keras.layers.Input(batch_shape=(1, 1))
state_c = keras.layers.Input(batch_shape=(1, 1))
states = [state_h, state_c]
decoder_out = keras.layers.LSTM(1, stateful=True)(
inputs,
initial_state=states
)
model = keras.Model([inputs, state_h, state_c], decoder_out)
output1 = model.predict(
[np.ones((1, 1, 1)), np.ones((1, 1)), np.ones((1, 1))])
output2 = model.predict(
[np.ones((1, 1, 1)), np.ones((1, 1)), np.ones((1, 1))])
model.reset_states()
output3 = model.predict(
[np.ones((1, 1, 1)), np.ones((1, 1)), np.ones((1, 1))])
self.assertAllClose(output1, output3)
self.assertNotAllClose(output1, output2)
def test_reset_states(self):
# See https://github.com/tensorflow/tensorflow/issues/25852
with self.assertRaisesRegex(ValueError, 'it needs to know its batch size'):
simple_rnn = keras.layers.SimpleRNN(1, stateful=True)
simple_rnn.reset_states()
with self.assertRaisesRegex(ValueError, 'it needs to know its batch size'):
cell = Minimal2DRNNCell(1, 2)
custom_rnn = keras.layers.RNN(cell, stateful=True)
custom_rnn.reset_states()
@parameterized.parameters(
[keras.layers.SimpleRNNCell, keras.layers.GRUCell, keras.layers.LSTMCell])
def test_stateful_rnn_with_stacking(self, cell):
# See https://github.com/tensorflow/tensorflow/issues/28614.
batch = 12
timesteps = 10
input_dim = 8
output_dim = 64
cells = [cell(32), cell(64)]
x = keras.Input(batch_shape=(batch, None, input_dim))
layer = keras.layers.RNN(cells, stateful=True)
y = layer(x)
model = keras.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, timesteps, input_dim)),
np.zeros((batch, output_dim)))
model.predict(np.ones((batch, timesteps, input_dim)))
model.reset_states()
model.predict(np.ones((batch, timesteps, input_dim)))
new_states = nest.map_structure(lambda s: np.ones((batch, s)),
layer.cell.state_size)
layer.reset_states(new_states)
model.predict(np.ones((batch, timesteps, input_dim)))
def test_stateful_rnn_with_initial_state(self):
# See https://github.com/tensorflow/tensorflow/issues/32299.
batch = 12
timesteps = 1
input_dim = 8
output_dim = 16
test_inputs = np.full((batch, timesteps, input_dim), 0.5)
def make_model(stateful=False, with_initial_state=False):
input_layer = keras.Input(shape=(None, input_dim), batch_size=batch)
if with_initial_state:
initial_states = keras.backend.constant(np.ones((batch, output_dim)))
else:
initial_states = None
rnn_output = keras.layers.GRU(
units=output_dim, return_sequences=True, stateful=stateful)(
input_layer, initial_state=initial_states)
model = keras.Model(input_layer, rnn_output)
model.compile(
optimizer='rmsprop', loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
return model
# Define a model with a constant state initialization
model = make_model(stateful=True, with_initial_state=True)
layer_weights = model.layers[1].get_weights()
model.reset_states()
predict_1 = model.predict(test_inputs)
predict_2 = model.predict(test_inputs)
model.reset_states()
predict_3 = model.predict(test_inputs)
    # predict_1 and predict_2 should be different since batch 2 uses the state
    # left over from batch 1 as its initial state.
self.assertNotAllClose(predict_1, predict_2)
self.assertAllClose(predict_1, predict_3)
    # Create a new model with the same weights but without initial states. Make
    # sure its predictions differ from the model with a non-zero initial state.
model_2 = make_model(stateful=True, with_initial_state=False)
model_2.layers[1].set_weights(layer_weights)
model_2.reset_states()
predict_4 = model_2.predict(test_inputs)
predict_5 = model_2.predict(test_inputs)
self.assertNotAllClose(predict_1, predict_4)
self.assertNotAllClose(predict_4, predict_5)
# Create models with stateful=False, and make sure they handle init state
# correctly.
model_3 = make_model(stateful=False, with_initial_state=True)
model_3.layers[1].set_weights(layer_weights)
model_3.reset_states()
predict_6 = model_3.predict(test_inputs)
predict_7 = model_3.predict(test_inputs)
self.assertAllClose(predict_1, predict_6)
self.assertAllClose(predict_6, predict_7)
def test_stateful_rnn_with_customized_get_initial_state(self):
class TestCell(keras.layers.AbstractRNNCell):
state_size = 1
output_size = 2
def get_initial_state(self, inputs=None, batch_size=None, dtype=None):
return np.ones((batch_size, 1), dtype=dtype)
def call(self, inputs, states):
return inputs, states
layer = keras.layers.RNN(TestCell(), stateful=True, return_state=True)
inputs = keras.Input(shape=(10, 2), batch_size=4)
model = keras.Model(inputs, layer(inputs))
x = np.ones((4, 10, 2), dtype=np.float32)
output, state = model.predict(x)
self.assertAllClose(output, np.ones((4, 2)))
self.assertAllClose(state, np.ones((4, 1)))
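  # Added commentary (not in the original test): a cell-level get_initial_state
  # override replaces the default all-zeros state, so the stateful RNN above is
  # seeded with ones, and the returned state stays at ones because the cell
  # passes its state through unchanged.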
def test_input_dim_length(self):
simple_rnn = keras.layers.SimpleRNN(5, input_length=10, input_dim=8)
self.assertEqual(simple_rnn._batch_input_shape, (None, 10, 8))
simple_rnn = keras.layers.SimpleRNN(5, input_dim=8)
self.assertEqual(simple_rnn._batch_input_shape, (None, None, 8))
simple_rnn = keras.layers.SimpleRNN(5, input_length=10)
self.assertEqual(simple_rnn._batch_input_shape, (None, 10, None))
@parameterized.parameters(
[keras.layers.SimpleRNNCell, keras.layers.GRUCell, keras.layers.LSTMCell])
def test_state_spec_with_stack_cell(self, cell):
# See https://github.com/tensorflow/tensorflow/issues/27817 for more detail.
batch = 12
timesteps = 10
input_dim = 8
output_dim = 8
def create_cell():
return [cell(output_dim),
cell(output_dim),
cell(output_dim)]
inputs = keras.Input((timesteps, input_dim))
encoder_output = keras.layers.RNN(create_cell(), return_state=True)(inputs)
states = encoder_output[1:]
decoder_output = keras.layers.RNN(
create_cell())(inputs, initial_state=states)
model = keras.models.Model(inputs, decoder_output)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(
np.zeros((batch, timesteps, input_dim)),
np.zeros((batch, output_dim)))
model.predict(np.ones((batch, timesteps, input_dim)))
@parameterized.named_parameters(
*testing_utils.generate_combinations_with_testcase_name(layer=[
rnn_v1.SimpleRNN, rnn_v1.GRU, rnn_v1.LSTM, rnn_v2.GRU, rnn_v2.LSTM
]))
def test_rnn_with_ragged_input(self, layer):
ragged_data = ragged_factory_ops.constant(
[[[1., 1., 1., 1., 1.], [1., 2., 3., 1., 1.]],
[[2., 4., 1., 3., 1.]],
[[2., 3., 4., 1., 5.], [2., 3., 1., 1., 1.], [1., 2., 3., 4., 5.]]],
ragged_rank=1)
label_data = np.array([[1, 0, 1], [1, 1, 0], [0, 0, 1]])
# Test results in feed forward
np.random.seed(100)
rnn_layer = layer(4, activation='sigmoid')
x_ragged = keras.Input(shape=(None, 5), ragged=True)
y_ragged = rnn_layer(x_ragged)
model = keras.models.Model(x_ragged, y_ragged)
output_ragged = model.predict(ragged_data, steps=1)
x_dense = keras.Input(shape=(3, 5))
masking = keras.layers.Masking()(x_dense)
y_dense = rnn_layer(masking)
model_2 = keras.models.Model(x_dense, y_dense)
dense_data = ragged_data.to_tensor()
output_dense = model_2.predict(dense_data, steps=1)
self.assertAllClose(output_dense, output_ragged)
# Test results with go backwards
np.random.seed(200)
back_rnn_layer = layer(8, go_backwards=True, activation='sigmoid')
x_ragged = keras.Input(shape=(None, 5), ragged=True)
y_ragged = back_rnn_layer(x_ragged)
model = keras.models.Model(x_ragged, y_ragged)
output_ragged = model.predict(ragged_data, steps=1)
x_dense = keras.Input(shape=(3, 5))
masking = keras.layers.Masking()(x_dense)
y_dense = back_rnn_layer(masking)
model_2 = keras.models.Model(x_dense, y_dense)
dense_data = ragged_data.to_tensor()
output_dense = model_2.predict(dense_data, steps=1)
self.assertAllClose(output_dense, output_ragged)
# Test densification of the ragged input
dense_tensor, row_lengths = keras.backend.convert_inputs_if_ragged(
ragged_data)
self.assertAllClose(dense_data, dense_tensor)
# Test optional params, all should work except unrolling
inputs = keras.Input(shape=(None, 5), dtype=dtypes.float32, ragged=True)
custom_rnn_layer = layer(
3, zero_output_for_mask=True, dropout=0.1, use_bias=True)
outputs = custom_rnn_layer(inputs)
model = keras.models.Model(inputs, outputs)
model.compile(
optimizer='sgd',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(ragged_data, label_data)
# Test stateful and full shape specification
inputs = keras.Input(
shape=(None, 5), batch_size=3, dtype=dtypes.float32, ragged=True)
stateful_rnn_layer = layer(3, stateful=True)
outputs = stateful_rnn_layer(inputs)
model = keras.models.Model(inputs, outputs)
model.compile(
optimizer='sgd',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(ragged_data, label_data)
# Must raise error when unroll is set to True
unroll_rnn_layer = layer(3, unroll=True)
with self.assertRaisesRegex(ValueError,
'The input received contains RaggedTensors *'):
unroll_rnn_layer(inputs)
# Check if return sequences outputs are correct
np.random.seed(100)
returning_rnn_layer = layer(4, return_sequences=True)
x_ragged = keras.Input(shape=(None, 5), ragged=True)
y_ragged = returning_rnn_layer(x_ragged)
model = keras.models.Model(x_ragged, y_ragged)
output_ragged = model.predict(ragged_data, steps=1)
self.assertAllClose(output_ragged.ragged_rank, ragged_data.ragged_rank)
self.assertAllClose(output_ragged.row_splits, ragged_data.row_splits)
x_dense = keras.Input(shape=(3, 5))
masking = keras.layers.Masking()(x_dense)
y_dense = returning_rnn_layer(masking)
model_2 = keras.models.Model(x_dense, y_dense)
dense_data = ragged_data.to_tensor()
output_dense = model_2.predict(dense_data, steps=1)
# Convert the output here to ragged for value comparison
output_dense = ragged_tensor.RaggedTensor.from_tensor(
output_dense, lengths=row_lengths)
self.assertAllClose(output_ragged, output_dense)
# Check if return sequences and go_backwards outputs are correct
np.random.seed(100)
returning_rnn_layer = layer(4, go_backwards=True, return_sequences=True)
x_ragged = keras.Input(shape=(None, 5), ragged=True)
y_ragged = returning_rnn_layer(x_ragged)
model = keras.models.Model(x_ragged, y_ragged)
output_ragged = model.predict(ragged_data, steps=1)
self.assertAllClose(output_ragged.ragged_rank, ragged_data.ragged_rank)
self.assertAllClose(output_ragged.row_splits, ragged_data.row_splits)
x_dense = keras.Input(shape=(3, 5))
masking = keras.layers.Masking()(x_dense)
y_dense = returning_rnn_layer(masking)
model_2 = keras.models.Model(x_dense, y_dense)
dense_data = ragged_data.to_tensor()
output_dense = model_2.predict(dense_data, steps=1)
    # Note that the raw outputs for dense and ragged input will differ when
    # go_backwards=True. Consider the following input
    # [[a, b, 0], [c, 0, 0], [d, e, f]], where the 0s are masked values.
    # The dense output will be [[0, b, a], [0, 0, c], [f, e, d]] since the
    # whole sequence is processed from the end.
    # The ragged output will be [[b, a], [c], [f, e, d]] since it simply
    # ignores the 0s. If we densify the ragged output, 0s are by default
    # inserted at the end (rather than at the beginning), which makes the
    # output [[b, a, 0], [c, 0, 0], [f, e, d]]. Given this, we need to verify
    # that reverse(ragged_output.to_tensor()) == reverse(dense_output)
output_dense = keras.backend.reverse(output_dense, [1])
output_dense = ragged_tensor.RaggedTensor.from_tensor(
output_dense, lengths=row_lengths)
self.assertAllClose(keras.backend.reverse(output_ragged, [1]), output_dense)
def test_stateless_rnn_cell(self):
class StatelessCell(keras.layers.Layer):
def __init__(self):
self.state_size = ((), [], ())
self.output_size = None
super(StatelessCell, self).__init__()
def build(self, input_shape):
self.output_size = input_shape[-1]
def call(self, inputs, states):
return inputs, states
x = keras.Input((None, 5))
cell = StatelessCell()
initial_state = nest.map_structure(lambda t: None, cell.state_size)
layer = keras.layers.RNN(cell)
y = layer(x, initial_state=initial_state)
model = keras.models.Model(x, y)
model.compile(
optimizer='rmsprop',
loss='mse',
run_eagerly=testing_utils.should_run_eagerly())
model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 5)))
@parameterized.parameters(
[rnn_v1.SimpleRNN, rnn_v1.GRU, rnn_v1.LSTM, rnn_v2.GRU, rnn_v2.LSTM])
def test_for_enable_caching_device_for_layer(self, layer_cls):
expected_caching_device = ops.executing_eagerly_outside_functions()
layer = layer_cls(1)
self.assertEqual(layer.cell._enable_caching_device, expected_caching_device)
    # Make sure the config entry only appears when a non-default value is used.
config = layer.get_config()
self.assertNotIn('enable_caching_device', config)
non_default_value = not expected_caching_device
layer = layer_cls(1, enable_caching_device=non_default_value)
self.assertEqual(layer.cell._enable_caching_device, non_default_value)
config = layer.get_config()
self.assertEqual(config['enable_caching_device'], non_default_value)
@parameterized.parameters(
[rnn_v1.SimpleRNNCell, rnn_v1.GRUCell, rnn_v1.LSTMCell, rnn_v2.GRUCell,
rnn_v2.LSTMCell])
def test_for_enable_caching_device_for_cell(self, cell_cls):
expected_caching_device = ops.executing_eagerly_outside_functions()
cell = cell_cls(1)
self.assertEqual(cell._enable_caching_device, expected_caching_device)
    # Make sure the config entry only appears when a non-default value is used.
config = cell.get_config()
self.assertNotIn('enable_caching_device', config)
non_default_value = not expected_caching_device
cell = cell_cls(1, enable_caching_device=non_default_value)
self.assertEqual(cell._enable_caching_device, non_default_value)
config = cell.get_config()
self.assertEqual(config['enable_caching_device'], non_default_value)
class RNNCellWithConstants(keras.layers.Layer):
def __init__(self, units, constant_size, **kwargs):
self.units = units
self.state_size = units
self.constant_size = constant_size
super(RNNCellWithConstants, self).__init__(**kwargs)
def build(self, input_shape):
self.input_kernel = self.add_weight(
shape=(input_shape[-1], self.units),
initializer='uniform',
name='kernel')
self.recurrent_kernel = self.add_weight(
shape=(self.units, self.units),
initializer='uniform',
name='recurrent_kernel')
self.constant_kernel = self.add_weight(
shape=(self.constant_size, self.units),
initializer='uniform',
name='constant_kernel')
self.built = True
def call(self, inputs, states, constants):
[prev_output] = states
[constant] = constants
h_input = keras.backend.dot(inputs, self.input_kernel)
h_state = keras.backend.dot(prev_output, self.recurrent_kernel)
h_const = keras.backend.dot(constant, self.constant_kernel)
output = h_input + h_state + h_const
return output, [output]
def get_config(self):
config = {'units': self.units, 'constant_size': self.constant_size}
base_config = super(RNNCellWithConstants, self).get_config()
return dict(list(base_config.items()) + list(config.items()))
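# Added commentary (not in the original file): keras.layers.RNN forwards the
# `constants` keyword straight through to the cell, so the cell above is
# exercised as, e.g.:
#
#   layer = keras.layers.RNN(RNNCellWithConstants(32, constant_size=3))
#   y = layer(x, constants=c)  # c is fed unchanged to every timestep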
class Minimal2DRNNCell(keras.layers.Layer):
"""The minimal 2D RNN cell is a simple combination of 2 1-D RNN cell.
Both internal state and output have 2 dimensions and are orthogonal
between each other.
"""
def __init__(self, unit_a, unit_b, **kwargs):
self.unit_a = unit_a
self.unit_b = unit_b
self.state_size = tensor_shape.TensorShape([unit_a, unit_b])
self.output_size = tensor_shape.TensorShape([unit_a, unit_b])
super(Minimal2DRNNCell, self).__init__(**kwargs)
def build(self, input_shape):
input_a = input_shape[-2]
input_b = input_shape[-1]
self.kernel = self.add_weight(
shape=(input_a, input_b, self.unit_a, self.unit_b),
initializer='uniform',
name='kernel')
self.recurring_kernel = self.add_weight(
shape=(self.unit_a, self.unit_b, self.unit_a, self.unit_b),
initializer='uniform',
name='recurring_kernel')
self.bias = self.add_weight(
shape=(self.unit_a, self.unit_b), initializer='uniform', name='bias')
self.built = True
def call(self, inputs, states):
prev_output = states[0]
h = special_math_ops.einsum('bij,ijkl->bkl', inputs, self.kernel)
h += array_ops.expand_dims(self.bias, axis=0)
output = h + special_math_ops.einsum('bij,ijkl->bkl', prev_output,
self.recurring_kernel)
return output, [output]
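# Added commentary (not in the original file): the einsum 'bij,ijkl->bkl'
# contracts the (i, j) axes of a (batch, i, j) input against an (i, j, k, l)
# kernel, yielding a (batch, k, l) output, i.e. a 2-D analogue of x @ W.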
class PlusOneRNNCell(keras.layers.Layer):
"""Add one to the input and state.
This cell is used for testing state_size and output_size."""
def __init__(self, num_unit, **kwargs):
self.state_size = num_unit
super(PlusOneRNNCell, self).__init__(**kwargs)
def build(self, input_shape):
self.output_size = input_shape[-1]
def call(self, inputs, states):
return inputs + 1, [states[0] + 1]
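# Added commentary (not in the original file): state_size is fixed at
# construction while output_size only becomes known in build(), which is
# exactly the mismatch exercised by test_inconsistent_output_state_size above.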
class NestedCell(keras.layers.Layer):
def __init__(self, unit_1, unit_2, unit_3, use_tuple=False, **kwargs):
self.unit_1 = unit_1
self.unit_2 = unit_2
self.unit_3 = unit_3
self.use_tuple = use_tuple
super(NestedCell, self).__init__(**kwargs)
# A nested state.
if use_tuple:
self.state_size = NestedState(
s1=unit_1, s2=tensor_shape.TensorShape([unit_2, unit_3]))
else:
self.state_size = (unit_1, tensor_shape.TensorShape([unit_2, unit_3]))
self.output_size = (unit_1, tensor_shape.TensorShape([unit_2, unit_3]))
def build(self, inputs_shape):
# expect input_shape to contain 2 items, [(batch, i1), (batch, i2, i3)]
if self.use_tuple:
input_1 = inputs_shape.t1[1]
input_2, input_3 = inputs_shape.t2[1:]
else:
input_1 = inputs_shape[0][1]
input_2, input_3 = inputs_shape[1][1:]
self.kernel_1 = self.add_weight(
shape=(input_1, self.unit_1), initializer='uniform', name='kernel_1')
self.kernel_2_3 = self.add_weight(
shape=(input_2, input_3, self.unit_2, self.unit_3),
initializer='uniform',
name='kernel_2_3')
def call(self, inputs, states):
# inputs should be in [(batch, input_1), (batch, input_2, input_3)]
# state should be in shape [(batch, unit_1), (batch, unit_2, unit_3)]
flatten_inputs = nest.flatten(inputs)
s1, s2 = states
output_1 = math_ops.matmul(flatten_inputs[0], self.kernel_1)
output_2_3 = special_math_ops.einsum('bij,ijkl->bkl', flatten_inputs[1],
self.kernel_2_3)
state_1 = s1 + output_1
state_2_3 = s2 + output_2_3
output = [output_1, output_2_3]
new_states = NestedState(s1=state_1, s2=state_2_3)
return output, new_states
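# Added commentary (not in the original file): nest.flatten lets the cell
# accept either the plain tuple or the NestedInput namedtuple used in the
# nested input/output tests above, and the returned NestedState mirrors the
# structure declared in state_size.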
if __name__ == '__main__':
test.main()
| 36.354805 | 80 | 0.650672 |
acf4edf314bff68b8cb364d43ee8fcb80b103506 | 1,163 | py | Python | crazyflie_demo/scripts/test_high_level.py | wuwushrek/crazyflie_ros | 252064f321f530b5e00466097f420f64a9ccb20f | [
"MIT"
] | 1 | 2019-03-17T02:35:13.000Z | 2019-03-17T02:35:13.000Z | crazyflie_demo/scripts/test_high_level.py | wuwushrek/crazyflie_ros | 252064f321f530b5e00466097f420f64a9ccb20f | [
"MIT"
] | null | null | null | crazyflie_demo/scripts/test_high_level.py | wuwushrek/crazyflie_ros | 252064f321f530b5e00466097f420f64a9ccb20f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import rospy
import crazyflie
import time
import uav_trajectory
if __name__ == '__main__':
rospy.init_node('test_high_level')
cf = crazyflie.Crazyflie("crazyflie", "/vicon/crazyflie/crazyflie")
cf.setParam("commander/enHighLevel", 1)
cf.setParam("stabilizer/estimator", 2) # Use EKF
cf.setParam("stabilizer/controller", 2) # Use mellinger controller
# cf.takeoff(targetHeight = 0.5, duration = 2.0)
# time.sleep(3.0)
# cf.goTo(goal = [0.5, 0.0, 0.0], yaw=0.2, duration = 2.0, relative = True)
# time.sleep(3.0)
# cf.land(targetHeight = 0.0, duration = 2.0)
traj1 = uav_trajectory.Trajectory()
traj1.loadcsv("takeoff.csv")
traj2 = uav_trajectory.Trajectory()
traj2.loadcsv("figure8.csv")
print(traj1.duration)
cf.uploadTrajectory(0, 0, traj1)
cf.uploadTrajectory(1, len(traj1.polynomials), traj2)
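    # Added note (assumption from the helper's uploadTrajectory(id, pieceOffset,
    # traj) signature): the second argument is the piece offset into on-board
    # trajectory memory, so traj2 is stored right after traj1's pieces.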
cf.startTrajectory(0, timescale=1.0)
time.sleep(traj1.duration * 2.0)
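    # Added note (assumption): timescale=2.0 plays the trajectory at half
    # speed, which is why the sleep below waits traj2.duration * 2.0.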
cf.startTrajectory(1, timescale=2.0)
time.sleep(traj2.duration * 2.0)
cf.startTrajectory(0, timescale=1.0, reverse=True)
time.sleep(traj1.duration * 1.0)
cf.stop()
| 25.282609 | 79 | 0.672399 |
acf4ee0fc5bddf40aaf6b5195ef8fbc4aea1e114 | 1,036 | py | Python | lab/lab14/Q1.py | c235gsy/Sustech_Data-Structure-and-Algorithm-Analysis | fcbd450216e9e62cd3365ad2a8ccab00b9eb679f | [
"MIT"
] | 1 | 2020-01-04T13:35:29.000Z | 2020-01-04T13:35:29.000Z | lab/lab14/Q1.py | c235gsy/Sustech_Data-Structure-and-Algorithm-Analysis | fcbd450216e9e62cd3365ad2a8ccab00b9eb679f | [
"MIT"
] | null | null | null | lab/lab14/Q1.py | c235gsy/Sustech_Data-Structure-and-Algorithm-Analysis | fcbd450216e9e62cd3365ad2a8ccab00b9eb679f | [
"MIT"
] | null | null | null |
def Dijkstra(G, s, INF=999):
    book = set()                             # settled vertices
    path = dict((k, []) for k in G.keys())   # best known path to each vertex
    minv = s
    dis = dict((k, INF) for k in G.keys())   # tentative distances
    dis[s] = 0
    path[s] = [s]
    while len(book) < len(G):
        book.add(minv)
        # relax every edge leaving the newly settled vertex
        for w in G[minv]:
            if dis[minv] + G[minv][w] < dis[w]:
                path[w] = path[minv] + [w]
                dis[w] = dis[minv] + G[minv][w]
        # pick the closest vertex that has not been settled yet
        new = INF
        for v in dis.keys():
            if v in book:
                continue
            if dis[v] < new:
                new = dis[v]
                minv = v
    return dis, path
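# Added note (not in the original lab file): the linear scan for the closest
# unsettled vertex makes this O(V^2 + E); a heapq-based priority queue would
# reduce it to O((V + E) log V).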
G = {"V1": {"V2": 2, "V4": 1},
"V2": {"V4": 3, "V5": 10},
"V3": {"V1": 4, "V6": 5},
"V4": {"V6": 8, "V7": 4, "V3": 2, "V5": 2},
"V5": {"V7": 6},
"V6": {},
"V7": {"V6": 1}}
dis, path = Dijkstra(G, s="V1")
s = "V1"
print("The start vertex is the V1\n")
for v in dis.keys():
print("The shortest weighted path from %s to %s is : %d" % (s, v, dis[v]))
print("The path is: %s \n" % ("->".join(path[v]))) | 26.564103 | 78 | 0.411197 |
acf4ee3fa224ce5c3ceae0a2ddd8ffba17684160 | 326 | py | Python | apps/post/urls.py | Rainlv/next-server | 59e97ae2bc42d6da08b9e17d75810541f8caa3d9 | [
"MIT"
] | null | null | null | apps/post/urls.py | Rainlv/next-server | 59e97ae2bc42d6da08b9e17d75810541f8caa3d9 | [
"MIT"
] | null | null | null | apps/post/urls.py | Rainlv/next-server | 59e97ae2bc42d6da08b9e17d75810541f8caa3d9 | [
"MIT"
] | 8 | 2022-02-18T09:37:44.000Z | 2022-03-10T05:43:34.000Z | from django.urls import path
from apps.post import views as user_views
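# Added note (not in the original file): these resolve to e.g. 'post' ->
# get_post_list and 'post/42' -> get_post_by_id(request, post_id=42).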
urlpatterns = [
    path('post', user_views.get_post_list, name='get_post_list'),
    path('post/<int:post_id>', user_views.get_post_by_id, name='get_post_by_id'),
    # note: a route string must not start with '/', or Django will never match it
    path('manage/post/<int:post_id>', user_views.update_post, name='update_post'),
] | 40.75 | 84 | 0.730061 |
acf4ee41adbe1d7a59bbd0fa9f1e7a2a48d20f99 | 2,947 | py | Python | inputs/models.py | asommer70/formation | 88281c935d242cc9b77882c71fedd604948ee6f9 | [
"MIT"
] | null | null | null | inputs/models.py | asommer70/formation | 88281c935d242cc9b77882c71fedd604948ee6f9 | [
"MIT"
] | null | null | null | inputs/models.py | asommer70/formation | 88281c935d242cc9b77882c71fedd604948ee6f9 | [
"MIT"
] | null | null | null | from django.contrib.auth import get_user_model
from django.contrib.postgres.fields import JSONField
from django.db import models
from django.utils import timezone
from forms.models import Form
from routes.models import Route, Destination
import logging
User = get_user_model()
logger = logging.getLogger(__name__)
class Input(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
status = models.CharField(max_length=255, default="new")
data = JSONField(blank=True, null=True)
user = models.ForeignKey(User, on_delete=models.CASCADE, default=None)
form = models.ForeignKey(Form, on_delete=models.CASCADE, default=None)
route = models.ForeignKey(
Route,
on_delete=models.SET_NULL,
default=None,
null=True,
blank=True
)
route_date = models.DateTimeField(blank=True, null=True)
route_holder = models.ForeignKey(
User,
on_delete=models.CASCADE,
default=None,
null=True,
blank=True,
related_name='user_holding_input'
)
route_sender = models.ForeignKey(
User,
on_delete=models.CASCADE,
default=None,
null=True,
blank=True,
related_name='user_sent_input'
)
current_dest = models.ForeignKey(
Destination,
on_delete=models.CASCADE,
default=None,
blank=True,
null=True
)
step = models.IntegerField(
default=0,
blank=True,
null=True
)
class Meta:
ordering = ['-created_at']
def __str__(self):
return self.user.username + ": " + self.status
def save(self, *args, **kwargs):
logger.warning('{} | Input: "{}" updated/created by: {}'.format(
timezone.now(),
self.form.name,
self.user.username
))
super(Input, self).save(*args, **kwargs)
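# Added note (not in the original app): Approval, Comment and Attachment below
# all reference an Input, so a submission accumulates sign-offs, discussion and
# uploaded files while route_holder/current_dest/step track where it sits on
# its Route.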
class Approval(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, on_delete=models.CASCADE, default=None)
input = models.ForeignKey(Input, on_delete=models.CASCADE, default=None)
class Meta:
ordering = ['-created_at']
class Comment(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, on_delete=models.CASCADE, default=None)
input = models.ForeignKey(Input, on_delete=models.CASCADE, default=None)
text = models.CharField(max_length=2048)
class Meta:
ordering = ['-created_at']
class Attachment(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
user = models.ForeignKey(User, on_delete=models.CASCADE, default=None)
input = models.ForeignKey(Input, on_delete=models.CASCADE, default=None)
upload = models.FileField(blank=True, null=True, upload_to='%Y/%m/')
class Meta:
ordering = ['created_at']
| 28.892157 | 76 | 0.664744 |
acf4eea3d4ebb288fd60d22e62e967d997c72f6f | 14,911 | py | Python | gnuradio-3.7.13.4/grc/core/Port.py | v1259397/cosmic-gnuradio | 64c149520ac6a7d44179c3f4a38f38add45dd5dc | [
"BSD-3-Clause"
] | 1 | 2021-03-09T07:32:37.000Z | 2021-03-09T07:32:37.000Z | gnuradio-3.7.13.4/grc/core/Port.py | v1259397/cosmic-gnuradio | 64c149520ac6a7d44179c3f4a38f38add45dd5dc | [
"BSD-3-Clause"
] | null | null | null | gnuradio-3.7.13.4/grc/core/Port.py | v1259397/cosmic-gnuradio | 64c149520ac6a7d44179c3f4a38f38add45dd5dc | [
"BSD-3-Clause"
] | null | null | null | """
Copyright 2008-2017 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
from itertools import chain
from .Constants import DEFAULT_DOMAIN, GR_STREAM_DOMAIN, GR_MESSAGE_DOMAIN
from .Element import Element
from . import Constants
class LoopError(Exception):
pass
def _upstream_ports(port):
if port.is_sink:
return _sources_from_virtual_sink_port(port)
else:
return _sources_from_virtual_source_port(port)
def _sources_from_virtual_sink_port(sink_port, _traversed=None):
"""
    Resolve the source port(s) connected to the given virtual sink port.
    Recursively resolves subsequent ports via the virtual-source resolver below.
"""
source_ports_per_virtual_connection = (
# there can be multiple ports per virtual connection
_sources_from_virtual_source_port(c.get_source(), _traversed) # type: list
for c in sink_port.get_enabled_connections()
)
return list(chain(*source_ports_per_virtual_connection)) # concatenate generated lists of ports
def _sources_from_virtual_source_port(source_port, _traversed=None):
"""
Recursively resolve source ports over the virtual connections.
Keep track of traversed sources to avoid recursive loops.
"""
_traversed = set(_traversed or []) # a new set!
if source_port in _traversed:
raise LoopError('Loop found when resolving port type')
_traversed.add(source_port)
block = source_port.get_parent()
flow_graph = block.get_parent()
if not block.is_virtual_source():
return [source_port] # nothing to resolve, we're done
stream_id = block.get_param('stream_id').get_value()
# currently the validation does not allow multiple virtual sinks and one virtual source
# but in the future it may...
connected_virtual_sink_blocks = (
b for b in flow_graph.get_enabled_blocks()
if b.is_virtual_sink() and b.get_param('stream_id').get_value() == stream_id
)
source_ports_per_virtual_connection = (
_sources_from_virtual_sink_port(b.get_sinks()[0], _traversed) # type: list
for b in connected_virtual_sink_blocks
)
return list(chain(*source_ports_per_virtual_connection)) # concatenate generated lists of ports
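# Added commentary (not in the original module): given a flow graph such as
#   real_block -> virtual sink "s" ... virtual source "s" -> other_block
# _upstream_ports(other_block's sink) hops across the shared stream_id and
# returns real_block's actual source port, raising LoopError if the virtual
# connections ever cycle back on themselves.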
def _downstream_ports(port):
if port.is_source:
return _sinks_from_virtual_source_port(port)
else:
return _sinks_from_virtual_sink_port(port)
def _sinks_from_virtual_source_port(source_port, _traversed=None):
"""
    Resolve the sink port(s) connected to the given virtual source port.
    Recursively resolves subsequent ports via the virtual-sink resolver below.
"""
sink_ports_per_virtual_connection = (
# there can be multiple ports per virtual connection
_sinks_from_virtual_sink_port(c.get_sink(), _traversed) # type: list
for c in source_port.get_enabled_connections()
)
return list(chain(*sink_ports_per_virtual_connection)) # concatenate generated lists of ports
def _sinks_from_virtual_sink_port(sink_port, _traversed=None):
"""
Recursively resolve sink ports over the virtual connections.
Keep track of traversed sinks to avoid recursive loops.
"""
_traversed = set(_traversed or []) # a new set!
if sink_port in _traversed:
raise LoopError('Loop found when resolving port type')
_traversed.add(sink_port)
block = sink_port.get_parent()
flow_graph = block.get_parent()
if not block.is_virtual_sink():
return [sink_port]
stream_id = block.get_param('stream_id').get_value()
connected_virtual_source_blocks = (
b for b in flow_graph.get_enabled_blocks()
if b.is_virtual_source() and b.get_param('stream_id').get_value() == stream_id
)
sink_ports_per_virtual_connection = (
_sinks_from_virtual_source_port(b.get_sources()[0], _traversed) # type: list
for b in connected_virtual_source_blocks
)
return list(chain(*sink_ports_per_virtual_connection)) # concatenate generated lists of ports
class Port(Element):
is_port = True
def __init__(self, block, n, dir):
"""
Make a new port from nested data.
Args:
block: the parent element
n: the nested odict
dir: the direction
"""
self._n = n
if n['type'] == 'message':
n['domain'] = GR_MESSAGE_DOMAIN
if 'domain' not in n:
n['domain'] = DEFAULT_DOMAIN
elif n['domain'] == GR_MESSAGE_DOMAIN:
n['key'] = n['name']
n['type'] = 'message' # For port color
if n['type'] == 'msg':
n['key'] = 'msg'
if not n.find('key'):
n['key'] = str(next(block.port_counters[dir == 'source']))
# Build the port
Element.__init__(self, block)
# Grab the data
self._name = n['name']
self._key = n['key']
self._type = n['type'] or ''
self._domain = n['domain']
self._hide = n.find('hide') or ''
self._dir = dir
self._hide_evaluated = False # Updated on rewrite()
self._nports = n.find('nports') or ''
self._vlen = n.find('vlen') or ''
self._optional = n.find('optional') or ''
self._optional_evaluated = False # Updated on rewrite()
self._clones = [] # References to cloned ports (for nports > 1)
def __str__(self):
if self.is_source:
return 'Source - {0}({1})'.format(self.get_name(), self.get_key())
if self.is_sink:
return 'Sink - {0}({1})'.format(self.get_name(), self.get_key())
def get_types(self):
return Constants.TYPE_TO_SIZEOF.keys()
def is_type_empty(self):
return not self._n['type'] or not self.get_parent().resolve_dependencies(self._n['type'])
def validate(self):
if self.get_type() not in self.get_types():
self.add_error_message('Type "{0}" is not a possible type.'.format(self.get_type()))
platform = self.get_parent().get_parent().get_parent()
if self.get_domain() not in platform.domains:
self.add_error_message('Domain key "{0}" is not registered.'.format(self.get_domain()))
if not self.get_enabled_connections() and not self.get_optional():
self.add_error_message('Port is not connected.')
# Message port logic
if self.get_type() == 'msg':
if self.get_nports():
self.add_error_message('A port of type "msg" cannot have "nports" set.')
if self.get_vlen() != 1:
self.add_error_message('A port of type "msg" must have a "vlen" of 1.')
def rewrite(self):
"""
        Re-evaluate the port's type, hide and optional flags, and update the
        domain (and key) if they were deduced from a dynamic port type.
"""
del self._error_messages[:]
if self.is_type_empty():
self.resolve_empty_type()
hide = self.get_parent().resolve_dependencies(self._hide).strip().lower()
self._hide_evaluated = False if hide in ('false', 'off', '0') else bool(hide)
optional = self.get_parent().resolve_dependencies(self._optional).strip().lower()
self._optional_evaluated = False if optional in ('false', 'off', '0') else bool(optional)
        # Update the domain if it was deduced from a (dynamic) port type
type_ = self.get_type()
if self._domain == GR_STREAM_DOMAIN and type_ == "message":
self._domain = GR_MESSAGE_DOMAIN
self._key = self._name
if self._domain == GR_MESSAGE_DOMAIN and type_ != "message":
self._domain = GR_STREAM_DOMAIN
self._key = '0' # Is rectified in rewrite()
def resolve_virtual_source(self):
"""Only used by Generator after validation is passed"""
return _upstream_ports(self)
def resolve_empty_type(self):
def find_port(finder):
try:
return next((p for p in finder(self) if not p.is_type_empty()), None)
            except LoopError as error:
                self.add_error_message(str(error))
            except Exception:
                # next() above has a default, so only unexpected lookup errors land here
                pass
try:
port = find_port(_upstream_ports) or find_port(_downstream_ports)
self._type = str(port.get_type())
self._vlen = str(port.get_vlen())
except Exception:
# Reset type and vlen
self._type = self._vlen = ''
def get_vlen(self):
"""
Get the vector length.
If the evaluation of vlen cannot be cast to an integer, return 1.
Returns:
the vector length or 1
"""
vlen = self.get_parent().resolve_dependencies(self._vlen)
try:
return int(self.get_parent().get_parent().evaluate(vlen))
        except Exception:
            return 1
def get_nports(self):
"""
Get the number of ports.
        If nports is blank, return an empty string.
If the evaluation of nports cannot be cast to a positive integer, return 1.
Returns:
the number of ports or 1
"""
if self._nports == '':
return ''
nports = self.get_parent().resolve_dependencies(self._nports)
try:
return max(1, int(self.get_parent().get_parent().evaluate(nports)))
        except Exception:
            return 1
def get_optional(self):
return self._optional_evaluated
def get_color(self):
"""
Get the color that represents this port's type.
Codes differ for ports where the vec length is 1 or greater than 1.
Returns:
a hex color code.
"""
try:
color = Constants.TYPE_TO_COLOR[self.get_type()]
vlen = self.get_vlen()
if vlen == 1:
return color
color_val = int(color[1:], 16)
r = (color_val >> 16) & 0xff
g = (color_val >> 8) & 0xff
b = (color_val >> 0) & 0xff
dark = (0, 0, 30, 50, 70)[min(4, vlen)]
r = max(r-dark, 0)
g = max(g-dark, 0)
b = max(b-dark, 0)
# TODO: Change this to .format()
return '#%.2x%.2x%.2x' % (r, g, b)
        except Exception:
            return '#FFFFFF'
def get_clones(self):
"""
Get the clones of this master port (nports > 1)
Returns:
a list of ports
"""
return self._clones
def add_clone(self):
"""
Create a clone of this (master) port and store a reference in self._clones.
The new port name (and key for message ports) will have index 1... appended.
If this is the first clone, this (master) port will get a 0 appended to its name (and key)
Returns:
the cloned port
"""
# Add index to master port name if there are no clones yet
if not self._clones:
self._name = self._n['name'] + '0'
# Also update key for none stream ports
if not self._key.isdigit():
self._key = self._name
# Prepare a copy of the odict for the clone
n = self._n.copy()
# Remove nports from the key so the copy cannot be a duplicator
if 'nports' in n:
n.pop('nports')
n['name'] = self._n['name'] + str(len(self._clones) + 1)
# Dummy value 99999 will be fixed later
n['key'] = '99999' if self._key.isdigit() else n['name']
# Clone
port = self.__class__(self.get_parent(), n, self._dir)
self._clones.append(port)
return port
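    # Example (illustrative, not from the original code): a master port named
    # 'in' is renamed to 'in0' when its first clone is created; the clones are
    # then named 'in1', 'in2', ... and removed again via remove_clone() below.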
def remove_clone(self, port):
"""
Remove a cloned port (from the list of clones only)
        Remove the trailing 0 from the master port name (and key) if there are no more clones left.
"""
self._clones.remove(port)
# Remove index from master port name if there are no more clones
if not self._clones:
self._name = self._n['name']
# Also update key for none stream ports
if not self._key.isdigit():
self._key = self._name
def get_name(self):
number = ''
if self.get_type() == 'bus':
            # a list (not a lazy filter) so that .index() below works on Python 3 too
            busses = [a for a in self.get_parent().get_ports_gui() if a._dir == self._dir]
number = str(busses.index(self)) + '#' + str(len(self.get_associated_ports()))
return self._name + number
def get_key(self):
return self._key
@property
def is_sink(self):
return self._dir == 'sink'
@property
def is_source(self):
return self._dir == 'source'
def get_type(self):
return self.get_parent().resolve_dependencies(self._type)
def get_domain(self):
return self._domain
def get_hide(self):
return self._hide_evaluated
def get_connections(self):
"""
Get all connections that use this port.
Returns:
a list of connection objects
"""
connections = self.get_parent().get_parent().connections
        return [c for c in connections if c.get_source() is self or c.get_sink() is self]
def get_enabled_connections(self):
"""
Get all enabled connections that use this port.
Returns:
a list of connection objects
"""
        # return a list so emptiness checks (e.g. in validate()) behave correctly
        return [c for c in self.get_connections() if c.get_enabled()]
def get_associated_ports(self):
if not self.get_type() == 'bus':
return [self]
else:
if self.is_source:
get_ports = self.get_parent().get_sources
bus_structure = self.get_parent().current_bus_structure['source']
else:
get_ports = self.get_parent().get_sinks
bus_structure = self.get_parent().current_bus_structure['sink']
ports = [i for i in get_ports() if not i.get_type() == 'bus']
if bus_structure:
busses = [i for i in get_ports() if i.get_type() == 'bus']
bus_index = busses.index(self)
                ports = [a for a in ports if ports.index(a) in bus_structure[bus_index]]
return ports
| 35.250591 | 100 | 0.618134 |
acf4eebf27d6fbda446f68f325ff80adfa4614d6 | 3,297 | py | Python | cli/kubernetes_helper.py | jsteemann/database-performance-comparison | 5da2e97b18cdf9ddd935d3007a3ff6c8ea84b238 | [
"Apache-2.0"
] | null | null | null | cli/kubernetes_helper.py | jsteemann/database-performance-comparison | 5da2e97b18cdf9ddd935d3007a3ff6c8ea84b238 | [
"Apache-2.0"
] | null | null | null | cli/kubernetes_helper.py | jsteemann/database-performance-comparison | 5da2e97b18cdf9ddd935d3007a3ff6c8ea84b238 | [
"Apache-2.0"
] | null | null | null | import socket
from kubernetes import client, config
from kubernetes.stream import portforward
class Kubernetes:
def __init__(self):
config.load_kube_config()
self.api_instance = client.CoreV1Api()
def find_pod(self, namespace, label_key, label_value):
for pod in self.api_instance.list_namespaced_pod(namespace).items:
labels = pod.metadata.labels
if labels and labels.get(label_key) == label_value:
return pod.metadata.name
def patch_socket(self):
"""Taken from https://github.com/kubernetes-client/python/blob/master/examples/pod_portforward.py and adapted
"""
socket_create_connection = socket.create_connection
def kubernetes_create_connection(address, *args, **kwargs):
dns_name = address[0]
if isinstance(dns_name, bytes):
dns_name = dns_name.decode()
dns_name = dns_name.split(".")
if dns_name[-1] != 'kubernetes':
return socket_create_connection(address, *args, **kwargs)
if len(dns_name) not in (3, 4):
raise RuntimeError("Unexpected kubernetes DNS name.")
namespace = dns_name[-2]
name = dns_name[0]
port = address[1]
if len(dns_name) == 4:
if dns_name[1] in ('svc', 'service'):
service = self.api_instance.read_namespaced_service(name, namespace)
for service_port in service.spec.ports:
if service_port.port == port:
port = service_port.target_port
break
else:
raise RuntimeError(
"Unable to find service port: %s" % port)
label_selector = []
for key, value in service.spec.selector.items():
label_selector.append("%s=%s" % (key, value))
pods = self.api_instance.list_namespaced_pod(
namespace, label_selector=",".join(label_selector)
)
if not pods.items:
raise RuntimeError("Unable to find service pods.")
name = pods.items[0].metadata.name
if isinstance(port, str):
for container in pods.items[0].spec.containers:
for container_port in container.ports:
if container_port.name == port:
port = container_port.container_port
break
else:
continue
break
else:
raise RuntimeError("Unable to find service port name: %s" % port)
elif dns_name[1] != 'pod':
raise RuntimeError("Unsupported resource type: %s" % dns_name[1])
pf = portforward(self.api_instance.connect_get_namespaced_pod_portforward,
name, namespace, ports=str(port))
return pf.socket(port)
socket.create_connection = kubernetes_create_connection
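        # Illustrative usage sketch (service/namespace/port below are assumptions,
        # not taken from this module). After patching, any client built on
        # socket.create_connection can reach in-cluster services through the
        # special ".kubernetes" pseudo-domain handled above:
        #
        #   k8s = Kubernetes()
        #   k8s.patch_socket()
        #   sock = socket.create_connection(("my-db.svc.bench.kubernetes", 5432))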
| 46.43662 | 117 | 0.525629 |
acf4eee4345921d805e283a6e29b6711dcf0cd2b | 1,633 | py | Python | preprocess/show_bb.py | bokutotu/yolov1 | 4686c37543c0eb74831a19e8f51196ba36a5b38f | [
"MIT"
] | null | null | null | preprocess/show_bb.py | bokutotu/yolov1 | 4686c37543c0eb74831a19e8f51196ba36a5b38f | [
"MIT"
] | null | null | null | preprocess/show_bb.py | bokutotu/yolov1 | 4686c37543c0eb74831a19e8f51196ba36a5b38f | [
"MIT"
] | null | null | null | from PIL import Image
from PIL import ImageDraw
from matplotlib import pyplot as plt
from pathlib import Path
import pandas as pd
def show_bb(img, x, y, w, h, text="ans", textcolor=(255,255,255), bbcolor=(255,0,0)):
draw = ImageDraw.Draw(img)
text_w, text_h = draw.textsize(text)
label_y = y if y <= text_h else y - text_h
draw.rectangle((x, label_y, x+w, label_y+h), outline=bbcolor)
draw.rectangle((x, label_y, x+text_w, label_y+text_h), outline=bbcolor, fill=bbcolor)
draw.text((x, label_y), text, fill=textcolor)
def main():
df = pd.read_csv("test/annotation.csv")
img = Image.open("./test/0326-SR2-SH6T-0130-0230-1A.jpg")
tmp = df[df["filename"] == "0326-SR2-SH6T-0130-0230-1A.jpg"]
x = tmp["x_min"]; y = tmp["y_min"]; width = tmp["width"]; height = tmp["height"]
# img = Image.open("../RawData/val/0326-SR2-SH6T-0130-0230-1A.jpg")
# boxes = []
# with open("../pytorch-YOLO-v1/bt_im.txt") as f:
# lines = f.readlines()
# for line in lines:
# li_li = line.split(" ")
# if li_li[0] == "0326-SR2-SH6T-0130-0230-1A.jpg":
# li = li_li[1:-1]
# print(li)
# for i in range(0,len(li),5):
# b = []
# b.append(int(li[i]))
# b.append(int(li[i+1]))
# b.append(int(li[i+2]))
# b.append(int(li[i+3]))
# boxes.append(b)
for idx in range(len(x)):
show_bb(img, x[idx], y[idx], width[idx], height[idx], )
img.save("res.png")
if __name__ == "__main__":
main()
| 34.744681 | 89 | 0.545009 |
acf4ef8d2f60b317f37e09dc4fc3b6e2d2cafd87 | 194 | py | Python | module_4/tests/arraySum.py | szikszail/py-webinar | fafc0a5d06e459c8654de89ff5fedab0732d8744 | [
"MIT"
] | 1 | 2020-04-13T19:59:27.000Z | 2020-04-13T19:59:27.000Z | module_4/tests/arraySum.py | szikszail/py-webinar | fafc0a5d06e459c8654de89ff5fedab0732d8744 | [
"MIT"
] | null | null | null | module_4/tests/arraySum.py | szikszail/py-webinar | fafc0a5d06e459c8654de89ff5fedab0732d8744 | [
"MIT"
] | null | null | null | import unittest
from module_4.arraySum import arraySum
class ArraySumTest(unittest.TestCase):
def test_exist(self):
self.assertNotEqual(arraySum, None)
# TODO remaining tests
| 19.4 | 43 | 0.752577 |
acf4f0d3ae283197f36c22be872583d8ee0247ae | 4,188 | py | Python | dashio/mqttconnection.py | robmakie/dash.py | 5b0e4515be96224e0b34c1126eee9b17789d38d1 | [
"MIT"
] | null | null | null | dashio/mqttconnection.py | robmakie/dash.py | 5b0e4515be96224e0b34c1126eee9b17789d38d1 | [
"MIT"
] | null | null | null | dashio/mqttconnection.py | robmakie/dash.py | 5b0e4515be96224e0b34c1126eee9b17789d38d1 | [
"MIT"
] | null | null | null | import threading
import paho.mqtt.client as mqtt
import ssl
import logging
import zmq
import uuid
# TODO: Add documentation
class mqttConnection(threading.Thread):
"""Setups and manages a connection thread to the Dash Server."""
def __on_connect(self, client, userdata, flags, rc):
logging.debug("rc: %s", str(rc))
def __on_message(self, client, obj, msg):
data = str(msg.payload, "utf-8").strip()
logging.debug("DASH RX: %s", data)
self.tx_zmq_pub.send_multipart([self.b_connection_id, b'1', msg.payload])
def __on_publish(self, client, obj, mid):
pass
def __on_subscribe(self, client, obj, mid, granted_qos):
logging.debug("Subscribed: %s %s", str(mid), str(granted_qos))
def __on_log(self, client, obj, level, string):
logging.debug(string)
def add_device(self, device):
device.add_connection(self.connection_id)
control_topic = "{}/{}/control".format(self.username, device.device_id)
self.dash_c.subscribe(control_topic, 0)
def __init__(self, device_id, host, port, username="", password="", use_ssl=False, context=None):
"""
Arguments:
host {str} -- The server name of the mqtt host.
port {int} -- Port number to connect to.
username {str} -- username for the mqtt connection.
password {str} -- password for the mqtt connection.
Keyword Arguments:
use_ssl {bool} -- Whether to use ssl for the connection or not. (default: {False})
"""
threading.Thread.__init__(self, daemon=True)
self.context = context or zmq.Context.instance()
self.connection_id = uuid.uuid4()
self.b_connection_id = self.connection_id.bytes
self.LWD = "OFFLINE"
self.running = True
self.username = username
self.mqttc = mqtt.Client()
# Assign event callbacks
self.mqttc.on_message = self.__on_message
self.mqttc.on_connect = self.__on_connect
self.mqttc.on_publish = self.__on_publish
self.mqttc.on_subscribe = self.__on_subscribe
if use_ssl:
self.mqttc.tls_set(
ca_certs=None,
certfile=None,
keyfile=None,
cert_reqs=ssl.CERT_REQUIRED,
tls_version=ssl.PROTOCOL_TLSv1_2,
ciphers=None,
)
self.mqttc.tls_insecure_set(False)
self.mqttc.on_log = self.__on_log
        # data_topic must be defined before it is used in the last-will message
        self.data_topic = "{}/{}/data".format(username, device_id)
        self.mqttc.will_set(self.data_topic, self.LWD, qos=1, retain=False)
# Connect
if username and password:
self.mqttc.username_pw_set(username, password)
self.mqttc.connect(host, port)
# Start subscribe, with QoS level 0
self.start()
def run(self):
self.mqttc.loop_start()
tx_url_internal = "inproc://TX_{}".format(self.connection_id.hex)
rx_url_internal = "inproc://RX_{}".format(self.connection_id.hex)
self.tx_zmq_pub = self.context.socket(zmq.PUB)
self.tx_zmq_pub.bind(tx_url_internal)
rx_zmq_sub = self.context.socket(zmq.SUB)
rx_zmq_sub.bind(rx_url_internal)
# Subscribe on ALL, and my connection
rx_zmq_sub.setsockopt(zmq.SUBSCRIBE, b"ALL")
rx_zmq_sub.setsockopt(zmq.SUBSCRIBE, b"ALARM")
rx_zmq_sub.setsockopt(zmq.SUBSCRIBE, self.b_connection_id)
poller = zmq.Poller()
poller.register(rx_zmq_sub, zmq.POLLIN)
while self.running:
try:
socks = dict(poller.poll(50))
except zmq.error.ContextTerminated:
break
if rx_zmq_sub in socks:
                [address, msg_id, data] = rx_zmq_sub.recv_multipart()  # msg_id avoids shadowing the id() builtin
msg_l = data.split(b'\t')
device_id = msg_l[1].decode('utf-8').strip()
logging.debug("%s TX: %s", self.b_connection_id.decode('utf-8'), data.decode('utf-8').rstrip())
data_topic = "{}/{}/data".format(self.username, device_id)
self.mqttc.publish(data_topic, data)
self.mqttc.loop_stop()
self.tx_zmq_pub.close()
rx_zmq_sub.close()
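# Illustrative usage sketch (host, credentials and the device object are
# assumptions, not defined in this module):
#
#   conn = mqttConnection("device-01", "dash.example.com", 8883,
#                         username="user", password="secret", use_ssl=True)
#   conn.add_device(my_device)  # my_device must expose add_connection() and device_id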
| 34.61157 | 111 | 0.612703 |
acf4f0e916794a5f6e050c1b0db445131f96479a | 1,595 | py | Python | africanus/model/coherency/cuda/tests/test_convert.py | JoshVStaden/codex-africanus | 4a38994431d51510b1749fa0e4b8b6190b8b530f | [
"BSD-3-Clause"
] | 13 | 2018-04-06T09:36:13.000Z | 2021-04-13T13:11:00.000Z | africanus/model/coherency/cuda/tests/test_convert.py | JoshVStaden/codex-africanus | 4a38994431d51510b1749fa0e4b8b6190b8b530f | [
"BSD-3-Clause"
] | 153 | 2018-03-28T14:13:48.000Z | 2022-02-03T07:49:17.000Z | africanus/model/coherency/cuda/tests/test_convert.py | JoshVStaden/codex-africanus | 4a38994431d51510b1749fa0e4b8b6190b8b530f | [
"BSD-3-Clause"
] | 14 | 2018-03-29T13:30:52.000Z | 2021-06-12T02:56:55.000Z | # -*- coding: utf-8 -*-
import numpy as np
import pytest
from africanus.model.coherency import convert as np_convert
from africanus.model.coherency.cuda import convert
from africanus.model.coherency.tests.test_convert import (
stokes_corr_cases,
stokes_corr_int_cases,
visibility_factory,
vis_shape)
@pytest.mark.skip
def test_stokes_schemas(in_type, input_schema,
out_type, output_schema,
vis_shape):
input_shape = np.asarray(input_schema).shape
output_shape = np.asarray(output_schema).shape
vis = visibility_factory(vis_shape, input_shape, in_type)
xformed_vis = np_convert(vis, input_schema, output_schema)
assert xformed_vis.shape == vis_shape + output_shape
@pytest.mark.parametrize("in_type, input_schema, out_type, output_schema",
stokes_corr_cases + stokes_corr_int_cases)
@pytest.mark.parametrize("vis_shape", vis_shape)
def test_cuda_convert(in_type, input_schema,
out_type, output_schema,
vis_shape):
cp = pytest.importorskip('cupy')
input_shape = np.asarray(input_schema).shape
vis = visibility_factory(vis_shape, input_shape, in_type)
cp_out = convert(cp.asarray(vis), input_schema, output_schema)
np_out = np_convert(vis, input_schema, output_schema)
np.testing.assert_array_almost_equal(cp.asnumpy(cp_out), np_out)
| 37.093023 | 74 | 0.635737 |
acf4f18901abb3002fbf9d8f5dbbf125cb04a822 | 683 | py | Python | aiomatrix/types/responses/__init__.py | Forden/aiomatrix | d258076bae8eb776495b92be46ee9f4baec8d9a6 | [
"MIT"
] | 2 | 2021-10-29T18:07:08.000Z | 2021-11-19T00:25:43.000Z | aiomatrix/types/responses/__init__.py | Forden/aiomatrix | d258076bae8eb776495b92be46ee9f4baec8d9a6 | [
"MIT"
] | 1 | 2022-03-06T11:17:43.000Z | 2022-03-06T11:17:43.000Z | aiomatrix/types/responses/__init__.py | Forden/aiomatrix | d258076bae8eb776495b92be46ee9f4baec8d9a6 | [
"MIT"
] | null | null | null | from .content_repository import ContentRepositoryConfig, UploadedFileResponse
from .create_room import CreateRoomResponse
from .listing_rooms import RoomVisibilityResponse, ServerPublicRoomsResponse
from .login_response import LoginResponse
from .login_types import SupportedLoginTypes
from .room_aliases import GetRoomAliasesResponse, ResolveRoomAliasResponse
from .room_members import RoomMembersResponse
from .room_membership import UserJoinRoomResponse, UserJoinedRoomsResponse
from .room_messages import RoomMessagesResponse
from .server_capabilities import ServerCapabilitiesResponse
from .sync import SentEventResponse, SyncResponse
from .whoamiresponse import WhoAmIResponse
| 52.538462 | 77 | 0.897511 |
acf4f1989a8f3afe58b4235a5b0bc896959f1cf0 | 1,005 | py | Python | cngi/image/sdfixscan.py | wxiongccnu1990/cngi_prototype | 7a7230485acc9f8f2be534a832522339153d521e | [
"Apache-2.0"
] | null | null | null | cngi/image/sdfixscan.py | wxiongccnu1990/cngi_prototype | 7a7230485acc9f8f2be534a832522339153d521e | [
"Apache-2.0"
] | null | null | null | cngi/image/sdfixscan.py | wxiongccnu1990/cngi_prototype | 7a7230485acc9f8f2be534a832522339153d521e | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 AUI, Inc. Washington DC, USA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
########################
def sdfixscan(xds):
"""
.. todo::
This function is not yet implemented
Remove the striping pattern generated along the scan direction in raster scan data
Parameters
----------
xds : xarray.core.dataset.Dataset
input Image
Returns
-------
xarray.core.dataset.Dataset
output Image
"""
return True
| 28.714286 | 86 | 0.665672 |
acf4f1b8da638fc50ca417b438b92d64786a935a | 6,088 | py | Python | optic_store/optic_store/report/item_consumption/item_consumption.py | iptelephony/optic_store | 9c5e6e2f7170d18ecac8dd53133a62a3250cd834 | [
"MIT"
] | 14 | 2019-05-14T09:33:58.000Z | 2022-03-19T14:43:36.000Z | optic_store/optic_store/report/item_consumption/item_consumption.py | iptelephony/optic_store | 9c5e6e2f7170d18ecac8dd53133a62a3250cd834 | [
"MIT"
] | 18 | 2019-03-22T19:51:22.000Z | 2020-08-04T13:57:27.000Z | optic_store/optic_store/report/item_consumption/item_consumption.py | iptelephony/optic_store | 9c5e6e2f7170d18ecac8dd53133a62a3250cd834 | [
"MIT"
] | 25 | 2019-05-15T08:31:17.000Z | 2022-02-25T07:21:18.000Z | # Copyright (c) 2013, 9t9it and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import today
from functools import partial, reduce
import operator
from toolz import merge, pluck, get, compose, first, flip, groupby, excepts, keyfilter
from optic_store.utils.helpers import generate_intervals
from optic_store.utils.report import make_column, with_report_generation_time
def execute(filters=None):
args = _get_args(filters)
columns = _get_columns(args)
data = _get_data(args, columns)
pick_column_fields = partial(
keyfilter,
lambda k: k in ["label", "fieldname", "fieldtype", "options", "width"],
)
return [pick_column_fields(x) for x in columns], data
def _get_args(filters={}):
if not filters.get("company"):
frappe.throw(_("Company is required to generate report"))
return merge(
filters,
{
"price_list": frappe.db.get_value(
"Buying Settings", None, "buying_price_list"
),
"start_date": filters.get("start_date") or today(),
"end_date": filters.get("end_date") or today(),
},
)
def _get_columns(args):
columns = [
make_column("item_code", type="Link", options="Item"),
make_column("brand", type="Link", options="Brand"),
make_column("item_name", width=200),
make_column("supplier", type="Link", options="Supplier"),
make_column(
"price",
args.get("price_list", "Standard Buying Price"),
type="Currency",
width=90,
),
make_column("stock", "Available Stock", type="Float", width=90),
]
intervals = compose(
list,
partial(
map,
lambda x: merge(
x, make_column(x.get("key"), x.get("label"), type="Float", width=90)
),
),
generate_intervals,
)
return (
columns
+ intervals(args.get("interval"), args.get("start_date"), args.get("end_date"))
+ [make_column("total_consumption", type="Float", width=90)]
)
def _get_data(args, columns):
warehouse_conditions = (
"warehouse = %(warehouse)s"
if args.get("warehouse")
else (
"warehouse IN (SELECT name FROM `tabWarehouse` WHERE company = %(company)s)"
)
)
items = frappe.db.sql(
"""
SELECT
i.item_code AS item_code,
i.brand AS brand,
i.item_name AS item_name,
id.default_supplier AS supplier,
p.price_list_rate AS price,
b.actual_qty AS stock
FROM `tabItem` AS i
LEFT JOIN `tabItem Price` AS p
ON p.item_code = i.item_code AND p.price_list = %(price_list)s
LEFT JOIN (
SELECT
item_code, SUM(actual_qty) AS actual_qty
FROM `tabBin`
WHERE {warehouse_conditions}
GROUP BY item_code
) AS b
ON b.item_code = i.item_code
LEFT JOIN `tabItem Default` AS id
ON id.parent = i.name AND id.company = %(company)s
""".format(
warehouse_conditions=warehouse_conditions
),
values={
"price_list": args.get("price_list"),
"company": args.get("company"),
"warehouse": args.get("warehouse"),
},
as_dict=1,
)
sles = frappe.db.sql(
"""
SELECT item_code, posting_date, actual_qty
FROM `tabStock Ledger Entry`
WHERE docstatus < 2 AND
voucher_type = 'Sales Invoice' AND
company = %(company)s AND
{warehouse_conditions} AND
posting_date BETWEEN %(start_date)s AND %(end_date)s
""".format(
warehouse_conditions=warehouse_conditions
),
values={
"company": args.get("company"),
"warehouse": args.get("warehouse"),
"start_date": args.get("start_date"),
"end_date": args.get("end_date"),
},
as_dict=1,
)
keys = compose(list, partial(pluck, "fieldname"))(columns)
periods = list(filter(lambda x: x.get("start_date") and x.get("end_date"), columns))
set_consumption = _set_consumption(sles, periods)
make_row = compose(partial(keyfilter, lambda k: k in keys), set_consumption)
return with_report_generation_time([make_row(x) for x in items], keys)
def _set_consumption(sles, periods):
def groupby_filter(sl):
def fn(p):
return p.get("start_date") <= sl.get("posting_date") <= p.get("end_date")
return fn
groupby_fn = compose(
partial(get, "key", default=None),
excepts(StopIteration, first, lambda __: {}),
partial(flip, filter, periods),
groupby_filter,
)
sles_grouped = groupby(groupby_fn, sles)
summer = compose(operator.neg, sum, partial(pluck, "actual_qty"))
def seg_filter(x):
return lambda sl: sl.get("item_code") == x
segregator_fns = [
merge(
x,
{
"seger": compose(
summer,
partial(flip, filter, get(x.get("key"), sles_grouped, [])),
seg_filter,
)
},
)
for x in periods
]
def seg_reducer(item_code):
def fn(a, p):
key = get("key", p, None)
seger = get("seger", p, lambda __: None)
return merge(a, {key: seger(item_code)})
return fn
total_fn = compose(summer, partial(flip, filter, sles), seg_filter)
def fn(item):
item_code = item.get("item_code")
return merge(
item,
reduce(seg_reducer(item_code), segregator_fns, {}),
{"total_consumption": total_fn(item_code)},
)
return fn
| 30.903553 | 88 | 0.558147 |
acf4f2414a4def14253a90ea2d4c1e7f48602d89 | 2,190 | py | Python | cftracker/detectron2_detector/detetron2_test.py | leander-dl-fang/UAVTracking | 6b7f737fd0cbe172d68875850b052d669485dd40 | [
"MIT"
] | 1 | 2021-04-20T20:37:34.000Z | 2021-04-20T20:37:34.000Z | cftracker/detectron2_detector/detetron2_test.py | leander-dl-fang/UAVTracking | 6b7f737fd0cbe172d68875850b052d669485dd40 | [
"MIT"
] | null | null | null | cftracker/detectron2_detector/detetron2_test.py | leander-dl-fang/UAVTracking | 6b7f737fd0cbe172d68875850b052d669485dd40 | [
"MIT"
] | null | null | null | # Some basic setup:
# Setup detectron2 logger
import detectron2
from detectron2.utils.logger import setup_logger
setup_logger()
# import some common libraries
import numpy as np
import os, json, cv2, random
# import some common detectron2 utilities
from detectron2 import model_zoo
from detectron2.engine import DefaultPredictor
from detectron2.config import get_cfg
from detectron2.utils.visualizer import Visualizer
from detectron2.data import MetadataCatalog, DatasetCatalog
import os
module_path = os.path.abspath(__file__)
dir_path = os.path.dirname(module_path)
# import torch.multiprocessing as mp
# mp.set_start_method('spawn')
def faster_rcnn_detection(image_name):
# cftracker\detectron2_detector\person23_000292.jpg
image_path = os.path.join(dir_path,"to_detect/"+image_name)
print("image_path",image_path)
im = cv2.imread(image_path)
# Inference with a keypoint detection model
cfg = get_cfg() # get a fresh new config
cfg.merge_from_file(model_zoo.get_config_file("COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml"))
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.7 # set threshold for this model
cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url("COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x.yaml")
predictor = DefaultPredictor(cfg)
outputs = predictor(im)
# print(outputs['instances'].get("pred_boxes").tensor.cpu().numpy().tolist()==[])
    if not outputs['instances'].get("pred_boxes").tensor.cpu().numpy().tolist():
        return None
    center = outputs['instances'].get("pred_boxes").get_centers().cpu().numpy()[0].tolist()
    return center[0], center[1]
# print("outputs['instances'].get('pred_boxes').nonempty().cpu()")
# print(outputs['instances'].get("pred_boxes").nonempty().cpu())
# print("outputs['instances'].get('pred_boxes').nonempty().cpu()")
# print("outputs['instances'].get('pred_boxes').tensor.cpu().numpy()",outputs['instances'].get('pred_boxes').tensor.cpu().numpy())
#Boxes(tensor([], device='cuda:0', size=(0, 4)))
if __name__ == "__main__":
image_name = "person8_000935.jpg"
print(faster_rcnn_detection(image_name))
# print(float("hello"))
| 39.107143 | 134 | 0.730594 |
acf4f46ad7cc51423a52c8ae865e5226b8789cae | 2,246 | py | Python | srcs/python/kungfu/tensorflow/v1/helpers/idx.py | Pandinosaurus/KungFu | 80dfa463450330e920b413f65cc49d8e013b84a9 | [
"Apache-2.0"
] | 291 | 2019-10-25T16:37:59.000Z | 2022-03-17T21:47:09.000Z | srcs/python/kungfu/tensorflow/v1/helpers/idx.py | Pandinosaurus/KungFu | 80dfa463450330e920b413f65cc49d8e013b84a9 | [
"Apache-2.0"
] | 56 | 2019-10-26T08:25:33.000Z | 2021-09-07T11:11:51.000Z | srcs/python/kungfu/tensorflow/v1/helpers/idx.py | Pandinosaurus/KungFu | 80dfa463450330e920b413f65cc49d8e013b84a9 | [
"Apache-2.0"
] | 53 | 2019-10-25T17:45:40.000Z | 2022-02-08T13:09:39.000Z | """
idx implements the IDX file format defined in
http://yann.lecun.com/exdb/mnist/
"""
import io
import sys
import tarfile
from struct import pack, unpack
import numpy as np
_idx_2_np = [
(0x8, np.uint8),
(0x9, np.int8),
(0xb, np.int16),
(0xc, np.int32),
(0xd, np.float32),
(0xe, np.float64),
]
def _get_idx_type(np_type):
for idx_t, np_t in _idx_2_np:
if np_t == np_type:
return idx_t
raise ValueError('unsupported np_type %s' % np_type)
def _get_np_type(idx_type):
for idx_t, np_t in _idx_2_np:
if idx_t == idx_type:
return np_t
raise ValueError('unsupported idx_type %s' % idx_type)
def write_idx_header(f, a):
f.write(pack('BBBB', 0, 0, _get_idx_type(a.dtype), len(a.shape)))
for dim in a.shape:
# https://docs.python.org/3/library/struct.html#format-characters
f.write(pack('>I', dim))
def write_idx_to(f, a):
write_idx_header(f, a)
f.write(a.tobytes())
def write_idx_file(name, a):
with open(name, 'wb') as f:
write_idx_to(f, a)
def read_idx_header(f):
magic = f.read(4) # [0, 0, dtype, rank]
_, _, dtype, rank = magic
if sys.version_info.major == 2:
dtype = ord(dtype)
rank = ord(rank)
# https://docs.python.org/3/library/struct.html#format-characters
dims = [unpack('>I', f.read(4))[0] for _ in range(rank)]
return dtype, dims
def read_idx_from(f):
dtype, dims = read_idx_header(f)
return np.ndarray(dims, _get_np_type(dtype), f.read())
def read_idx_file(name):
with open(name, 'rb') as f:
return read_idx_from(f)
def _infer_out_filename(in_file):
name = str(in_file)
if name.endswith('.npz'):
name = name[:-4]
name += '.idx.tar'
return name
def npz2idxtar(in_file, out_file=None):
if out_file is None:
out_file = _infer_out_filename(in_file)
ws = np.load(in_file)
with tarfile.open(out_file, 'w') as tar:
for name in ws.files:
w = ws[name]
bs = io.BytesIO()
write_idx_to(bs, w)
info = tarfile.TarInfo(name)
info.size = len(bs.getvalue())
tar.addfile(info, io.BytesIO(bs.getbuffer()))
return out_file
| 23.395833 | 73 | 0.61309 |
acf4f616b061ac4c60db2cd2bf5e50f97d94b084 | 12,062 | py | Python | drf_haystack/query.py | CompileInc/drf-haystack | 32038771071d660cf85abe065646f1640f4a7035 | [
"MIT"
] | null | null | null | drf_haystack/query.py | CompileInc/drf-haystack | 32038771071d660cf85abe065646f1640f4a7035 | [
"MIT"
] | null | null | null | drf_haystack/query.py | CompileInc/drf-haystack | 32038771071d660cf85abe065646f1640f4a7035 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import operator
import warnings
from itertools import chain
from django.utils import six
from django.utils.six.moves import zip
from dateutil import parser
from drf_haystack import constants
from drf_haystack.utils import merge_dict
class BaseQueryBuilder(object):
"""
Query builder base class.
"""
def __init__(self, backend, view):
self.backend = backend
self.view = view
def build_query(self, **filters):
"""
        :param dict[str, list[str]] filters: an expanded QueryDict or
            a mapping of keys to a list of parameters.
"""
raise NotImplementedError("You should override this method in subclasses.")
@staticmethod
def tokenize(stream, separator):
"""
Tokenize and yield query parameter values.
:param stream: Input value
:param separator: Character to use to separate the tokens.
:return:
"""
for value in stream:
for token in value.split(separator):
if token:
yield token.strip()
class BoostQueryBuilder(BaseQueryBuilder):
"""
Query builder class for adding boost to queries.
"""
def build_query(self, **filters):
applicable_filters = None
query_param = getattr(self.backend, "query_param", None)
value = filters.pop(query_param, None)
if value:
try:
term, val = chain.from_iterable(zip(self.tokenize(value, self.view.lookup_sep)))
except ValueError:
raise ValueError("Cannot convert the '%s' query parameter to a valid boost filter."
% query_param)
else:
try:
applicable_filters = {"term": term, "boost": float(val)}
except ValueError:
raise ValueError("Cannot convert boost to float value. Make sure to provide a "
"numerical boost value.")
return applicable_filters
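# Illustrative example (the query parameter name depends on the backend's
# `query_param` setting): a request like `?boost=garden,2.5` is tokenized into
# the term 'garden' and the float 2.5, producing {"term": "garden", "boost": 2.5}.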
class FilterQueryBuilder(BaseQueryBuilder):
"""
Query builder class suitable for doing basic filtering.
"""
def __init__(self, backend, view):
super(FilterQueryBuilder, self).__init__(backend, view)
assert getattr(self.backend, "default_operator", None) in (operator.and_, operator.or_), (
"%(cls)s.default_operator must be either 'operator.and_' or 'operator.or_'." % {
"cls": self.backend.__class__.__name__
})
self.default_operator = self.backend.default_operator
def build_query(self, **filters):
"""
Creates a single SQ filter from querystring parameters that correspond to the SearchIndex fields
that have been "registered" in `view.fields`.
Default behavior is to `OR` terms for the same parameters, and `AND` between parameters. Any
querystring parameters that are not registered in `view.fields` will be ignored.
        :param dict[str, list[str]] filters: an expanded QueryDict or a mapping
            of keys to a list of parameters.
"""
applicable_filters = []
applicable_exclusions = []
for param, value in filters.items():
# Skip if the parameter is not listed in the serializer's `fields`
# or if it's in the `exclude` list.
excluding_term = False
param_parts = param.split("__")
base_param = param_parts[0] # only test against field without lookup
negation_keyword = constants.DRF_HAYSTACK_NEGATION_KEYWORD
if len(param_parts) > 1 and param_parts[1] == negation_keyword:
excluding_term = True
param = param.replace("__%s" % negation_keyword, "") # haystack wouldn't understand our negation
if self.view.serializer_class:
if self.view.serializer_class.Meta.field_aliases:
old_base = base_param
base_param = self.view.serializer_class.Meta.field_aliases.get(base_param, base_param)
param = param.replace(old_base, base_param) # need to replace the alias
fields = self.view.serializer_class.Meta.fields
exclude = self.view.serializer_class.Meta.exclude
search_fields = self.view.serializer_class.Meta.search_fields
if ((fields or search_fields) and base_param not in chain(fields, search_fields)) or base_param in exclude or not value:
continue
field_queries = []
for token in self.tokenize(value, self.view.lookup_sep):
field_queries.append(self.view.query_object((param, token)))
field_queries = [fq for fq in field_queries if fq]
if len(field_queries) > 0:
term = six.moves.reduce(operator.or_, field_queries)
if excluding_term:
applicable_exclusions.append(term)
else:
applicable_filters.append(term)
applicable_filters = six.moves.reduce(
self.default_operator, filter(lambda x: x, applicable_filters)) if applicable_filters else []
applicable_exclusions = six.moves.reduce(
self.default_operator, filter(lambda x: x, applicable_exclusions)) if applicable_exclusions else []
return applicable_filters, applicable_exclusions
class FacetQueryBuilder(BaseQueryBuilder):
"""
Query builder class suitable for constructing faceted queries.
"""
def build_query(self, **filters):
"""
Creates a dict of dictionaries suitable for passing to the SearchQuerySet `facet`,
`date_facet` or `query_facet` method. All key word arguments should be wrapped in a list.
        :param dict[str, list[str]] filters: an expanded QueryDict or a mapping
            of keys to a list of parameters.
"""
field_facets = {}
date_facets = {}
query_facets = {}
facet_serializer_cls = self.view.get_facet_serializer_class()
if self.view.lookup_sep == ":":
raise AttributeError("The %(cls)s.lookup_sep attribute conflicts with the HaystackFacetFilter "
"query parameter parser. Please choose another `lookup_sep` attribute "
"for %(cls)s." % {"cls": self.view.__class__.__name__})
fields = facet_serializer_cls.Meta.fields
exclude = facet_serializer_cls.Meta.exclude
field_options = facet_serializer_cls.Meta.field_options
for field, options in filters.items():
if field not in fields or field in exclude:
continue
            # parse_field_options reads lookup_sep from self.view itself
            field_options = merge_dict(field_options, {field: self.parse_field_options(*options)})
valid_gap = ("year", "month", "day", "hour", "minute", "second")
for field, options in field_options.items():
if any([k in options for k in ("start_date", "end_date", "gap_by", "gap_amount")]):
if not all(("start_date", "end_date", "gap_by" in options)):
raise ValueError("Date faceting requires at least 'start_date', 'end_date' "
"and 'gap_by' to be set.")
if not options["gap_by"] in valid_gap:
raise ValueError("The 'gap_by' parameter must be one of %s." % ", ".join(valid_gap))
options.setdefault("gap_amount", 1)
date_facets[field] = field_options[field]
else:
field_facets[field] = field_options[field]
return {
"date_facets": date_facets,
"field_facets": field_facets,
"query_facets": query_facets
}
def parse_field_options(self, *options):
"""
Parse the field options query string and return it as a dictionary.
"""
defaults = {}
for option in options:
if isinstance(option, six.text_type):
tokens = [token.strip() for token in option.split(self.view.lookup_sep)]
for token in tokens:
if not len(token.split(":")) == 2:
warnings.warn("The %s token is not properly formatted. Tokens need to be "
"formatted as 'token:value' pairs." % token)
continue
param, value = token.split(":", 1)
if any([k == param for k in ("start_date", "end_date", "gap_amount")]):
if param in ("start_date", "end_date"):
value = parser.parse(value)
if param == "gap_amount":
value = int(value)
defaults[param] = value
return defaults
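# Illustrative example: a field option string like
#   "start_date:2021-01-01,end_date:2021-12-31,gap_by:month,gap_amount:1"
# parses to {'start_date': datetime(2021, 1, 1, 0, 0),
#            'end_date': datetime(2021, 12, 31, 0, 0),
#            'gap_by': 'month', 'gap_amount': 1}.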
class SpatialQueryBuilder(BaseQueryBuilder):
"""
Query builder class suitable for construction spatial queries.
"""
def __init__(self, backend, view):
super(SpatialQueryBuilder, self).__init__(backend, view)
assert getattr(self.backend, "point_field", None) is not None, (
"%(cls)s.point_field cannot be None. Set the %(cls)s.point_field "
"to the name of the `LocationField` you want to filter on your index class." % {
"cls": self.backend.__class__.__name__
})
try:
from haystack.utils.geo import D, Point
self.D = D
self.Point = Point
except ImportError:
warnings.warn("Make sure you've installed the `libgeos` library. "
"Run `apt-get install libgeos` on debian based linux systems, "
"or `brew install geos` on OS X.")
raise
def build_query(self, **filters):
"""
Build queries for geo spatial filtering.
Expected query parameters are:
- a `unit=value` parameter where the unit is a valid UNIT in the
`django.contrib.gis.measure.Distance` class.
- `from` which must be a comma separated latitude and longitude.
Example query:
/api/v1/search/?km=10&from=59.744076,10.152045
Will perform a `dwithin` query within 10 km from the point
with latitude 59.744076 and longitude 10.152045.
"""
applicable_filters = None
filters = dict((k, filters[k]) for k in chain(self.D.UNITS.keys(), ["from"]) if k in filters)
distance = dict((k, v) for k, v in filters.items() if k in self.D.UNITS.keys())
try:
latitude, longitude = map(float, self.tokenize(filters["from"], self.view.lookup_sep))
point = self.Point(longitude, latitude, srid=constants.GEO_SRID)
except ValueError:
raise ValueError("Cannot convert `from=latitude,longitude` query parameter to "
"float values. Make sure to provide numerical values only!")
except KeyError:
# If the user has not provided any `from` query string parameter,
# just return.
pass
else:
for unit in distance.keys():
if not len(distance[unit]) == 1:
raise ValueError("Each unit must have exactly one value.")
distance[unit] = float(distance[unit][0])
if point and distance:
applicable_filters = {
"dwithin": {
"field": self.backend.point_field,
"point": point,
"distance": self.D(**distance)
},
"distance": {
"field": self.backend.point_field,
"point": point
}
}
return applicable_filters
| 38.660256 | 136 | 0.583402 |
acf4f6283094fce389a2d890e7b37294709b2e52 | 7,502 | py | Python | media/sync_lightroom_catalogs.py | thejoltjoker/python | 4517065e64d83947648e2aad206ac4ec786b166e | [
"MIT"
] | null | null | null | media/sync_lightroom_catalogs.py | thejoltjoker/python | 4517065e64d83947648e2aad206ac4ec786b166e | [
"MIT"
] | null | null | null | media/sync_lightroom_catalogs.py | thejoltjoker/python | 4517065e64d83947648e2aad206ac4ec786b166e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
sync_lightroom_catalogs.py
Keep lightroom catalogs up to date on different volumes.
"""
import os
import logging
import hashlib
import pprint
import datetime
import json
import time
import argparse
from pathlib import Path
def setup_logger():
# Setup logger
# logger = logging.getLogger(__name__)
# handler = logging.StreamHandler()
logging.basicConfig(level=logging.INFO, format='%(name)-10s %(levelname)8s %(message)s')
# formatter = logging.Formatter('%(asctime)-20s %(name)-10s %(levelname)-8s %(message)s', "%Y-%m-%d %H:%M:%S")
#
# handler.setFormatter(formatter)
# logger.addHandler(handler)
# logger.setLevel(logging.DEBUG)
# return logger
class LRSync:
def __init__(self, catalogs_path=None):
self.catalogs = []
if catalogs_path is None:
self.catalogs_path = Path("/Users/johannes/Dropbox/Pictures/lightroom/catalogs/lr_catalogs.json")
@staticmethod
def get_volumes_list():
"""returns a list of volumes"""
return [os.path.join('/Volumes', d) for d in os.listdir('/Volumes')]
@staticmethod
def save_json(path, data):
path = Path(path)
with open(path, 'w') as outfile:
json.dump(data, outfile)
@staticmethod
def hashfile(path, blocksize=65536):
        hasher = hashlib.md5()
        # the context manager guarantees the file is closed even on error
        with open(path, 'rb') as file:
            for buf in iter(lambda: file.read(blocksize), b''):
                hasher.update(buf)
        return hasher.hexdigest()
def load_catalogs(self):
if self.catalogs_path.exists():
logging.debug(f"Loading paths from {self.catalogs_path}")
with self.catalogs_path.open(mode="r") as json_file:
catalogs_meta = json.load(json_file)
return catalogs_meta
else:
logging.info(f"Couldn't load catalogs file from {self.catalogs_path}")
return {}
def get_catalogs(self, directory, save_to_file=True):
catalogs_meta = {}
directory = Path(directory)
# Load paths from existing catalogs file if it exists
catalogs_meta = self.load_catalogs()
# Check if folder exists
if directory.exists():
logging.info(f"Scanning {directory} for Lightroom catalogs")
# Walk through files and directories
for root, dirs, files in os.walk(directory, topdown=True):
# Skip backups folders
if "Backups" in root:
continue
# Skip lrdata folders
dirs[:] = [x for x in dirs if not x.endswith(".lrdata")]
# Get catalog files
catalogs = [c for c in files if c.endswith('lrcat') and 'backups' not in root.lower()]
# Save all catalogs and paths
for cat in catalogs:
cat_path = str(Path(os.path.join(root, cat)).resolve())
# Add new catalog to data or append if it already exists
if cat not in catalogs_meta:
logging.debug(f"Adding {cat} to list of catalogs")
catalogs_meta[cat] = {
"sync": True,
"paths": [cat_path]
}
else:
logging.debug(f"{cat} already exists in list of catalogs, adding additional path")
if cat_path not in catalogs_meta[cat]["paths"]:
catalogs_meta[cat]["paths"].append(cat_path)
# Save paths to file
if save_to_file:
self.save_json(self.catalogs_path, catalogs_meta)
else:
logging.warning("Directory doesn't exist")
return None
return catalogs_meta
def get_latest_catalog(self, catalog_name):
catalog_meta = self.load_catalogs()
catalog_paths = catalog_meta[catalog_name]['paths']
paths = {}
# Iterate through paths
for cat in catalog_paths:
# Set some meta variables
path = Path(cat)
catalog_change_date = path.lstat().st_mtime
# Create new key if not existing
if not paths:
paths = {
path.absolute().resolve(): catalog_change_date
}
else:
paths[path.absolute().resolve()] = catalog_change_date
# Get most recently modified file
latest = max(paths, key=paths.get)
return latest
def main():
"""docstring for main"""
# Create LRSync object
lr_sync = LRSync()
# Get list of volumes
# volumes = lr_sync.get_volumes_list()
# print(volumes)
# Scan for catalogs and save to file
# path = "/Volumes/mcdaddy/Pictures/lightroom/catalogs"
# catalogs = lr_sync.get_catalogs(path)
# pprint.pprint(catalogs)
# Get latest catalog
print(lr_sync.get_latest_catalog("lr_classic_2019_002.lrcat"))
# List catalogs with more than one copy
cat_meta = lr_sync.load_catalogs()
inc = 1
for c in sorted(cat_meta):
copy_count = len(cat_meta[c]['paths'])
if copy_count > 1:
print(f"{inc}: {c} ({copy_count})")
inc += 1
def cli():
"""Command line interface"""
# Load LRSync object
lr_sync = LRSync()
catalog_meta = lr_sync.load_catalogs()
syncable_catalogs = {x: y for x, y in catalog_meta.items() if len(y["paths"]) > 1}
# Create the parser
parser = argparse.ArgumentParser(description="Sync Lightroom catalogs")
# Add the arguments
parser.add_argument("-s", "--sync",
type=str,
help="Sync catalogs",
action="store")
parser.add_argument("-l", "--list",
help="Move files instead of copy",
action="store_true")
# Execute the parse_args() method
args = parser.parse_args()
# Print the title
print("Lightroom Sync")
print("")
if args.list:
print("Here are all the catalogs with multiple paths.")
print("The most recently modified is marked with a star.")
for cat in sorted(syncable_catalogs):
print(cat)
latest = str(lr_sync.get_latest_catalog(cat))
for p in catalog_meta[cat]["paths"]:
path = Path(p)
formatted_time = datetime.datetime.utcfromtimestamp(path.lstat().st_mtime).strftime('%Y-%m-%d %H:%M:%S')
if p == latest:
print(f"* \t{p} ({formatted_time})")
else:
print(f"\t{p} ({formatted_time})")
print("")
# if args.sync is None:
# print(f"Choose a catalog to sync:")
# for n, cat in enumerate(sorted(syncable_catalogs), 1):
# print(f"{n}: {cat}")
#
# while True:
# try:
# source_input = int(input("> ").strip())
# if volumes.get(source_input):
# source = volumes[source_input]
# break
# else:
# print("Invalid choice. Try again.")
#
# except:
# print("Invalid selection")
# exit(1)
# else:
# source = args.source
print("")
if __name__ == '__main__':
setup_logger()
cli()
| 31.258333 | 120 | 0.559717 |
acf4f6de24101f6c46a4b0d3f1520d88f77f7ba0 | 270 | py | Python | momentum/utils.py | StannisZhou/mixed_hmc | 7925b3290c9ba692a2afbe06e102ea88a7cea511 | [
"MIT"
] | 11 | 2020-08-30T01:57:19.000Z | 2022-03-02T12:34:02.000Z | momentum/utils.py | StannisZhou/mixed_hmc | 7925b3290c9ba692a2afbe06e102ea88a7cea511 | [
"MIT"
] | 1 | 2021-07-13T10:06:21.000Z | 2021-07-13T17:41:11.000Z | momentum/utils.py | StannisZhou/mixed_hmc | 7925b3290c9ba692a2afbe06e102ea88a7cea511 | [
"MIT"
] | 1 | 2021-08-25T08:03:48.000Z | 2021-08-25T08:03:48.000Z | import numpy as np
import jax
def jax_prng_key():
return jax.random.PRNGKey(np.random.randint(int(1e5)))
def categorical(key, p, shape=()):
s = jax.numpy.cumsum(p)
r = jax.random.uniform(key, shape=shape + (1,))
return jax.numpy.sum(s < r, axis=-1)
| 19.285714 | 58 | 0.651852 |
acf4f73b8a2d1631cabd422e3bdc169a76259c3d | 843 | py | Python | setup.py | ravenkls/lusportscentre | 947c8bc58ce7a0ff0b4afdcff9ec7b859f17c39b | [
"MIT"
] | 1 | 2021-05-10T13:38:32.000Z | 2021-05-10T13:38:32.000Z | setup.py | ravenkls/lusportscentre | 947c8bc58ce7a0ff0b4afdcff9ec7b859f17c39b | [
"MIT"
] | null | null | null | setup.py | ravenkls/lusportscentre | 947c8bc58ce7a0ff0b4afdcff9ec7b859f17c39b | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
import re
requirements = []
with open("Pipfile") as f:
pipfile = f.read()
pipfile_packages = re.findall(r"\[packages\](\n(?:.*\n)+?)\n", pipfile)[0]
for package_line in pipfile_packages.strip().split("\n"):
requirements.append(package_line.split("=")[0].strip())
version = ""
with open("lusportscentre/__init__.py") as f:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE
).group(1)
setup(
name="lusportscentre",
url="https://www.github.com/ravenkls/lusportscentre/",
version=version,
packages=find_packages(),
license="MIT",
description="A simple wrapper for the Lancaster Sports Centre API, allowing you to book sessions etc.",
install_requires=requirements,
python_requires=">=3.8.0",
)
| 30.107143 | 107 | 0.652432 |
acf4f807babf4d84b0ef124d4f8058090cae638c | 12,732 | py | Python | src/oci/vulnerability_scanning/models/container_scan_result_summary.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/vulnerability_scanning/models/container_scan_result_summary.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | src/oci/vulnerability_scanning/models/container_scan_result_summary.py | pabs3/oci-python-sdk | 437ba18ce39af2d1090e277c4bb8750c89f83021 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # coding: utf-8
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class ContainerScanResultSummary(object):
"""
A summary of a result for a container scan
"""
#: A constant which can be used with the highest_problem_severity property of a ContainerScanResultSummary.
#: This constant has a value of "NONE"
HIGHEST_PROBLEM_SEVERITY_NONE = "NONE"
#: A constant which can be used with the highest_problem_severity property of a ContainerScanResultSummary.
#: This constant has a value of "LOW"
HIGHEST_PROBLEM_SEVERITY_LOW = "LOW"
#: A constant which can be used with the highest_problem_severity property of a ContainerScanResultSummary.
#: This constant has a value of "MEDIUM"
HIGHEST_PROBLEM_SEVERITY_MEDIUM = "MEDIUM"
#: A constant which can be used with the highest_problem_severity property of a ContainerScanResultSummary.
#: This constant has a value of "HIGH"
HIGHEST_PROBLEM_SEVERITY_HIGH = "HIGH"
#: A constant which can be used with the highest_problem_severity property of a ContainerScanResultSummary.
#: This constant has a value of "CRITICAL"
HIGHEST_PROBLEM_SEVERITY_CRITICAL = "CRITICAL"
def __init__(self, **kwargs):
"""
Initializes a new ContainerScanResultSummary object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param id:
The value to assign to the id property of this ContainerScanResultSummary.
:type id: str
:param repository:
The value to assign to the repository property of this ContainerScanResultSummary.
:type repository: str
:param image:
The value to assign to the image property of this ContainerScanResultSummary.
:type image: str
:param compartment_id:
The value to assign to the compartment_id property of this ContainerScanResultSummary.
:type compartment_id: str
:param container_scan_target_id:
The value to assign to the container_scan_target_id property of this ContainerScanResultSummary.
:type container_scan_target_id: str
:param highest_problem_severity:
The value to assign to the highest_problem_severity property of this ContainerScanResultSummary.
Allowed values for this property are: "NONE", "LOW", "MEDIUM", "HIGH", "CRITICAL", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type highest_problem_severity: str
:param problem_count:
The value to assign to the problem_count property of this ContainerScanResultSummary.
:type problem_count: int
:param time_started:
The value to assign to the time_started property of this ContainerScanResultSummary.
:type time_started: datetime
:param time_finished:
The value to assign to the time_finished property of this ContainerScanResultSummary.
:type time_finished: datetime
"""
self.swagger_types = {
'id': 'str',
'repository': 'str',
'image': 'str',
'compartment_id': 'str',
'container_scan_target_id': 'str',
'highest_problem_severity': 'str',
'problem_count': 'int',
'time_started': 'datetime',
'time_finished': 'datetime'
}
self.attribute_map = {
'id': 'id',
'repository': 'repository',
'image': 'image',
'compartment_id': 'compartmentId',
'container_scan_target_id': 'containerScanTargetId',
'highest_problem_severity': 'highestProblemSeverity',
'problem_count': 'problemCount',
'time_started': 'timeStarted',
'time_finished': 'timeFinished'
}
self._id = None
self._repository = None
self._image = None
self._compartment_id = None
self._container_scan_target_id = None
self._highest_problem_severity = None
self._problem_count = None
self._time_started = None
self._time_finished = None
@property
def id(self):
"""
**[Required]** Gets the id of this ContainerScanResultSummary.
The `OCID`__ of container scan result. Immutable and generated on creation.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The id of this ContainerScanResultSummary.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this ContainerScanResultSummary.
The `OCID`__ of container scan result. Immutable and generated on creation.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param id: The id of this ContainerScanResultSummary.
:type: str
"""
self._id = id
@property
def repository(self):
"""
**[Required]** Gets the repository of this ContainerScanResultSummary.
Repository in which the container image scanned is located
:return: The repository of this ContainerScanResultSummary.
:rtype: str
"""
return self._repository
@repository.setter
def repository(self, repository):
"""
Sets the repository of this ContainerScanResultSummary.
Repository in which the container image scanned is located
:param repository: The repository of this ContainerScanResultSummary.
:type: str
"""
self._repository = repository
@property
def image(self):
"""
**[Required]** Gets the image of this ContainerScanResultSummary.
Image name
:return: The image of this ContainerScanResultSummary.
:rtype: str
"""
return self._image
@image.setter
def image(self, image):
"""
Sets the image of this ContainerScanResultSummary.
Image name
:param image: The image of this ContainerScanResultSummary.
:type: str
"""
self._image = image
@property
def compartment_id(self):
"""
**[Required]** Gets the compartment_id of this ContainerScanResultSummary.
The `OCID`__ of the container scan result's compartment. This is set to the same as the compartmentId of the container scan target
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The compartment_id of this ContainerScanResultSummary.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this ContainerScanResultSummary.
The `OCID`__ of the container scan result's compartment. This is set to the same as the compartmentId of the container scan target
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param compartment_id: The compartment_id of this ContainerScanResultSummary.
:type: str
"""
self._compartment_id = compartment_id
@property
def container_scan_target_id(self):
"""
Gets the container_scan_target_id of this ContainerScanResultSummary.
The `OCID`__ of container scan target. Immutable and generated on creation.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:return: The container_scan_target_id of this ContainerScanResultSummary.
:rtype: str
"""
return self._container_scan_target_id
@container_scan_target_id.setter
def container_scan_target_id(self, container_scan_target_id):
"""
Sets the container_scan_target_id of this ContainerScanResultSummary.
The `OCID`__ of container scan target. Immutable and generated on creation.
__ https://docs.cloud.oracle.com/iaas/Content/General/Concepts/identifiers.htm
:param container_scan_target_id: The container_scan_target_id of this ContainerScanResultSummary.
:type: str
"""
self._container_scan_target_id = container_scan_target_id
@property
def highest_problem_severity(self):
"""
**[Required]** Gets the highest_problem_severity of this ContainerScanResultSummary.
Highest problem severity in this report
Allowed values for this property are: "NONE", "LOW", "MEDIUM", "HIGH", "CRITICAL", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The highest_problem_severity of this ContainerScanResultSummary.
:rtype: str
"""
return self._highest_problem_severity
@highest_problem_severity.setter
def highest_problem_severity(self, highest_problem_severity):
"""
Sets the highest_problem_severity of this ContainerScanResultSummary.
Highest problem severity in this report
:param highest_problem_severity: The highest_problem_severity of this ContainerScanResultSummary.
:type: str
"""
allowed_values = ["NONE", "LOW", "MEDIUM", "HIGH", "CRITICAL"]
if not value_allowed_none_or_none_sentinel(highest_problem_severity, allowed_values):
highest_problem_severity = 'UNKNOWN_ENUM_VALUE'
self._highest_problem_severity = highest_problem_severity
@property
def problem_count(self):
"""
**[Required]** Gets the problem_count of this ContainerScanResultSummary.
Total number of problems found in this scan
:return: The problem_count of this ContainerScanResultSummary.
:rtype: int
"""
return self._problem_count
@problem_count.setter
def problem_count(self, problem_count):
"""
Sets the problem_count of this ContainerScanResultSummary.
Total number of problems found in this scan
:param problem_count: The problem_count of this ContainerScanResultSummary.
:type: int
"""
self._problem_count = problem_count
@property
def time_started(self):
"""
**[Required]** Gets the time_started of this ContainerScanResultSummary.
Date and time the scan was started, as described in `RFC 3339`__
__ https://tools.ietf.org/rfc/rfc3339
:return: The time_started of this ContainerScanResultSummary.
:rtype: datetime
"""
return self._time_started
@time_started.setter
def time_started(self, time_started):
"""
Sets the time_started of this ContainerScanResultSummary.
Date and time the scan was started, as described in `RFC 3339`__
__ https://tools.ietf.org/rfc/rfc3339
:param time_started: The time_started of this ContainerScanResultSummary.
:type: datetime
"""
self._time_started = time_started
@property
def time_finished(self):
"""
**[Required]** Gets the time_finished of this ContainerScanResultSummary.
Date and time the scan was completed, as described in `RFC 3339`__
__ https://tools.ietf.org/rfc/rfc3339
:return: The time_finished of this ContainerScanResultSummary.
:rtype: datetime
"""
return self._time_finished
@time_finished.setter
def time_finished(self, time_finished):
"""
Sets the time_finished of this ContainerScanResultSummary.
Date and time the scan was completed, as described in `RFC 3339`__
__ https://tools.ietf.org/rfc/rfc3339
:param time_finished: The time_finished of this ContainerScanResultSummary.
:type: datetime
"""
self._time_finished = time_finished
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
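# A minimal usage sketch, assuming the generated __init__ accepts construction
# with no arguments (typical for these auto-generated models); every field
# value below is a hypothetical example, not data from any real scan.
if __name__ == '__main__':
    summary = ContainerScanResultSummary()
    summary.repository = 'ocir.io/example-tenancy/app'  # hypothetical repository
    summary.image = 'app:1.0'                           # hypothetical image tag
    summary.highest_problem_severity = 'LOW'
    summary.problem_count = 0
    print(summary)  # __repr__ delegates to formatted_flat_dict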
| 34.692098 | 245 | 0.672008 |
acf4f829e1a30bb2d598d2bfe224a58c19fc2d3d | 1,514 | py | Python | synonyms/views.py | xavierigor/synonym-api | d23a3c92b936b75ba67e70828464a716aa121901 | [
"MIT"
] | null | null | null | synonyms/views.py | xavierigor/synonym-api | d23a3c92b936b75ba67e70828464a716aa121901 | [
"MIT"
] | null | null | null | synonyms/views.py | xavierigor/synonym-api | d23a3c92b936b75ba67e70828464a716aa121901 | [
"MIT"
] | null | null | null | from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from config.settings import SYNONYM_API_BASE_URL
from synonyms.constants import PARAM_NOT_SPECIFIED_ERROR
class SynonymList(APIView):
def _fetch_synonyms(self, term):
import requests
from bs4 import BeautifulSoup
url = SYNONYM_API_BASE_URL + term
response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
synonym_sets = soup.select('#page #content div .s-wrapper')
formatted_dict = dict(term=term, results=[])
for synonym_set in synonym_sets:
meaning = synonym_set.select_one('.sentido')
            if meaning is not None:
                # Keep only the text, stripping a trailing colon if present;
                # previously a bs4 Tag leaked into the response whenever the
                # text did not end with a colon
                meaning = meaning.text.rstrip(':')
synonyms = [synonym.text for synonym in synonym_set.select('.sinonimos .sinonimo')]
formatted_dict['results'].append({
'meaning': meaning,
'synonyms': synonyms
})
return formatted_dict
def get(self, request, format=None):
term = request.query_params.get('term', None)
if term is None:
return Response({"message": PARAM_NOT_SPECIFIED_ERROR}, status=status.HTTP_400_BAD_REQUEST)
synonyms = self._fetch_synonyms(term=term)
return Response(synonyms)
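# Usage note, assuming a URL configuration that routes this view at, say,
# /synonyms/?term=feliz (the path is an assumption; only the payload shape
# follows from the code): a successful response looks like
#   {"term": "feliz", "results": [{"meaning": "...", "synonyms": ["...", "..."]}]}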
| 34.409091 | 103 | 0.650594 |
acf4f893a99dd73ca0dc3e8a8461049ea8145e28 | 1,639 | py | Python | Minor Project/Weather GUI/main.py | ComputerScientist-01/Technocolabs-Internship-Project | 3675cc6b9a40a885a29b105ec9b29945a1e4620c | [
"MIT"
] | 4 | 2020-07-08T11:32:29.000Z | 2021-08-05T02:54:02.000Z | Minor Project/Weather GUI/main.py | ComputerScientist-01/Technocolabs-Internship-Project | 3675cc6b9a40a885a29b105ec9b29945a1e4620c | [
"MIT"
] | null | null | null | Minor Project/Weather GUI/main.py | ComputerScientist-01/Technocolabs-Internship-Project | 3675cc6b9a40a885a29b105ec9b29945a1e4620c | [
"MIT"
] | null | null | null | from matplotlib import pyplot as plt
from matplotlib import dates
from pyowm_helper import get_temperature
degree_sign = u'\N{DEGREE SIGN}'
def init_plot():
plt.figure('PyOWM Weather', figsize=(5,4))
plt.xlabel('Day')
plt.ylabel(f'Temperature ({degree_sign}F)')
plt.title('Weekly Forecast')
def plot_temperatures(days, temp_min, temp_max):
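    # Convert datetimes to Matplotlib's float date numbers so each bar series
    # can be shifted numerically by +/- 0.25 days around the shared tick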
days = dates.date2num(days)
bar_min = plt.bar(days-.25, temp_min, width=0.5, color='#4286f4')
bar_max = plt.bar(days+.25, temp_max, width=0.5, color='#e58510')
return (bar_min, bar_max)
def label_xaxis(days):
plt.xticks(days)
axes = plt.gca()
xaxis_format = dates.DateFormatter('%m/%d')
axes.xaxis.set_major_formatter(xaxis_format)
def write_temperatures_on_bar_chart(bar_min, bar_max):
axes = plt.gca()
y_axis_max = axes.get_ylim()[1]
label_offset = y_axis_max * .1
# Write the temperatures on the chart
for bar_chart in [bar_min, bar_max]:
for index, bar in enumerate(bar_chart):
height = bar.get_height()
xpos = bar.get_x() + bar.get_width()/2.0
ypos = height - label_offset
label_text = str(int(height)) + degree_sign
plt.text(xpos, ypos, label_text,
horizontalalignment='center',
verticalalignment='bottom',
color='white')
if __name__ == '__main__':
init_plot()
days, temp_min, temp_max = get_temperature()
bar_min, bar_max = plot_temperatures(days, temp_min, temp_max)
label_xaxis(days)
write_temperatures_on_bar_chart(bar_min, bar_max)
plt.show()
| 28.754386 | 69 | 0.649786 |
acf4f8ad62bb07d3e7f5872bb5e589f79f7561a9 | 6,200 | py | Python | hail/python/hailtop/utils/validate/validate.py | FINNGEN/hail | 03fabf5dad71415aeca641ef1618e5352639d683 | [
"MIT"
] | 1 | 2022-02-10T18:39:56.000Z | 2022-02-10T18:39:56.000Z | hail/python/hailtop/utils/validate/validate.py | FINNGEN/hail | 03fabf5dad71415aeca641ef1618e5352639d683 | [
"MIT"
] | 46 | 2020-11-02T02:23:14.000Z | 2022-03-24T21:26:28.000Z | hail/python/hailtop/utils/validate/validate.py | FINNGEN/hail | 03fabf5dad71415aeca641ef1618e5352639d683 | [
"MIT"
] | 1 | 2021-05-19T16:05:26.000Z | 2021-05-19T16:05:26.000Z | from typing import Union, Dict, Pattern, Callable, Any, List
import re
import logging
log = logging.getLogger('foo')
class ValidationError(Exception):
def __init__(self, reason):
super().__init__()
self.reason = reason
class RequiredKey:
def __init__(self, key: str):
self.key = key
Key = Union[str, RequiredKey]
class TypedValidator:
def __init__(self, t):
self.t = t
def validate(self, name, obj):
if not isinstance(obj, self.t):
raise ValidationError(f'{name} is not {self.t}')
class DictValidator(TypedValidator):
def __init__(self, vchecker: 'Validator'):
super().__init__(dict)
self.vchecker = vchecker
def validate(self, name: str, obj):
super().validate(name, obj)
for k, v in obj.items():
if not isinstance(k, str):
raise ValidationError(f'{name} has non-str key')
self.vchecker.validate(f"{name}[{k}]", v)
class KeyedValidator(TypedValidator):
def __init__(self, keyed_checkers: Dict[Key, 'Validator']):
super().__init__(dict)
self.checkers = {}
for k, v in keyed_checkers.items():
if isinstance(k, RequiredKey):
self.checkers[k.key] = (v, True)
else:
self.checkers[k] = (v, False)
def __getitem__(self, key: str):
return self.checkers[key][0]
def validate(self, name: str, obj):
super().validate(name, obj)
unknown_keys = set(obj.keys()) - set(self.checkers.keys())
if len(unknown_keys) != 0:
raise ValidationError(f'unknown keys in {name}: {unknown_keys}')
for k, (checker, required) in self.checkers.items():
if required and k not in obj:
raise ValidationError(f'{name} missing required key {k}.')
if k in obj:
checker.validate(f"{name}.{k}", obj[k])
class ListValidator(TypedValidator):
def __init__(self, checker: 'Validator'):
super().__init__(list)
self.checker = checker
def validate(self, name: str, obj):
super().validate(name, obj)
for i, elt in enumerate(obj):
self.checker.validate(f"{name}[{i}]", elt)
class SetValidator:
def __init__(self, valid):
self.valid = valid
def validate(self, name: str, obj):
if obj not in self.valid:
raise ValidationError(f'{name} must be one of: {self.valid}')
class RegexValidator(TypedValidator):
def __init__(self, pattern: str, re_obj: Pattern, maxlen: int):
super().__init__(str)
self.pattern = pattern
self.re_obj = re_obj if re_obj is not None else re.compile(pattern)
self.maxlen = maxlen
def validate(self, name: str, obj):
super().validate(name, obj)
if self.maxlen is not None and len(obj) > self.maxlen:
raise ValidationError(f'length of {name} must be <= {self.maxlen}')
if not self.re_obj.fullmatch(obj):
raise ValidationError(f'{name} must match regex: {self.pattern}')
class NumericValidator:
def __init__(self, conditions: Dict[str, Callable[[Any], Any]]):
self.conditions = conditions
def validate(self, name: str, obj):
if not isinstance(obj, int) and not isinstance(obj, float):
raise ValidationError(f'{name} is not numeric')
for text, condition in self.conditions.items():
if not condition(obj):
raise ValidationError(f'{name} does not satisfy the condition {text}')
class SwitchValidator(TypedValidator):
def __init__(self, key: str, checkers: Dict[str, Dict[Key, 'Validator']]):
super().__init__(dict)
self.key = key
self.valid_key = oneof(*checkers.keys())
self.checkers = {k: keyed({required(key): self.valid_key, **fields})
for k, fields in checkers.items()}
def __getitem__(self, key):
return self.checkers[key]
def validate(self, name: str, obj):
super().validate(name, obj)
key = obj[self.key]
self.valid_key.validate(f"{name}.{key}", key)
self.checkers[key].validate(f"{name}", obj)
class NullableValidator:
def __init__(self, wrapped: 'Validator'):
self.checker = wrapped
def validate(self, name: str, obj):
if obj is not None:
self.checker.validate(name, obj)
class TruthyValidator:
def validate(self, name: str, obj): # pylint: disable=no-self-use
if not obj:
raise ValidationError(f'{name} cannot be {obj}')
class MultipleValidator:
def __init__(self, checkers: List['Validator']):
self.checkers = checkers
def validate(self, name: str, obj):
excs = []
for checker in self.checkers:
try:
return checker.validate(name, obj)
except ValidationError as e:
excs.append(e)
if excs:
reasons = ' or '.join([e.reason for e in excs])
log.info(reasons)
raise ValidationError(f'{name} does not satisfy any conditions: {reasons}')
def required(key: str):
return RequiredKey(key)
str_type = TypedValidator(str)
non_empty_str_type = MultipleValidator([str_type, TruthyValidator()])
bool_type = TypedValidator(bool)
int_type = TypedValidator(int)
Validator = Union[TypedValidator, NumericValidator, NullableValidator, TruthyValidator, SetValidator]
def dictof(vchecker: Validator):
return DictValidator(vchecker)
def keyed(checkers: Dict[Key, Validator]):
return KeyedValidator(checkers)
def listof(checker: Validator):
return ListValidator(checker)
def oneof(*items):
return SetValidator(set(items))
def regex(pattern, re_obj=None, maxlen=None):
return RegexValidator(pattern, re_obj, maxlen)
def nullable(wrapped: Validator):
return NullableValidator(wrapped)
def numeric(**conditions: Callable[[Any], Any]):
return NumericValidator(conditions)
def switch(key: str, checkers: Dict[str, Dict[Key, Validator]]):
return SwitchValidator(key, checkers)
def anyof(*checkers: Validator):
return MultipleValidator(list(checkers))
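# A minimal usage sketch showing how the combinators above compose into a
# schema; every field name below is invented for illustration.
if __name__ == '__main__':
    job_schema = keyed({
        required('name'): regex(r'[a-z][a-z0-9-]*', maxlen=63),
        'cores': numeric(positive=lambda x: x > 0),
        'env': nullable(dictof(str_type)),
        'tags': listof(non_empty_str_type),
    })
    try:
        job_schema.validate('job', {'name': 'align-reads', 'cores': 4})
        print('document is valid')
    except ValidationError as e:
        print('invalid:', e.reason)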
| 29.245283 | 101 | 0.629677 |
acf4fb028881307f74555471c660c3bb8d6acf2e | 1,802 | py | Python | kubernetes/test/test_v1_secret_volume_source.py | mariusgheorghies/python | 68ac7e168963d8b5a81dc493b1973d29e903a15b | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_secret_volume_source.py | mariusgheorghies/python | 68ac7e168963d8b5a81dc493b1973d29e903a15b | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_v1_secret_volume_source.py | mariusgheorghies/python | 68ac7e168963d8b5a81dc493b1973d29e903a15b | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.20.7
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import kubernetes.client
from kubernetes.client.models.v1_secret_volume_source import V1SecretVolumeSource # noqa: E501
from kubernetes.client.rest import ApiException
class TestV1SecretVolumeSource(unittest.TestCase):
"""V1SecretVolumeSource unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test V1SecretVolumeSource
        include_optional is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = kubernetes.client.models.v1_secret_volume_source.V1SecretVolumeSource() # noqa: E501
if include_optional :
return V1SecretVolumeSource(
default_mode = 56,
items = [
kubernetes.client.models.v1/key_to_path.v1.KeyToPath(
key = '0',
mode = 56,
path = '0', )
],
optional = True,
secret_name = '0'
)
else :
return V1SecretVolumeSource(
)
def testV1SecretVolumeSource(self):
"""Test V1SecretVolumeSource"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| 29.540984 | 124 | 0.628191 |
acf4fc1984ba573ff5ac9aae68a8adf756455a80 | 5,371 | py | Python | keras_contrib/utils/test_utils.py | synthetik-technologies/keras-contrib | 9af00fe1469a49b5ebb86e0156c43773d2e8211f | [
"MIT"
] | 1 | 2019-01-08T15:48:03.000Z | 2019-01-08T15:48:03.000Z | keras_contrib/utils/test_utils.py | Tez01/keras-contrib | 9af00fe1469a49b5ebb86e0156c43773d2e8211f | [
"MIT"
] | null | null | null | keras_contrib/utils/test_utils.py | Tez01/keras-contrib | 9af00fe1469a49b5ebb86e0156c43773d2e8211f | [
"MIT"
] | null | null | null | """Utilities related to Keras unit tests."""
import numpy as np
from numpy.testing import assert_allclose
import inspect
import six
from keras.engine import Model, Input
from keras.models import Sequential
from keras import backend as K
def get_test_data(num_train=1000, num_test=500, input_shape=(10,),
output_shape=(2,),
classification=True, num_classes=2):
"""Generates test data to train a model on.
classification=True overrides output_shape
(i.e. output_shape is set to (1,)) and the output
consists in integers in [0, num_class-1].
Otherwise: float output with shape output_shape.
"""
samples = num_train + num_test
if classification:
y = np.random.randint(0, num_classes, size=(samples,))
X = np.zeros((samples,) + input_shape)
for i in range(samples):
X[i] = np.random.normal(loc=y[i], scale=0.7, size=input_shape)
else:
y_loc = np.random.random((samples,))
X = np.zeros((samples,) + input_shape)
y = np.zeros((samples,) + output_shape)
for i in range(samples):
X[i] = np.random.normal(loc=y_loc[i], scale=0.7, size=input_shape)
y[i] = np.random.normal(loc=y_loc[i], scale=0.7, size=output_shape)
return (X[:num_train], y[:num_train]), (X[num_train:], y[num_train:])
def layer_test(layer_cls, kwargs={}, input_shape=None, input_dtype=None,
input_data=None, expected_output=None,
expected_output_dtype=None, fixed_batch_size=False, tolerance=1e-3):
"""Test routine for a layer with a single input tensor
and single output tensor.
"""
# generate input data
if input_data is None:
assert input_shape
if not input_dtype:
input_dtype = K.floatx()
input_data_shape = list(input_shape)
for i, e in enumerate(input_data_shape):
if e is None:
input_data_shape[i] = np.random.randint(1, 4)
input_data = (10 * np.random.random(input_data_shape))
input_data = input_data.astype(input_dtype)
else:
if input_shape is None:
input_shape = input_data.shape
if input_dtype is None:
input_dtype = input_data.dtype
if expected_output_dtype is None:
expected_output_dtype = input_dtype
# instantiation
layer = layer_cls(**kwargs)
# test get_weights , set_weights at layer level
weights = layer.get_weights()
layer.set_weights(weights)
# test and instantiation from weights
    if 'weights' in inspect.getargspec(layer_cls.__init__).args:
kwargs['weights'] = weights
layer = layer_cls(**kwargs)
# test in functional API
if fixed_batch_size:
x = Input(batch_shape=input_shape, dtype=input_dtype)
else:
x = Input(shape=input_shape[1:], dtype=input_dtype)
y = layer(x)
assert K.dtype(y) == expected_output_dtype
# check shape inference
model = Model(x, y)
expected_output_shape = layer.compute_output_shape(input_shape)
actual_output = model.predict(input_data)
actual_output_shape = actual_output.shape
for expected_dim, actual_dim in zip(expected_output_shape,
actual_output_shape):
if expected_dim is not None:
assert expected_dim == actual_dim
if expected_output is not None:
if tolerance is not None:
assert_allclose(actual_output, expected_output, rtol=tolerance)
# test serialization, weight setting at model level
model_config = model.get_config()
recovered_model = Model.from_config(model_config)
if model.weights:
weights = model.get_weights()
recovered_model.set_weights(weights)
_output = recovered_model.predict(input_data)
if tolerance is not None:
assert_allclose(_output, actual_output, rtol=tolerance)
# test training mode (e.g. useful for dropout tests)
model.compile('rmsprop', 'mse')
model.train_on_batch(input_data, actual_output)
# test as first layer in Sequential API
layer_config = layer.get_config()
layer_config['batch_input_shape'] = input_shape
layer = layer.__class__.from_config(layer_config)
model = Sequential()
model.add(layer)
actual_output = model.predict(input_data)
actual_output_shape = actual_output.shape
for expected_dim, actual_dim in zip(expected_output_shape,
actual_output_shape):
if expected_dim is not None:
assert expected_dim == actual_dim
if expected_output is not None:
if tolerance is not None:
assert_allclose(actual_output, expected_output, rtol=1e-3)
# test serialization, weight setting at model level
model_config = model.get_config()
recovered_model = Sequential.from_config(model_config)
if model.weights:
weights = model.get_weights()
recovered_model.set_weights(weights)
_output = recovered_model.predict(input_data)
if tolerance is not None:
assert_allclose(_output, actual_output, rtol=1e-3)
# test training mode (e.g. useful for dropout tests)
model.compile('rmsprop', 'mse')
model.train_on_batch(input_data, actual_output)
# for further checks in the caller function
return actual_output
| 37.041379 | 83 | 0.667846 |
acf4fcd3dadb430bad8f74f3a206c4cebd910720 | 4,411 | py | Python | s4_debugging_and_logging/exercise_files/vae_mnist_working.py | oliverkinch/dtu_mlops | ce3a1f8f02ee95105b7b907735c39ad082321a4b | [
"Apache-2.0"
] | 94 | 2021-06-01T09:53:45.000Z | 2022-03-29T21:06:22.000Z | s4_debugging_and_logging/exercise_files/vae_mnist_working.py | oliverkinch/dtu_mlops | ce3a1f8f02ee95105b7b907735c39ad082321a4b | [
"Apache-2.0"
] | 4 | 2021-06-07T08:28:40.000Z | 2022-01-07T19:56:40.000Z | s4_debugging_and_logging/exercise_files/vae_mnist_working.py | oliverkinch/dtu_mlops | ce3a1f8f02ee95105b7b907735c39ad082321a4b | [
"Apache-2.0"
] | 133 | 2021-06-05T07:20:37.000Z | 2022-03-22T10:56:51.000Z | """
Adapted from
https://github.com/Jackson-Kang/Pytorch-VAE-tutorial/blob/master/01_Variational_AutoEncoder.ipynb
A simple implementation of Gaussian MLP Encoder and Decoder trained on MNIST
"""
import torch
import torch.nn as nn
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
from torchvision.datasets import MNIST
from torchvision.utils import save_image
# Model Hyperparameters
dataset_path = 'datasets'
cuda = torch.cuda.is_available()
DEVICE = torch.device("cuda" if cuda else "cpu")
batch_size = 100
x_dim = 784
hidden_dim = 400
latent_dim = 20
lr = 1e-3
epochs = 5
# Data loading
mnist_transform = transforms.Compose([transforms.ToTensor()])
train_dataset = MNIST(dataset_path, transform=mnist_transform, train=True, download=True)
test_dataset = MNIST(dataset_path, transform=mnist_transform, train=False, download=True)
train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=False)
class Encoder(nn.Module):
def __init__(self, input_dim, hidden_dim, latent_dim):
super(Encoder, self).__init__()
self.FC_input = nn.Linear(input_dim, hidden_dim)
self.FC_mean = nn.Linear(hidden_dim, latent_dim)
        self.FC_var = nn.Linear(hidden_dim, latent_dim)
self.training = True
def forward(self, x):
h_ = torch.relu(self.FC_input(x))
mean = self.FC_mean(h_)
log_var = self.FC_var(h_)
std = torch.exp(0.5*log_var)
z = self.reparameterization(mean, std)
return z, mean, log_var
    def reparameterization(self, mean, std):
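        # Reparameterization trick: sample epsilon ~ N(0, I), then shift and
        # scale it so gradients can flow back through mean and std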
epsilon = torch.randn_like(std)
z = mean + std*epsilon
return z
class Decoder(nn.Module):
def __init__(self, latent_dim, hidden_dim, output_dim):
super(Decoder, self).__init__()
self.FC_hidden = nn.Linear(latent_dim, hidden_dim)
self.FC_output = nn.Linear(hidden_dim, output_dim)
def forward(self, x):
h = torch.relu(self.FC_hidden(x))
x_hat = torch.sigmoid(self.FC_output(h))
return x_hat
class Model(nn.Module):
def __init__(self, Encoder, Decoder):
super(Model, self).__init__()
self.Encoder = Encoder
self.Decoder = Decoder
def forward(self, x):
z, mean, log_var = self.Encoder(x)
x_hat = self.Decoder(z)
return x_hat, mean, log_var
encoder = Encoder(input_dim=x_dim, hidden_dim=hidden_dim, latent_dim=latent_dim)
decoder = Decoder(latent_dim=latent_dim, hidden_dim=hidden_dim, output_dim=x_dim)
model = Model(Encoder=encoder, Decoder=decoder).to(DEVICE)
from torch.optim import Adam
BCE_loss = nn.BCELoss()
def loss_function(x, x_hat, mean, log_var):
reproduction_loss = nn.functional.binary_cross_entropy(x_hat, x, reduction='sum')
KLD = - 0.5 * torch.sum(1+ log_var - mean.pow(2) - log_var.exp())
return reproduction_loss + KLD
optimizer = Adam(model.parameters(), lr=lr)
print("Start training VAE...")
model.train()
for epoch in range(epochs):
overall_loss = 0
for batch_idx, (x, _) in enumerate(train_loader):
x = x.view(batch_size, x_dim)
x = x.to(DEVICE)
optimizer.zero_grad()
x_hat, mean, log_var = model(x)
loss = loss_function(x, x_hat, mean, log_var)
overall_loss += loss.item()
loss.backward()
optimizer.step()
print("\tEpoch", epoch + 1, "complete!", "\tAverage Loss: ", overall_loss / (batch_idx*batch_size))
print("Finish!!")
# Generate reconstructions
model.eval()
with torch.no_grad():
for batch_idx, (x, _) in enumerate(test_loader):
x = x.view(batch_size, x_dim)
x = x.to(DEVICE)
x_hat, _, _ = model(x)
break
save_image(x.view(batch_size, 1, 28, 28), 'orig_data.png')
save_image(x_hat.view(batch_size, 1, 28, 28), 'reconstructions.png')
# Generate samples
with torch.no_grad():
noise = torch.randn(batch_size, latent_dim).to(DEVICE)
generated_images = decoder(noise)
save_image(generated_images.view(batch_size, 1, 28, 28), 'generated_sample.png')
| 31.283688 | 107 | 0.649739 |
acf4fd56e959f67818d1f824163af050ee8b561c | 882 | py | Python | plugins/helper.py | tekavci/suspus | 55ced0049d6a1ecf555fa1636af3b5a23472c161 | [
"MIT"
] | null | null | null | plugins/helper.py | tekavci/suspus | 55ced0049d6a1ecf555fa1636af3b5a23472c161 | [
"MIT"
] | null | null | null | plugins/helper.py | tekavci/suspus | 55ced0049d6a1ecf555fa1636af3b5a23472c161 | [
"MIT"
] | null | null | null | from . import paginate_module, load_module, modules
from pyrogram import Client, filters
from pyrogram.types import InlineKeyboardButton, InlineKeyboardMarkup, Message
from database.lang_utils import get_message
@Client.on_message(filters.command("help"))
async def help_cmds_(_, message: Message):
chat_id = message.chat.id
user_id = message.from_user.id
if modules:
modules.clear()
load_module(user_id)
keyboard = paginate_module(chat_id, user_id)
keyboard.pop(-1)
keyboard[-1].append(InlineKeyboardButton(f"🗑️ {get_message(chat_id, 'close_btn_name')}", f"close|{user_id}"))
await message.reply(
get_message(chat_id, "here_all_commands"),
reply_markup=InlineKeyboardMarkup(keyboard)
)
modules.clear()
keyboard.clear()
__cmds__ = ["help"]
__help__ = {
"help": "help_help"
}
| 29.4 | 113 | 0.725624 |
acf4fdd60cf2ed6550397bcc2f580aa6e4617340 | 6,657 | py | Python | modules/tests/inv/send_receive_item.py | andygimma/eden | 716d5e11ec0030493b582fa67d6f1c35de0af50d | [
"MIT"
] | 1 | 2019-08-20T16:32:33.000Z | 2019-08-20T16:32:33.000Z | modules/tests/inv/send_receive_item.py | andygimma/eden | 716d5e11ec0030493b582fa67d6f1c35de0af50d | [
"MIT"
] | null | null | null | modules/tests/inv/send_receive_item.py | andygimma/eden | 716d5e11ec0030493b582fa67d6f1c35de0af50d | [
"MIT"
] | null | null | null | """ Sahana Eden Module Automated Tests - INV003 Send - Receive Items
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from helper import InvTestFunctions
class SendReceiveItem(InvTestFunctions):
"""
Inventory Test - Send-Receive Workflow (Send-Receive items)
@Case: INV003
        @param items: This test sends and receives a specific item to another party.
        This test assumes that regression/inv-mngt has been added to prepop
- e.g. via demo/IFRC_Train
@TestDoc: https://docs.google.com/spreadsheet/ccc?key=0AmB3hMcgB-3idG1XNGhhRG9QWF81dUlKLXpJaFlCMFE
@Test Wiki: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Testing
"""
def test_inv003_send_receive_items(self):
""" Tests for Send-Receive - Receive Workflow """
user = "admin"
method = "search"
send_data = [("site_id",
"Timor-Leste Red Cross Society (CVTL) National Warehouse (Warehouse)",
"option",
),
("type",
"Internal Shipment",
"option",
),
("to_site_id",
"Lospalos Warehouse (Warehouse)",
"option",
),
("sender_id",
"Beatriz de Carvalho",
"autocomplete",
),
("recipient_id",
"Liliana Otilia",
"autocomplete",
)
]
item_data = [
[("send_inv_item_id",
"Blankets - 123457 - Australian Red Cross",
"inv_widget",
),
("quantity",
"8",
),
],
[("send_inv_item_id",
"Jerry Cans - 123461 - Australian Red Cross",
"inv_widget",
),
("quantity",
"13",
),
],
[("send_inv_item_id",
"Kitchen Sets - 123458 - Australian Red Cross",
"inv_widget",
),
("quantity",
"4",
),
]
]
recv_data = [
["Blankets",
"8",
],
["Jerry Cans",
"13",
],
["Kitchen Sets",
"4",
],
]
# Create the send record
send_result = self.send(user, send_data)
send_id = self.send_get_id(send_result)
        send_ref = self.send_get_ref(send_result)
# Add the items to the send record
cnt = 0
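        # Attach each newly created track-item record to its recv_data row so
        # the receive step below can match quantities against what was sent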
for data in item_data:
item_result = self.track_send_item(user, send_id, data)
recv_data[cnt].append(item_result["after"].records[0])
cnt += 1
# Send the shipment
self.send_shipment(user, send_id)
# Receive the shipment
self.recv_sent_shipment(method, user, send_ref, recv_data)
def test_inv021_send_and_confirm(self):
""" Test to send a shipment and confirm that it is receive outside of the system """
user = "admin"
method = "search"
send_data = [("site_id",
"Timor-Leste Red Cross Society (CVTL) National Warehouse (Warehouse)",
"option",
),
("type",
"Internal Shipment",
"option",
),
("to_site_id",
"Lori (Facility)",
"option",
),
("sender_id",
"Beatriz de Carvalho",
"autocomplete",
),
]
item_data = [
[("send_inv_item_id",
"Blankets - 123457 - Australian Red Cross",
"inv_widget",
),
("quantity",
"6",
),
],
[("send_inv_item_id",
"Jerry Cans - 123461 - Australian Red Cross",
"inv_widget",
),
("quantity",
"3",
),
],
[("send_inv_item_id",
"Kitchen Sets - 123458 - Australian Red Cross",
"inv_widget",
),
("quantity",
"2",
),
]
]
# Create the send record
send_result = self.send(user, send_data)
send_id = self.send_get_id(send_result)
        send_ref = self.send_get_ref(send_result)
# Add the items to the send record
for data in item_data:
item_result = self.track_send_item(user, send_id, data)
# Send the shipment
self.send_shipment(user, send_id)
# Confirm Receipt of the shipment
self.confirm_received_shipment(user, send_id)
# END =========================================================================
| 37.189944 | 110 | 0.464774 |
acf500867c237a6e7b851ea05e1c1b515b27662f | 270 | py | Python | example.py | Methimpact/flask-terminado | 66610adef1f238b7ea740b940407f505c3ce7233 | [
"BSD-2-Clause"
] | 18 | 2017-04-27T09:02:14.000Z | 2022-01-26T09:46:09.000Z | example.py | nathanielobrown/flask-terminado | 66610adef1f238b7ea740b940407f505c3ce7233 | [
"BSD-2-Clause"
] | null | null | null | example.py | nathanielobrown/flask-terminado | 66610adef1f238b7ea740b940407f505c3ce7233 | [
"BSD-2-Clause"
] | 7 | 2018-02-04T14:45:55.000Z | 2020-09-30T05:36:45.000Z | import flask
from flask_terminado import Terminal
app = flask.Flask(__name__)
@app.route('/')
def home():
return 'home'
terminal = Terminal(app)
terminal.add_terminal('/bash', ['bash'])
if __name__ == '__main__':
terminal.run(port=5000, host='127.0.0.1')
| 15 | 45 | 0.681481 |
acf500bc4cff06fa782acc48cebd01c2963d8483 | 2,501 | py | Python | IMLearn/learners/regressors/polynomial_fitting.py | assafine/IML.HUJI | b81b8beff05b5f120aa21a2f7fe90b4db95174f4 | [
"MIT"
] | null | null | null | IMLearn/learners/regressors/polynomial_fitting.py | assafine/IML.HUJI | b81b8beff05b5f120aa21a2f7fe90b4db95174f4 | [
"MIT"
] | null | null | null | IMLearn/learners/regressors/polynomial_fitting.py | assafine/IML.HUJI | b81b8beff05b5f120aa21a2f7fe90b4db95174f4 | [
"MIT"
] | null | null | null | from __future__ import annotations
from typing import NoReturn
from . import LinearRegression
from ...base import BaseEstimator
import numpy as np
class PolynomialFitting(BaseEstimator):
"""
Polynomial Fitting using Least Squares estimation
"""
def __init__(self, k: int) -> PolynomialFitting:
"""
Instantiate a polynomial fitting estimator
Parameters
----------
k : int
Degree of polynomial to fit
"""
super().__init__()
self._degree = k
self._lr = LinearRegression(include_intercept=False)
def _fit(self, X: np.ndarray, y: np.ndarray) -> NoReturn:
"""
Fit Least Squares model to polynomial transformed samples
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input data to fit an estimator for
y : ndarray of shape (n_samples, )
Responses of input data to fit to
"""
vander_mat = self.__transform(X)
self._lr.fit(vander_mat, y)
def _predict(self, X: np.ndarray) -> np.ndarray:
"""
Predict responses for given samples using fitted estimator
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input data to predict responses for
Returns
-------
responses : ndarray of shape (n_samples, )
Predicted responses of given samples
"""
vander_mat = self.__transform(X)
return self._lr.predict(vander_mat)
def _loss(self, X: np.ndarray, y: np.ndarray) -> float:
"""
Evaluate performance under MSE loss function
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Test samples
y : ndarray of shape (n_samples, )
True labels of test samples
Returns
-------
loss : float
Performance under MSE loss function
"""
return self._lr.loss(self.__transform(X), y)
def __transform(self, X: np.ndarray) -> np.ndarray:
"""
Transform given input according to the univariate polynomial transformation
Parameters
----------
X: ndarray of shape (n_samples,)
Returns
-------
transformed: ndarray of shape (n_samples, k+1)
Vandermonde matrix of given samples up to degree k
"""
return np.vander(X, self._degree + 1, True)
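# A minimal usage sketch; the module's relative imports keep it from running
# as a script, so the example is left as a comment. `fit` and `loss` are
# assumed to be the public BaseEstimator wrappers around the hooks above.
#     X = np.linspace(-3, 3, 50)
#     y = 2 * X ** 2 - X + 1
#     poly = PolynomialFitting(k=2)
#     poly.fit(X, y)
#     poly.loss(X, y)  # near zero on noiseless data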
| 27.184783 | 83 | 0.57617 |
acf500be66288db9a681d82b7779c8d637cf728b | 2,403 | py | Python | annotation_gui_gcp/lib/views/web_view.py | ZhengZhangCS/OpenSfM | 7a585e6dfe03620a53faa9151ae86e71d9c7b1ff | [
"BSD-2-Clause"
] | 1 | 2021-11-17T07:39:31.000Z | 2021-11-17T07:39:31.000Z | annotation_gui_gcp/lib/views/web_view.py | ZhengZhangCS/OpenSfM | 7a585e6dfe03620a53faa9151ae86e71d9c7b1ff | [
"BSD-2-Clause"
] | null | null | null | annotation_gui_gcp/lib/views/web_view.py | ZhengZhangCS/OpenSfM | 7a585e6dfe03620a53faa9151ae86e71d9c7b1ff | [
"BSD-2-Clause"
] | null | null | null | import abc
import json
import time
from queue import Queue
from flask import Response, jsonify, render_template, request
distinct_colors = [
"#46f0f0",
"#f032e6",
"#bcf60c",
"#fabebe",
"#008080",
"#9a6324",
"#fffac8",
"#800000",
"#aaffc3",
"#808000",
"#3cb44b",
"#ffe119",
"#4363d8",
"#f58231",
"#911eb4",
"#000075",
"#808080",
"#ffffff",
"#000000",
]
class WebView(abc.ABC):
def __init__(self, main_ui, web_app, route_prefix):
self.main_ui = main_ui
self.app = web_app
self.eventQueue = Queue()
self.latlons = main_ui.image_manager.load_latlons()
self.register_routes(route_prefix)
    @abc.abstractmethod
def sync_to_client(self):
pass
    @abc.abstractmethod
def process_client_message(self, data):
pass
def template_name(self):
class_name = type(self).__name__
return class_name
def register_routes(self, route):
def send_main_page():
template = self.template_name()
return render_template(f"{template}.html", class_name=template)
self.app.add_url_rule(route, route + "_index", send_main_page)
def postdata():
data = request.get_json()
# Do something with the event received from the client
if data["event"] != "init":
self.process_client_message(data)
# Send a sync event back to the client to reflect the changed state (all views)
self.main_ui.sync_to_client()
return jsonify(success=True)
self.app.add_url_rule(
route + "/postdata", route + "_postdata", postdata, methods=["POST"]
)
# Stream for server -> client updates through Server-Sent Events
def stream():
def eventStream():
while True:
msg = self.eventQueue.get() # blocks until a new message arrives
yield msg
return Response(eventStream(), mimetype="text/event-stream")
self.app.add_url_rule(route + "/stream", route + "_stream", stream)
def send_sse_message(self, data, event_type="sync"):
# Send to the client
data["time"] = time.time()
sse_string = f"event: {event_type}\ndata: {json.dumps(data)}\n\n"
self.eventQueue.put(sse_string)
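# A minimal concrete view sketch filling in the two abstract hooks; the class
# name and payload keys are invented for illustration.
class EchoView(WebView):
    def sync_to_client(self):
        self.send_sse_message({'status': 'ok'})
    def process_client_message(self, data):
        print('client event:', data.get('event'))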
| 27 | 91 | 0.593841 |
acf50124f0b005e373a614902ba43e7e863b7349 | 2,362 | py | Python | pyDBMS/database/sqlite_database.py | JacksonDorsett/pydb | 3544a99ac5987cb3064fd0ef2f82d83b9ca4107b | [
"MIT"
] | null | null | null | pyDBMS/database/sqlite_database.py | JacksonDorsett/pydb | 3544a99ac5987cb3064fd0ef2f82d83b9ca4107b | [
"MIT"
] | 6 | 2022-01-12T20:36:37.000Z | 2022-02-02T20:59:59.000Z | pyDBMS/database/sqlite_database.py | JacksonDorsett/pydb | 3544a99ac5987cb3064fd0ef2f82d83b9ca4107b | [
"MIT"
] | null | null | null | from pyDBMS.database.connections.db_connection import SQLiteDBConnection
from pyDBMS.database.model_descriptor import SQLiteModelDescriptor
from .abstract_database import AbstractDatabase
from ..dbtype import DBType, DynamicModel, Float, Integer, Model, String
from typing import Union, List
class SQLiteDatabase(AbstractDatabase):
'''Represents the connection to a sqlite database hosted locally.'''
def __init__(self, filename, **kwargs) -> None:
        super().__init__(SQLiteDBConnection(filename, **kwargs), model_descriptor=SQLiteModelDescriptor())
def get_tables(self):
cur = self.db_connection.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table';")
return [x[0] for x in cur.fetchall()]
def get_columns(self, table_name):
if table_name not in self.get_tables():
raise KeyError()
cur = self.db_connection.cursor()
data = cur.execute(f'''PRAGMA table_info({table_name});''')
return [x[1] for x in data.fetchall()]
def get_model_meta(self, table_name: str) -> DBType:
cur = self.db_connection.cursor()
cur.execute(f'PRAGMA table_info({table_name});')
primary_keys = []
fields = {}
for row in cur.fetchall():
col_name = row[1]
is_nullable = row[3] == 0
dbtype = row[2]
if row[5]:
primary_keys.append(col_name)
fields[col_name] = self.type_mapper.get_type(dbtype)(is_nullable)
return DynamicModel(table_name, fields, primary_keys)
def table_exists(self, table_name):
return super().table_exists(table_name)
def model_exists(self, model: Model) -> bool:
return super().model_exists(model)
def create_model(self, model):
return super().create_model(model)
def insert(self, model):
return super().insert(model)
def delete(self, model_type, override_delete_all=False, **kwargs):
return super().delete(model_type, override_delete_all=override_delete_all, **kwargs)
def select(self, model_type: Union[Model, type], **kwargs) -> List[Model]:
return super().select(model_type, **kwargs)
def update(self, model: Union[Model, List[Model]]) -> int:
return super().update(model)
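# A minimal usage sketch, left as a comment because the relative imports keep
# this module from running as a script; it assumes SQLiteDBConnection forwards
# sqlite3 filename semantics, so ':memory:' opens a throwaway database:
#     db = SQLiteDatabase(':memory:')
#     db.get_tables()  # expected: [] for a fresh database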
| 36.338462 | 105 | 0.65326 |
acf5013fcd9251782113c60786d4eb5a3d6c74d8 | 514 | py | Python | app/meda_sync_search/transformers/prescriptions_data_transformer.py | DEV3L/meda-sync-search | c67feb2f2b54ba153dc50e9aba5058d4e7948c92 | [
"Beerware"
] | null | null | null | app/meda_sync_search/transformers/prescriptions_data_transformer.py | DEV3L/meda-sync-search | c67feb2f2b54ba153dc50e9aba5058d4e7948c92 | [
"Beerware"
] | null | null | null | app/meda_sync_search/transformers/prescriptions_data_transformer.py | DEV3L/meda-sync-search | c67feb2f2b54ba153dc50e9aba5058d4e7948c92 | [
"Beerware"
] | null | null | null | from app.meda_sync_search.transformers.prescription_data_transformer import PrescriptionDataTransformer
class PrescriptionsDataTransformer:
def __init__(self, prescriptions_data):
self.prescriptions_data = prescriptions_data
def transform(self):
prescriptions = []
for prescription_data in self.prescriptions_data:
prescription = PrescriptionDataTransformer(prescription_data).transform()
prescriptions.append(prescription)
return prescriptions
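# A minimal usage sketch; `raw_rows` is an invented name for an iterable of
# whatever raw records PrescriptionDataTransformer expects:
#     prescriptions = PrescriptionsDataTransformer(raw_rows).transform()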
| 32.125 | 103 | 0.7607 |
acf501651deb8b9946ec779f6c407e81ee0b91b4 | 3,396 | py | Python | sunshine_conversations_client/model/message_bird_update.py | Dima2022/sunshine-conversations-python | 8085a82dc320d97f09bb0174d11dd1865a65404a | [
"Apache-2.0"
] | 4 | 2020-09-27T14:28:25.000Z | 2022-02-02T13:51:29.000Z | sunshine_conversations_client/model/message_bird_update.py | Dima2022/sunshine-conversations-python | 8085a82dc320d97f09bb0174d11dd1865a65404a | [
"Apache-2.0"
] | 3 | 2021-09-30T18:18:58.000Z | 2021-12-04T07:55:23.000Z | sunshine_conversations_client/model/message_bird_update.py | Dima2022/sunshine-conversations-python | 8085a82dc320d97f09bb0174d11dd1865a65404a | [
"Apache-2.0"
] | 5 | 2020-11-07T02:08:18.000Z | 2021-12-07T17:10:23.000Z | # coding: utf-8
"""
Sunshine Conversations API
The version of the OpenAPI document: 9.4.5
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from sunshine_conversations_client.configuration import Configuration
from sunshine_conversations_client.undefined import Undefined
try:
IntegrationUpdateBase = __import__("sunshine_conversations_client.model."+re.sub(r'(?<!^)(?=[A-Z])', '_', "IntegrationUpdateBase").lower(), fromlist=("IntegrationUpdateBase")).IntegrationUpdateBase
except ImportError:
pass
class MessageBirdUpdate(IntegrationUpdateBase):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
}
attribute_map = {
}
nulls = set()
def __init__(self, local_vars_configuration=None, **kwargs): # noqa: E501
"""MessageBirdUpdate - a model defined in OpenAPI""" # noqa: E501
super().__init__(**kwargs)
if (super().openapi_types is not None):
all_types = super().openapi_types.copy()
all_types.update(self.openapi_types)
self.openapi_types = all_types
if (super().attribute_map is not None):
all_attributes = super().attribute_map.copy()
all_attributes.update(self.attribute_map)
self.attribute_map = all_attributes
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self.discriminator = None
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, MessageBirdUpdate):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, MessageBirdUpdate):
return True
return self.to_dict() != other.to_dict()
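# A minimal usage sketch, assuming construction with no arguments is valid (as
# for most generated models); the payload declares no fields of its own, so
# equality is decided entirely by attributes inherited from the base class.
if __name__ == '__main__':
    a, b = MessageBirdUpdate(), MessageBirdUpdate()
    print(a == b)  # True when both serialize to the same dict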
| 30.594595 | 201 | 0.598351 |