Dataset schema (one row per source file):

| Column | dtype | Length / value range |
|---|---|---|
| hexsha | stringlengths | 40–40 |
| size | int64 | 4–1.02M |
| ext | stringclasses | 8 values |
| lang | stringclasses | 1 value |
| max_stars_repo_path | stringlengths | 4–209 |
| max_stars_repo_name | stringlengths | 5–121 |
| max_stars_repo_head_hexsha | stringlengths | 40–40 |
| max_stars_repo_licenses | listlengths | 1–10 |
| max_stars_count | int64 | 1–191k ⌀ |
| max_stars_repo_stars_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_stars_repo_stars_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_path | stringlengths | 4–209 |
| max_issues_repo_name | stringlengths | 5–121 |
| max_issues_repo_head_hexsha | stringlengths | 40–40 |
| max_issues_repo_licenses | listlengths | 1–10 |
| max_issues_count | int64 | 1–67k ⌀ |
| max_issues_repo_issues_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_issues_repo_issues_event_max_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_path | stringlengths | 4–209 |
| max_forks_repo_name | stringlengths | 5–121 |
| max_forks_repo_head_hexsha | stringlengths | 40–40 |
| max_forks_repo_licenses | listlengths | 1–10 |
| max_forks_count | int64 | 1–105k ⌀ |
| max_forks_repo_forks_event_min_datetime | stringlengths | 24–24 ⌀ |
| max_forks_repo_forks_event_max_datetime | stringlengths | 24–24 ⌀ |
| content | stringlengths | 4–1.02M |
| avg_line_length | float64 | 1.07–66.1k |
| max_line_length | int64 | 4–266k |
| alphanum_fraction | float64 | 0.01–1 |
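A minimal sketch of how rows with this schema might be consumed, assuming the split is served through the Hugging Face `datasets` library; the dataset identifier used below is a placeholder, not the actual name of this dump:

```python
# Minimal sketch (assumption: the rows are published as a Hugging Face dataset;
# "user/python-code-dump" is a placeholder identifier, not the real dataset name).
from datasets import load_dataset

rows = load_dataset("user/python-code-dump", split="train", streaming=True)

for row in rows:
    # Each row is a dict keyed by the columns above; columns marked ⌀ may be None.
    stars = row["max_stars_count"] or 0
    if row["ext"] == "py" and stars >= 100:
        # `content` holds the raw file text; the remaining columns are repo metadata.
        print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
```

Each record below lists the metadata columns first, followed by the `content` field and the per-file line statistics.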

hexsha: 5ae46279229bdbf69399dfaee5f04aa9953dc3fc | size: 675 | ext: py | lang: Python
max_stars: path=tests/test_download.py, repo=t-gibson/stock, head=d47fc1f1c4c72024dae186971f5a6eee7633cccc, licenses=["Apache-2.0"], count=null, events=null → null
max_issues: path=tests/test_download.py, repo=t-gibson/stock, head=d47fc1f1c4c72024dae186971f5a6eee7633cccc, licenses=["Apache-2.0"], count=null, events=null → null
max_forks: path=tests/test_download.py, repo=t-gibson/stock, head=d47fc1f1c4c72024dae186971f5a6eee7633cccc, licenses=["Apache-2.0"], count=null, events=null → null
content:
```python
import hypothesis.strategies as st
from hypothesis import given
from hypothesis.provisional import urls
from stock import download
@st.composite
def photo(draw):
photographer = draw(st.text())
main_url = draw(urls())
image_url = draw(urls())
return {
"photographer": photographer,
"url": main_url,
"src": {download.PHOTO_SIZE: image_url}
}
@given(urls())
def test_decode_inverts_encode(url):
assert download.decode_url(download.encode_url(url)) == url
@given(photo())
def test_decode_photo_object_inverts_encode(photo_obj):
assert download.decode_photo_object(download.encode_photo_object(photo_obj)) == photo_obj
```
avg_line_length: 22.5 | max_line_length: 93 | alphanum_fraction: 0.725926

hexsha: f093aadf99429cf2d0f2fdb6cafbf69fe570b544 | size: 989 | ext: py | lang: Python
max_stars: path=tests/test-runners/subject/package/tests/test_something.py, repo=Mortal/Nuitka, head=5150eeff7ff845ed4993c773449cd81b7f127c6b, licenses=["Apache-2.0"], count=null, events=null → null
max_issues: path=tests/test-runners/subject/package/tests/test_something.py, repo=Mortal/Nuitka, head=5150eeff7ff845ed4993c773449cd81b7f127c6b, licenses=["Apache-2.0"], count=null, events=null → null
max_forks: path=tests/test-runners/subject/package/tests/test_something.py, repo=Mortal/Nuitka, head=5150eeff7ff845ed4993c773449cd81b7f127c6b, licenses=["Apache-2.0"], count=1, events=2018-12-16T23:51:18.000Z → 2018-12-16T23:51:18.000Z
content:
```python
# Copyright 2018, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Python test originally created or extracted from other peoples work. The
# parts from me are licensed as below. It is at least Free Software where
# it's copied from other people. In these cases, that will normally be
# indicated.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import package.Something
def test_func():
assert package.Something.calledByTest() == 42
```
avg_line_length: 39.56 | max_line_length: 78 | alphanum_fraction: 0.717897

hexsha: 75d9edd88d184220d3916ef1ef6bc897b27cdf70 | size: 6,142 | ext: py | lang: Python
max_stars: path=configs/cascade_rcnn_r50_fpn_1x.py, repo=droseger/mmdetection, head=355da53ea7c4b061c62c5a8430adce7641bc2894, licenses=["Apache-2.0"], count=632, events=2019-04-10T02:05:03.000Z → 2022-03-29T01:58:55.000Z
max_issues: path=configs/cascade_rcnn_r50_fpn_1x.py, repo=singhbhupender1/MaskTrackRCNN, head=10e5d7ded62e0b7c5bf79075d9ee0cc37dc15321, licenses=["Apache-2.0"], count=56, events=2019-06-02T21:38:31.000Z → 2022-03-22T10:28:01.000Z
max_forks: path=configs/cascade_rcnn_r50_fpn_1x.py, repo=singhbhupender1/MaskTrackRCNN, head=10e5d7ded62e0b7c5bf79075d9ee0cc37dc15321, licenses=["Apache-2.0"], count=112, events=2019-04-10T12:01:44.000Z → 2022-03-29T01:58:49.000Z
content:
```python
# model settings
model = dict(
type='CascadeRCNN',
num_stages=3,
pretrained='modelzoo://resnet50',
backbone=dict(
type='ResNet',
depth=50,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
use_sigmoid_cls=True),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=True),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1],
reg_class_agnostic=True),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067],
reg_class_agnostic=True)
])
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
smoothl1_beta=1 / 9.0,
debug=False),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
pos_weight=-1,
debug=False)
],
stage_loss_weights=[1, 0.5, 0.25])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05, nms=dict(type='nms', iou_thr=0.5), max_per_img=100),
keep_all_stages=False)
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0.5,
with_mask=False,
with_crowd=True,
with_label=True),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_mask=False,
with_crowd=True,
with_label=True),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
img_scale=(1333, 800),
img_norm_cfg=img_norm_cfg,
size_divisor=32,
flip_ratio=0,
with_mask=False,
with_label=False,
test_mode=True))
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/cascade_rcnn_r50_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
```
avg_line_length: 29.109005 | max_line_length: 77 | alphanum_fraction: 0.534028

hexsha: 0d7dd5004ee78d7bcd7788dde94d89cc8b17a709 | size: 3,055 | ext: py | lang: Python
max_stars: path=citrix_hypervisor/datadog_checks/citrix_hypervisor/metrics.py, repo=tdimnet/integrations-core, head=a78133a3b71a1b8377fa214d121a98647031ab06, licenses=["BSD-3-Clause"], count=663, events=2016-08-23T05:23:45.000Z → 2022-03-29T00:37:23.000Z
max_issues: path=citrix_hypervisor/datadog_checks/citrix_hypervisor/metrics.py, repo=tdimnet/integrations-core, head=a78133a3b71a1b8377fa214d121a98647031ab06, licenses=["BSD-3-Clause"], count=6,642, events=2016-06-09T16:29:20.000Z → 2022-03-31T22:24:09.000Z
max_forks: path=citrix_hypervisor/datadog_checks/citrix_hypervisor/metrics.py, repo=tdimnet/integrations-core, head=a78133a3b71a1b8377fa214d121a98647031ab06, licenses=["BSD-3-Clause"], count=1,222, events=2017-01-27T15:51:38.000Z → 2022-03-31T18:17:51.000Z
content:
```python
# (C) Datadog, Inc. 2021-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import re
from typing import Any, List, Optional, Tuple
SIMPLE_METRICS = {
'memory': '.memory',
'memory_reclaimed_max': '.memory.reclaimed_max',
'memory_reclaimed': '.memory.reclaimed',
'memory_total_kib': '.memory.total_kib',
'memory_free_kib': '.memory.free_kib',
'pool_task_count': '.pool.task_count',
'pool_session_count': '.pool.session_count',
'xapi_memory_usage_kib': '.xapi.memory_usage_kib',
'xapi_free_memory_kib': '.xapi.free_memory_kib',
'xapi_live_memory_kib': '.xapi.live_memory_kib',
'xapi_allocation_kib': '.xapi.allocation_kib',
'xapi_open_fds': '.xapi.open_fds',
}
REGEX_METRICS = [
{'regex': 'sr_([a-z0-9-]+)_cache_misses', 'name': '.cache_misses', 'tags': ('cache_sr',)},
{'regex': 'sr_([a-z0-9-]+)_cache_hits', 'name': '.cache_hits', 'tags': ('cache_sr',)},
{'regex': 'sr_([a-z0-9-]+)_cache_size', 'name': '.cache_size', 'tags': ('cache_sr',)},
{'regex': 'pif_([a-zA-Z0-9-]+)_rx', 'name': '.pif.rx', 'tags': ('interface',)},
{'regex': 'pif_([a-zA-Z0-9-]+)_tx', 'name': '.pif.tx', 'tags': ('interface',)},
{'regex': 'cpu([a-zA-Z0-9-]+)', 'name': '.cpu', 'tags': ('cpu_id',)},
]
# 'MAX' and 'MIN' are not available when the integration is requesting the most granular interval,
# so they are ignored if they appear.
ALLOWED_METRIC_TYPE = ['AVERAGE']
def build_metric(metric_name, logger):
# type: (str, Any) -> Tuple[Optional[str], Optional[List[str]]]
"""
"AVERAGE:host:1e108be9-8ad0-4988-beff-03d8bb1369ae:sr_35c781cf-951d-0456-8190-373e3c08193e_cache_misses"
"AVERAGE:vm:057d0e50-da57-4fde-b0a7-9ebd1bf42a59:memory"
"""
metric_parts = metric_name.split(':')
if len(metric_parts) != 4 or metric_parts[0] not in ALLOWED_METRIC_TYPE:
logger.debug('Unknown format for metric %s', metric_name)
return None, None
name = metric_parts[1]
additional_tags = ['citrix_hypervisor_{}:{}'.format(metric_parts[1], metric_parts[2])]
found = False
if SIMPLE_METRICS.get(metric_parts[-1]):
name += SIMPLE_METRICS[metric_parts[-1]]
else:
found = False
for regex in REGEX_METRICS:
tags_values = [] # type: List[str]
results = re.findall(str(regex['regex']), metric_name)
if len(results) > 0 and isinstance(results[0], tuple):
tags_values = list(results[0])
else:
tags_values = results
if len(tags_values) == len(regex['tags']):
found = True
name += str(regex['name'])
for i in range(len(regex['tags'])):
additional_tags.append('{}:{}'.format(regex['tags'][i], tags_values[i]))
break
if not found:
logger.debug('Ignoring metric %s', metric_name)
return None, None
logger.debug('Found metric %s (%s)', name, metric_name)
return name, additional_tags
```
avg_line_length: 38.670886 | max_line_length: 108 | alphanum_fraction: 0.616694

hexsha: 5543de7f57f79c014d07e4a44bdc27444ae2c12d | size: 824 | ext: py | lang: Python
max_stars: path=tests/test_sentry.py, repo=apikay/celery-director, head=3575e9f89690f6f2518c9939be6169fb4383cbed, licenses=["BSD-3-Clause"], count=351, events=2020-01-30T14:37:48.000Z → 2022-03-29T11:34:14.000Z
max_issues: path=tests/test_sentry.py, repo=apikay/celery-director, head=3575e9f89690f6f2518c9939be6169fb4383cbed, licenses=["BSD-3-Clause"], count=53, events=2020-02-14T17:06:48.000Z → 2022-03-22T14:37:36.000Z
max_forks: path=tests/test_sentry.py, repo=LiniusAustPty/celery-director, head=5308c49e1f8502e244765025eb75b45bbe3c2d45, licenses=["BSD-3-Clause"], count=33, events=2020-01-31T14:27:21.000Z → 2022-03-10T19:50:06.000Z
content:
```python
from director.extensions import DirectorSentry, cel
def test_sentry_enrich_data(app, create_builder):
_, wf = create_builder("example", "WORKFLOW", {})
sentry = DirectorSentry()
sentry.init_app(app)
tags = sentry.enrich_tags(
{"foo": "bar"}, wf.workflow_id, cel.tasks.get("TASK_EXAMPLE")
)
assert tags == {
"foo": "bar",
"celery_task_name": "TASK_EXAMPLE",
"director_workflow_id": str(wf.workflow_id),
"director_workflow_project": "example",
"director_workflow_name": "example.WORKFLOW",
}
extra = sentry.enrich_extra(
{"foo": "bar"}, [{"key": "value"}], {"payload": {"hello": "world"}}
)
assert extra == {
"foo": "bar",
"task-args": [{"key": "value"}],
"workflow-payload": {"hello": "world"},
}
```
avg_line_length: 29.428571 | max_line_length: 75 | alphanum_fraction: 0.583738

hexsha: d92e2c9a6ea88ad03ec8b4bd8fe40fba921d2e48 | size: 4,762 | ext: py | lang: Python
max_stars: path=gym_recommendation/utils.py, repo=sadighian/recommendation-gym, head=96aaa27d17ae65b02eb0bcdadfcdbb12ea7f8c33, licenses=["Apache-2.0"], count=29, events=2019-10-28T08:44:02.000Z → 2021-11-19T19:15:30.000Z
max_issues: path=gym_recommendation/utils.py, repo=RedBanies3ofThem/recommendation-gym, head=96aaa27d17ae65b02eb0bcdadfcdbb12ea7f8c33, licenses=["Apache-2.0"], count=1, events=2020-12-06T12:28:51.000Z → 2021-08-01T09:01:08.000Z
max_forks: path=gym_recommendation/utils.py, repo=RedBanies3ofThem/recommendation-gym, head=96aaa27d17ae65b02eb0bcdadfcdbb12ea7f8c33, licenses=["Apache-2.0"], count=6, events=2020-05-31T20:01:22.000Z → 2022-01-05T10:59:06.000Z
content:
```python
import os
import zipfile
from datetime import datetime as dt
from typing import Dict, List, Tuple
import pandas as pd
import requests
from stable_baselines.common.base_class import ActorCriticRLModel
from . import RecoEnv
DATA_HEADER = "user id | item id | rating | timestamp"
ITEM_HEADER = "movie id | movie title | release date | video release date | IMDb URL | " \
"unknown | Action | Adventure | Animation | Children's | Comedy | Crime | " \
"Documentary | Drama | Fantasy | Film-Noir | Horror | Musical | Mystery | " \
"Romance | Sci-Fi | Thriller | War | Western"
USER_HEADER = "user id | age | gender | occupation | zip code"
# Static file path for saving and importing data set
# `gym_recommendation/data/...`
CWD = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data')
def download_data() -> None:
"""
Helper function to download MovieLens 100k data set and save to the `ml-100k`
directory within the `/data` folder.
"""
start_time = dt.now()
print("Starting data download. Saving to {}".format(CWD))
if not os.path.exists(CWD):
print('download_data() --> Making ./data/* directory...')
os.mkdir(CWD)
if not os.path.exists(os.path.join(CWD, 'ml-100k')):
print('download_data() --> Making ./data/ml-100k/* directory...')
os.mkdir(os.path.join(CWD, 'ml-100k'))
url = 'http://files.grouplens.org/datasets/movielens/ml-100k.zip'
r = requests.get(url)
if r.status_code != 200:
print('download_data() --> Error: could not download ml100k')
zip_file_path = os.path.join(CWD, 'ml-100k.zip')
with open(zip_file_path, 'wb') as f:
f.write(r.content)
with zipfile.ZipFile(zip_file_path, 'r') as f_zip:
f_zip.extractall(path=CWD)
elapsed = (dt.now() - start_time).seconds
print('download_data() --> completed in {} seconds.'.format(elapsed))
else:
print('Using cached data located at {}.'.format(os.path.join(CWD, 'ml-100k')))
def convert_header_to_camel_case(headers: str) -> List[str]:
"""Take headers available in ML 100k doc and convert it to a list of strings
Example:
convert "user id | item id | rating | timestamp"
to ['user_id', 'item_id', 'rating', 'timestamp']
"""
return headers.replace(' ', '_').split('_|_')
def import_data() -> Tuple[pd.DataFrame, pd.DataFrame, pd.DataFrame]:
"""
Helper function to import MovieLens 100k data set into Panda DataFrames.
:return: Three DataFrames:
(1) Movie rating data
(2) Movie reference data
(3) User reference data
"""
data = pd.read_csv(
os.path.join(CWD, 'ml-100k', 'u.data'),
delimiter='\t',
names=convert_header_to_camel_case(DATA_HEADER),
encoding='latin-1'
)
item = pd.read_csv(
os.path.join(CWD, 'ml-100k', 'u.item'),
delimiter='|',
names=convert_header_to_camel_case(ITEM_HEADER),
encoding='latin-1'
)
user = pd.read_csv(
os.path.join(CWD, 'ml-100k', 'u.user'),
delimiter='|',
names=convert_header_to_camel_case(USER_HEADER),
encoding='latin-1'
)
return data, item, user
def import_data_for_env() -> Dict[str, pd.DataFrame]:
"""
Helper function to download and import MovieLens 100k data set into Panda DataFrames.
Function first checks if the data is already downloaded in the
`gym_recommendation/data/` directory, and if not, downloads the data set.
:return: Three DataFrames:
(1) Movie rating data
(2) Movie reference data
(3) User reference data
"""
download_data()
kwargs = dict([(label, data) for label, data
in zip(['data', 'item', 'user'], import_data())])
return kwargs
def evaluate(model: ActorCriticRLModel, env: RecoEnv, num_steps: int = 1000) -> None:
"""
Evaluate a RL agent
"""
start_time = dt.now()
obs = env.reset()
step_count = 0
episode_number = 1
for i in range(num_steps):
step_count += 1
action, _states = model.predict(obs)
obs, reward, done, info = env.step(action)
if done:
elapsed = (dt.now() - start_time).seconds
print(f"**************EPISODE #{episode_number}****************")
print(f"Total steps = {step_count} | steps/second = {step_count / elapsed}")
print(f"Total correct predictions = {env.total_correct_predictions}")
print(f"Prediction accuracy = {env.total_correct_predictions / step_count}")
obs = env.reset()
step_count = 0
episode_number += 1
start_time = dt.now()
```
avg_line_length: 33.77305 | max_line_length: 91 | alphanum_fraction: 0.615708

hexsha: d1f3dfa3774f8ae4f368d07048cfac65b43c2a4d | size: 227 | ext: py | lang: Python
max_stars: path=task/w1/practic/11-100A.py, repo=beregok/pythontask, head=50394ff2b52ab4f3273ec9ddc4b504d1f7b3159e, licenses=["MIT"], count=1, events=2019-09-29T14:19:54.000Z → 2019-09-29T14:19:54.000Z
max_issues: path=task/w1/practic/11-100A.py, repo=beregok/pythontask, head=50394ff2b52ab4f3273ec9ddc4b504d1f7b3159e, licenses=["MIT"], count=null, events=null → null
max_forks: path=task/w1/practic/11-100A.py, repo=beregok/pythontask, head=50394ff2b52ab4f3273ec9ddc4b504d1f7b3159e, licenses=["MIT"], count=null, events=null → null
content:
```python
# Write the letter 'A' (Latin, uppercase) 100 times in a row. Submit for review a program that prints this string (letters only, without quotes or spaces).
## Output format
# Print the answer to the problem.
print("A"*100)
```
avg_line_length: 28.375 | max_line_length: 156 | alphanum_fraction: 0.753304

hexsha: c6ab46112a29bb3ac8804a28775fd6702c0c85e5 | size: 1,661 | ext: py | lang: Python
max_stars: path=setup.py, repo=catalystneuro/ndx-bipolar-scheme, head=33850abe0c1fe232af24a306db19906f6ab111a6, licenses=["BSD-3-Clause"], count=1, events=2019-11-19T20:13:21.000Z → 2019-11-19T20:13:21.000Z
max_issues: path=setup.py, repo=catalystneuro/ndx-bipolar-scheme, head=33850abe0c1fe232af24a306db19906f6ab111a6, licenses=["BSD-3-Clause"], count=11, events=2020-04-13T18:45:50.000Z → 2021-07-19T17:07:21.000Z
max_forks: path=setup.py, repo=ben-dichter-consulting/ndx-bipolar-scheme, head=06ddcfcf05bde5db61a5e2db0b56dc4d6041f34b, licenses=["BSD-3-Clause"], count=2, events=2020-05-06T18:25:45.000Z → 2020-05-27T15:53:13.000Z
content:
```python
# -*- coding: utf-8 -*-
import os
from os import path
from setuptools import setup, find_packages
from shutil import copy2
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup_args = {
'name': 'ndx-bipolar-scheme',
'version': '0.3.1',
'description': 'An NWB extension for storing bipolar scheme',
'author': 'Ben Dichter, Armin Najarpour, Ryan Ly',
'author_email': 'ben.dichter@catalystneuro.com',
'url': 'https://github.com/catalystneuro/ndx-bipolar-scheme',
'license': 'BSD 3-Clause',
'long_description': long_description,
'long_description_content_type': "text/markdown",
'install_requires': [
'pynwb>=1.1.2'
],
'packages': find_packages('src/pynwb'),
'package_dir': {'': 'src/pynwb'},
'package_data': {'ndx_bipolar_scheme': [
'spec/ndx-bipolar-scheme.namespace.yaml',
'spec/ndx-bipolar-scheme.extensions.yaml',
]},
'classifiers': [
"Intended Audience :: Developers",
"Intended Audience :: Science/Research",
],
'zip_safe': False
}
def _copy_spec_files(project_dir):
ns_path = os.path.join(project_dir, 'spec', 'ndx-bipolar-scheme.namespace.yaml')
ext_path = os.path.join(project_dir, 'spec', 'ndx-bipolar-scheme.extensions.yaml')
dst_dir = os.path.join(project_dir, 'src', 'pynwb', 'ndx_bipolar_scheme', 'spec')
if not os.path.exists(dst_dir):
os.mkdir(dst_dir)
copy2(ns_path, dst_dir)
copy2(ext_path, dst_dir)
if __name__ == '__main__':
_copy_spec_files(os.path.dirname(__file__))
setup(**setup_args)
```
avg_line_length: 29.660714 | max_line_length: 86 | alphanum_fraction: 0.661048

hexsha: cc94922eb8b576bb0eb1e12003a969c9851f527f | size: 4,976 | ext: py | lang: Python
max_stars: path=src/oci/healthchecks/models/tcp_connection.py, repo=Manny27nyc/oci-python-sdk, head=de60b04e07a99826254f7255e992f41772902df7, licenses=["Apache-2.0", "BSD-3-Clause"], count=249, events=2017-09-11T22:06:05.000Z → 2022-03-04T17:09:29.000Z
max_issues: path=src/oci/healthchecks/models/tcp_connection.py, repo=Manny27nyc/oci-python-sdk, head=de60b04e07a99826254f7255e992f41772902df7, licenses=["Apache-2.0", "BSD-3-Clause"], count=228, events=2017-09-11T23:07:26.000Z → 2022-03-23T10:58:50.000Z
max_forks: path=src/oci/healthchecks/models/tcp_connection.py, repo=Manny27nyc/oci-python-sdk, head=de60b04e07a99826254f7255e992f41772902df7, licenses=["Apache-2.0", "BSD-3-Clause"], count=224, events=2017-09-27T07:32:43.000Z → 2022-03-25T16:55:42.000Z
content:
```python
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class TcpConnection(object):
"""
TCP connection results. All durations are in milliseconds.
"""
def __init__(self, **kwargs):
"""
Initializes a new TcpConnection object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param address:
The value to assign to the address property of this TcpConnection.
:type address: str
:param port:
The value to assign to the port property of this TcpConnection.
:type port: int
:param connect_duration:
The value to assign to the connect_duration property of this TcpConnection.
:type connect_duration: float
:param secure_connect_duration:
The value to assign to the secure_connect_duration property of this TcpConnection.
:type secure_connect_duration: float
"""
self.swagger_types = {
'address': 'str',
'port': 'int',
'connect_duration': 'float',
'secure_connect_duration': 'float'
}
self.attribute_map = {
'address': 'address',
'port': 'port',
'connect_duration': 'connectDuration',
'secure_connect_duration': 'secureConnectDuration'
}
self._address = None
self._port = None
self._connect_duration = None
self._secure_connect_duration = None
@property
def address(self):
"""
Gets the address of this TcpConnection.
The connection IP address.
:return: The address of this TcpConnection.
:rtype: str
"""
return self._address
@address.setter
def address(self, address):
"""
Sets the address of this TcpConnection.
The connection IP address.
:param address: The address of this TcpConnection.
:type: str
"""
self._address = address
@property
def port(self):
"""
Gets the port of this TcpConnection.
The port.
:return: The port of this TcpConnection.
:rtype: int
"""
return self._port
@port.setter
def port(self, port):
"""
Sets the port of this TcpConnection.
The port.
:param port: The port of this TcpConnection.
:type: int
"""
self._port = port
@property
def connect_duration(self):
"""
Gets the connect_duration of this TcpConnection.
Total connect duration, calculated using `connectEnd` minus `connectStart`.
:return: The connect_duration of this TcpConnection.
:rtype: float
"""
return self._connect_duration
@connect_duration.setter
def connect_duration(self, connect_duration):
"""
Sets the connect_duration of this TcpConnection.
Total connect duration, calculated using `connectEnd` minus `connectStart`.
:param connect_duration: The connect_duration of this TcpConnection.
:type: float
"""
self._connect_duration = connect_duration
@property
def secure_connect_duration(self):
"""
Gets the secure_connect_duration of this TcpConnection.
The duration to secure the connection. This value will be zero for
insecure connections. Calculated using `connectEnd` minus `secureConnectionStart`.
:return: The secure_connect_duration of this TcpConnection.
:rtype: float
"""
return self._secure_connect_duration
@secure_connect_duration.setter
def secure_connect_duration(self, secure_connect_duration):
"""
Sets the secure_connect_duration of this TcpConnection.
The duration to secure the connection. This value will be zero for
insecure connections. Calculated using `connectEnd` minus `secureConnectionStart`.
:param secure_connect_duration: The secure_connect_duration of this TcpConnection.
:type: float
"""
self._secure_connect_duration = secure_connect_duration
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
```
avg_line_length: 29.975904 | max_line_length: 245 | alphanum_fraction: 0.643891

hexsha: e1acaeef6a0ff37c959d6a1cd4a7c80b30c5a1f3 | size: 68 | ext: py | lang: Python
max_stars: path=main.py, repo=mlasch/esp32-lora-wifi, head=c2ab6843c6e517ecdf8d461bca67d8954f02592a, licenses=["MIT"], count=1, events=2020-10-07T12:39:15.000Z → 2020-10-07T12:39:15.000Z
max_issues: path=main.py, repo=mlasch/esp32-lora-wifi, head=c2ab6843c6e517ecdf8d461bca67d8954f02592a, licenses=["MIT"], count=1, events=2019-07-08T21:39:38.000Z → 2019-07-08T21:39:38.000Z
max_forks: path=main.py, repo=mlasch/esp32-lora-wifi, head=c2ab6843c6e517ecdf8d461bca67d8954f02592a, licenses=["MIT"], count=null, events=null → null
content:
```python
import counter
if __name__ == "__main__":
counter.main_loop()
```
avg_line_length: 11.333333 | max_line_length: 26 | alphanum_fraction: 0.691176

hexsha: 796cd67351ecc533ef606386e09b5e4fcad424a1 | size: 18,295 | ext: py | lang: Python
max_stars: path=acestream/ACEStream/WebUI/WebUI.py, repo=GrandPaRPi/p2ptv-pi, head=6f79c00f9055a3763ddfe1dc41e14d2cb533f4c3, licenses=["MIT"], count=null, events=null → null
max_issues: path=acestream/ACEStream/WebUI/WebUI.py, repo=GrandPaRPi/p2ptv-pi, head=6f79c00f9055a3763ddfe1dc41e14d2cb533f4c3, licenses=["MIT"], count=null, events=null → null
max_forks: path=acestream/ACEStream/WebUI/WebUI.py, repo=GrandPaRPi/p2ptv-pi, head=6f79c00f9055a3763ddfe1dc41e14d2cb533f4c3, licenses=["MIT"], count=2, events=2018-04-17T17:34:39.000Z → 2020-07-26T03:43:33.000Z
content:
```python
#Embedded file name: ACEStream\WebUI\WebUI.pyo
import sys, os
import time
import random
import urllib
import urlparse
import cgi
import binascii
import copy
from cStringIO import StringIO
from traceback import print_exc, print_stack
from threading import RLock, Condition
from base64 import encodestring
try:
import simplejson as json
except ImportError:
import json
from ACEStream.Core.API import *
from ACEStream.Core.BitTornado.bencode import *
from ACEStream.Video.VideoServer import AbstractPathMapper
from ACEStream.Core.Utilities.logger import log, log_exc
from ACEStream.Plugin.defs import *
DEBUG = False
def streaminfo404():
return {'statuscode': 404,
'statusmsg': '404 Not Found'}
class WebIFPathMapper(AbstractPathMapper):
binaryExtensions = ['.gif',
'.png',
'.jpg',
'.js',
'.css']
contentTypes = {'.css': 'text/css',
'.gif': 'image/gif',
'.jpg': 'image/jpg',
'.png': 'image/png',
'.js': 'text/javascript',
'.html': 'text/html'}
def __init__(self, bgApp, session):
self.bgApp = bgApp
self.session = session
self.downspeed = 0
self.upspeed = 0
self.lastreqtime = time.time()
if sys.platform == 'win32':
self.webUIPath = os.path.join(self.bgApp.installdir, 'webui')
else:
self.webUIPath = os.path.join(self.bgApp.installdir, 'data', 'webui')
if DEBUG:
log('webui: path', self.webUIPath)
self.session.set_download_states_callback(self.speed_callback)
def get(self, urlpath):
try:
return self.doget(urlpath)
except:
print_exc()
return None
def doget(self, urlpath):
if not urlpath.startswith(URLPATH_WEBIF_PREFIX):
return streaminfo404()
else:
self.lastreqtime = time.time()
try:
fakeurl = 'http://127.0.0.1' + urlpath[len(URLPATH_WEBIF_PREFIX):]
if DEBUG:
log('webui::doget: fakeurl', fakeurl)
request_url = urlparse.urlparse(fakeurl)
except:
print_exc()
return
path = request_url[2]
query_string = request_url[4]
query_params = urlparse.parse_qs(query_string)
if DEBUG:
log('webui::doget: urlpath', urlpath, 'request_url', request_url, 'path', path, 'query_params', query_params)
if len(path) == 0:
if DEBUG:
log('webui::doget: show status page')
page = self.statusPage()
pageStream = StringIO(page)
return {'statuscode': 200,
'mimetype': 'text/html',
'stream': pageStream,
'length': len(page)}
if path == 'permid.js':
try:
permid = encodestring(self.bgApp.s.get_permid()).replace('\n', '')
txt = "var permid = '%s';" % permid
dataStream = StringIO(txt)
except:
print_exc()
return {'statuscode': 500,
'statusmsg': 'Bad permid'}
return {'statuscode': 200,
'mimetype': 'text/javascript',
'stream': dataStream,
'length': len(txt)}
if path == '/createstream':
if DEBUG:
log('webui::doget: show create stream page')
page = self.createStreamPage()
pageStream = StringIO(page)
return {'statuscode': 200,
'mimetype': 'text/html',
'stream': pageStream,
'length': len(page)}
if path == '/dispatch':
if 'url' not in query_params:
if DEBUG:
log('webui::doget:dispatch: missing url')
return streaminfo404()
url = query_params['url'][0]
redirect_url = 'http://127.0.0.1:6878/webui/' + url
params = []
for name, val in query_params.iteritems():
if name != 'url':
params.append(urllib.quote_plus(name) + '=' + urllib.quote_plus(val[0]))
if len(params):
redirect_url += '?' + '&'.join(params)
if DEBUG:
log('webui::doget:dispatch: redirect_url', redirect_url)
page = '<!DOCTYPE html><html><head><script type="text/javascript">'
page += 'parent.location.href = "' + redirect_url + '";'
page += '</script></head><body></body></html>'
pageStream = StringIO(page)
return {'statuscode': 200,
'mimetype': 'text/html',
'stream': pageStream,
'length': len(page)}
if path.startswith('/player/') and query_params.has_key('a') and query_params['a'][0] == 'check':
player_id = path.split('/')[2]
redirect_url = 'http://127.0.0.1:6878/webui/player/' + player_id
params = []
for name, val in query_params.iteritems():
if name != 'a':
params.append(urllib.quote_plus(name) + '=' + urllib.quote_plus(val[0]))
if len(params):
redirect_url += '?' + '&'.join(params)
if DEBUG:
log('webui::doget:dispatch: redirect_url', redirect_url)
page = '<!DOCTYPE html><html><head><script type="text/javascript">'
page += 'parent.location.href = "' + redirect_url + '";'
page += '</script></head><body></body></html>'
pageStream = StringIO(page)
return {'statuscode': 200,
'mimetype': 'text/html',
'stream': pageStream,
'length': len(page)}
if path.startswith('/player/'):
player_id = path.split('/')[2]
if DEBUG:
log('webui::doget: show player page: id', player_id)
params = {}
for name, val in query_params.iteritems():
params[name] = val[0]
page = self.playerPage(player_id, params)
pageStream = StringIO(page)
return {'statuscode': 200,
'mimetype': 'text/html',
'stream': pageStream,
'length': len(page)}
static_path = None
json_query = None
if path.startswith('/json/'):
json_query = request_url[4]
else:
static_path = os.path.join(self.webUIPath, path[1:])
if DEBUG:
log('webui::doget: request parsed: static_path', static_path, 'json_query', json_query)
if static_path is not None:
if not os.path.isfile(static_path):
if DEBUG:
log('webui::doget: file not found:', static_path)
return streaminfo404()
extension = os.path.splitext(static_path)[1]
if extension in self.binaryExtensions:
mode = 'rb'
else:
mode = 'r'
fp = open(static_path, mode)
data = fp.read()
fp.close()
dataStream = StringIO(data)
return {'statuscode': 200,
'mimetype': self.getContentType(extension),
'stream': dataStream,
'length': len(data)}
if json_query is not None:
params = {}
for s in json_query.split('&'):
name, value = s.split('=')
params[name] = value
if DEBUG:
log('webui:doget: got json request:', json_query, 'params', params)
if 'q' not in params:
return
try:
req = urllib.unquote(params['q'])
if DEBUG:
log('webui::doget: parse json: req', req)
jreq = json.loads(req)
if DEBUG:
log('webui::doget: parse json done: jreq', jreq)
except:
print_exc()
return
try:
method = jreq['method']
except:
return {'statuscode': 504,
'statusmsg': 'Json request in wrong format! At least a method has to be specified!'}
try:
args = jreq['arguments']
if DEBUG:
print >> sys.stderr, 'webUI: Got JSON request: ', jreq, '; method: ', method, '; arguments: ', args
except:
args = None
if DEBUG:
print >> sys.stderr, 'webUI: Got JSON request: ', jreq, '; method: ', method
if args is None:
data = self.process_json_request(method)
if DEBUG:
print >> sys.stderr, 'WebUI: response to JSON ', method, ' request: ', data
else:
data = self.process_json_request(method, args)
if DEBUG:
print >> sys.stderr, 'WebUI: response to JSON ', method, ' request: ', data, ' arguments: ', args
if data == 'Args missing':
return {'statuscode': 504,
'statusmsg': 'Json request in wrong format! Arguments have to be specified!'}
dataStream = StringIO(data)
return {'statuscode': 200,
'mimetype': 'application/json',
'stream': dataStream,
'length': len(data)}
if DEBUG:
log('webui::doget: unknow request format: request_url', request_url)
return streaminfo404()
def process_json_request(self, method, args = None):
try:
return self.doprocess_json_request(method, args=args)
except:
print_exc()
return json.JSONEncoder().encode({'success': 'false'})
def doprocess_json_request(self, method, args = None):
if args is not None and args.has_key('id'):
infohash = urllib.unquote(str(args['id']))
else:
infohash = None
if DEBUG:
print >> sys.stderr, 'WebUI: received JSON request for method: ', method
if method == 'get_all_downloads':
condition = Condition()
dlist = []
states_func = lambda dslist: self.states_callback(dslist, condition, dlist)
self.session.set_download_states_callback(states_func)
condition.acquire()
condition.wait(5.0)
condition.release()
return json.JSONEncoder().encode({'downloads': dlist})
if method == 'pause_all':
try:
func = lambda : self.bgApp.gui_webui_stop_all_downloads(self.session.get_downloads())
self.bgApp.run_delayed(func)
return json.JSONEncoder().encode({'success': 'true'})
except:
return json.JSONEncoder().encode({'success': 'false'})
elif method == 'resume_all':
try:
func = lambda : self.bgApp.gui_webui_restart_all_downloads(self.session.get_downloads())
self.bgApp.run_delayed(func)
return json.JSONEncoder().encode({'success': 'true'})
except:
return json.JSONEncoder().encode({'success': 'false'})
elif method == 'remove_all':
try:
func = lambda : self.bgApp.gui_webui_remove_all_downloads(self.session.get_downloads())
self.bgApp.run_delayed(func)
return json.JSONEncoder().encode({'success': 'true'})
except:
return json.JSONEncoder().encode({'success': 'false'})
else:
if method == 'get_speed_info':
return json.JSONEncoder().encode({'success': 'true',
'downspeed': self.downspeed,
'upspeed': self.upspeed})
if args is None:
return 'Args missing'
if method == 'pause_dl':
try:
downloads = self.session.get_downloads()
for dl in downloads:
if dl.get_def().get_infohash() == infohash:
func = lambda : self.bgApp.gui_webui_stop_download(dl)
self.bgApp.run_delayed(func)
return json.JSONEncoder().encode({'success': 'true'})
except:
return json.JSONEncoder().encode({'success': 'false'})
elif method == 'resume_dl':
try:
downloads = self.session.get_downloads()
for dl in downloads:
if dl.get_def().get_infohash() == infohash:
func = lambda : self.bgApp.gui_webui_restart_download(dl)
self.bgApp.run_delayed(func)
return json.JSONEncoder().encode({'success': 'true'})
except:
return json.JSONEncoder().encode({'success': 'false'})
elif method == 'remove_dl':
try:
downloads = self.session.get_downloads()
for dl in downloads:
if dl.get_def().get_infohash() == infohash:
func = lambda : self.bgApp.gui_webui_remove_download(dl)
self.bgApp.run_delayed(func)
return json.JSONEncoder().encode({'success': 'true'})
except:
return json.JSONEncoder().encode({'success': 'false'})
elif method == 'save_dl':
try:
if args is not None:
path = urllib.unquote(str(args['path']))
else:
raise Exception, 'Missing path in request'
downloads = self.session.get_downloads()
for dl in downloads:
if dl.get_type() == DLTYPE_TORRENT and dl.get_def().get_infohash() == infohash:
func = lambda : self.bgApp.gui_webui_save_download(dl, path)
self.bgApp.run_delayed(func)
return json.JSONEncoder().encode({'success': 'true'})
except:
return json.JSONEncoder().encode({'success': 'false'})
elif method == 'create_stream':
if DEBUG:
log('webui: createstream: args', args)
try:
self.bgApp.gui_webui_create_stream(args)
return json.JSONEncoder().encode({'success': 'true'})
except Exception as e:
if DEBUG:
print_exc()
return json.JSONEncoder().encode({'success': 'false',
'error': str(e)})
else:
raise Exception, 'Unknown method ' + method
def states_callback(self, dslist, condition, dlist):
for ds in dslist:
d = ds.get_download()
infohash = urllib.quote(d.get_hash())
dl = {'id': infohash,
'name': d.get_def().get_name(),
'status': dlstatus_strings[ds.get_status()],
'progress': ds.get_progress(),
'upload': ds.get_current_speed(UPLOAD),
'download': ds.get_current_speed(DOWNLOAD)}
dlist.append(dl)
condition.acquire()
condition.notify()
condition.release()
return (0.0, False)
def speed_callback(self, dslist):
upspeed = 0
downspeed = 0
for ds in dslist:
d = ds.get_download()
upspeed += ds.get_current_speed(UPLOAD)
downspeed += ds.get_current_speed(DOWNLOAD)
self.downspeed = downspeed
self.upspeed = upspeed
return (1.0, False)
def statusPage(self):
page = '<!DOCTYPE html>'
page += '<html>\n'
header = os.path.join(self.webUIPath, 'html', 'head.html')
if DEBUG:
log('webui::statusPage: header', header)
if os.path.isfile(header):
f = open(header)
head = f.read()
            f.close()
page += head
body = os.path.join(self.webUIPath, 'html', 'body.html')
if DEBUG:
log('webui::statusPage: body', body)
if os.path.isfile(body):
f = open(body)
tmp = f.read()
            f.close()
page += tmp
page += '</html>'
return page
def createStreamPage(self):
path = os.path.join(self.webUIPath, 'html', 'create_stream.html')
if not os.path.isfile(path):
return ''
f = open(path)
html = f.read()
f.close()
destdir = self.bgApp.get_default_destdir()
if isinstance(destdir, unicode):
destdir = destdir.encode('utf-8')
html = html.replace('{dest_dir}', destdir)
return html
def playerPage(self, player_id, params):
path = os.path.join(self.webUIPath, 'html', 'player.html')
if not os.path.isfile(path):
return ''
f = open(path)
html = f.read()
f.close()
if 'autoplay' in params and params['autoplay'] == 'true':
autoplay = 'true'
else:
autoplay = 'false'
html = html.replace('{player_id}', player_id)
html = html.replace('{autoplay}', autoplay)
return html
def getContentType(self, ext):
content_type = 'text/plain'
if ext in self.contentTypes:
content_type = self.contentTypes[ext]
return content_type
```
avg_line_length: 39.685466 | max_line_length: 125 | alphanum_fraction: 0.495545

hexsha: 28dd930fdeac75c98d0f8456422aa9c359e6686a | size: 3,432 | ext: py | lang: Python
max_stars: path=django_work_excel/settings.py, repo=E1mir/django-excel-handler, head=10c640b323332703d1446eba41fb2bcf160cbef7, licenses=["MIT"], count=null, events=null → null
max_issues: path=django_work_excel/settings.py, repo=E1mir/django-excel-handler, head=10c640b323332703d1446eba41fb2bcf160cbef7, licenses=["MIT"], count=null, events=null → null
max_forks: path=django_work_excel/settings.py, repo=E1mir/django-excel-handler, head=10c640b323332703d1446eba41fb2bcf160cbef7, licenses=["MIT"], count=null, events=null → null
content:
```python
"""
Django settings for django_work_excel project.
Generated by 'django-admin startproject' using Django 2.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
FILES_DIR = os.path.join(BASE_DIR, 'files')
TEMPLATE_DIR = os.path.join(FILES_DIR, 'templates')
STATIC_DIR = os.path.join(FILES_DIR, 'static')
MEDIA_DIR = os.path.join(FILES_DIR, 'media')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'lln7$-!d!&4r7sjyr+*8+^p*2rn!!2!64@v$derct3%na7(tzo'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'excel_handler'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_work_excel.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [TEMPLATE_DIR, ],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_work_excel.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
STATIC_DIR,
]
MEDIA_URL = '/media/'
MEDIA_ROOT = MEDIA_DIR
```
avg_line_length: 26.8125 | max_line_length: 91 | alphanum_fraction: 0.699592

hexsha: bb0c2b720b601d70b46aed9bc2523825b3c315ec | size: 11,903 | ext: bzl | lang: Python
max_stars: path=tools/build_defs/pkg/pkg.bzl, repo=erenon/bazel, head=9bf885afeb01c766d84acf86ca847a7b5e7bd0d8, licenses=["Apache-2.0"], count=2, events=2020-01-30T14:09:52.000Z → 2021-08-21T22:00:59.000Z
max_issues: path=tools/build_defs/pkg/pkg.bzl, repo=erenon/bazel, head=9bf885afeb01c766d84acf86ca847a7b5e7bd0d8, licenses=["Apache-2.0"], count=null, events=null → null
max_forks: path=tools/build_defs/pkg/pkg.bzl, repo=erenon/bazel, head=9bf885afeb01c766d84acf86ca847a7b5e7bd0d8, licenses=["Apache-2.0"], count=1, events=2018-04-19T15:01:33.000Z → 2018-04-19T15:01:33.000Z
content:
```python
# Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rules for manipulation of various packaging."""
load(":path.bzl", "compute_data_path", "dest_path")
# Filetype to restrict inputs
tar_filetype = [".tar", ".tar.gz", ".tgz", ".tar.xz", ".tar.bz2"]
deb_filetype = [".deb", ".udeb"]
def _remap(remap_paths, path):
"""If path starts with a key in remap_paths, rewrite it."""
for prefix, replacement in remap_paths.items():
if path.startswith(prefix):
return replacement + path[len(prefix):]
return path
def _quote(filename, protect = "="):
"""Quote the filename, by escaping = by \= and \ by \\"""
return filename.replace("\\", "\\\\").replace(protect, "\\" + protect)
def _pkg_tar_impl(ctx):
"""Implementation of the pkg_tar rule."""
# Compute the relative path
data_path = compute_data_path(ctx.outputs.out, ctx.attr.strip_prefix)
# Find a list of path remappings to apply.
remap_paths = ctx.attr.remap_paths
# Start building the arguments.
args = [
"--output=" + ctx.outputs.out.path,
"--directory=" + ctx.attr.package_dir,
"--mode=" + ctx.attr.mode,
"--owner=" + ctx.attr.owner,
"--owner_name=" + ctx.attr.ownername,
]
# Add runfiles if requested
file_inputs = []
if ctx.attr.include_runfiles:
runfiles_depsets = []
for f in ctx.attr.srcs:
default_runfiles = f[DefaultInfo].default_runfiles
if default_runfiles != None:
runfiles_depsets.append(default_runfiles.files)
# deduplicates files in srcs attribute and their runfiles
file_inputs = depset(ctx.files.srcs, transitive = runfiles_depsets).to_list()
else:
file_inputs = ctx.files.srcs[:]
args += [
"--file=%s=%s" % (_quote(f.path), _remap(remap_paths, dest_path(f, data_path)))
for f in file_inputs
]
for target, f_dest_path in ctx.attr.files.items():
target_files = target.files.to_list()
if len(target_files) != 1:
fail("Each input must describe exactly one file.", attr = "files")
file_inputs += target_files
args += ["--file=%s=%s" % (_quote(target_files[0].path), f_dest_path)]
if ctx.attr.modes:
args += [
"--modes=%s=%s" % (_quote(key), ctx.attr.modes[key])
for key in ctx.attr.modes
]
if ctx.attr.owners:
args += [
"--owners=%s=%s" % (_quote(key), ctx.attr.owners[key])
for key in ctx.attr.owners
]
if ctx.attr.ownernames:
args += [
"--owner_names=%s=%s" % (_quote(key), ctx.attr.ownernames[key])
for key in ctx.attr.ownernames
]
if ctx.attr.empty_files:
args += ["--empty_file=%s" % empty_file for empty_file in ctx.attr.empty_files]
if ctx.attr.empty_dirs:
args += ["--empty_dir=%s" % empty_dir for empty_dir in ctx.attr.empty_dirs]
if ctx.attr.extension:
dotPos = ctx.attr.extension.find(".")
if dotPos > 0:
dotPos += 1
args += ["--compression=%s" % ctx.attr.extension[dotPos:]]
elif ctx.attr.extension == "tgz":
args += ["--compression=gz"]
args += ["--tar=" + f.path for f in ctx.files.deps]
args += [
"--link=%s:%s" % (_quote(k, protect = ":"), ctx.attr.symlinks[k])
for k in ctx.attr.symlinks
]
arg_file = ctx.actions.declare_file(ctx.label.name + ".args")
ctx.actions.write(arg_file, "\n".join(args))
ctx.actions.run(
inputs = file_inputs + ctx.files.deps + [arg_file],
executable = ctx.executable.build_tar,
arguments = ["--flagfile", arg_file.path],
outputs = [ctx.outputs.out],
mnemonic = "PackageTar",
use_default_shell_env = True,
)
def _pkg_deb_impl(ctx):
"""The implementation for the pkg_deb rule."""
files = [ctx.file.data]
args = [
"--output=" + ctx.outputs.deb.path,
"--changes=" + ctx.outputs.changes.path,
"--data=" + ctx.file.data.path,
"--package=" + ctx.attr.package,
"--architecture=" + ctx.attr.architecture,
"--maintainer=" + ctx.attr.maintainer,
]
if ctx.attr.preinst:
args += ["--preinst=@" + ctx.file.preinst.path]
files += [ctx.file.preinst]
if ctx.attr.postinst:
args += ["--postinst=@" + ctx.file.postinst.path]
files += [ctx.file.postinst]
if ctx.attr.prerm:
args += ["--prerm=@" + ctx.file.prerm.path]
files += [ctx.file.prerm]
if ctx.attr.postrm:
args += ["--postrm=@" + ctx.file.postrm.path]
files += [ctx.file.postrm]
# Conffiles can be specified by a file or a string list
if ctx.attr.conffiles_file:
if ctx.attr.conffiles:
fail("Both conffiles and conffiles_file attributes were specified")
args += ["--conffile=@" + ctx.file.conffiles_file.path]
files += [ctx.file.conffiles_file]
elif ctx.attr.conffiles:
args += ["--conffile=%s" % cf for cf in ctx.attr.conffiles]
# Version and description can be specified by a file or inlined
if ctx.attr.version_file:
if ctx.attr.version:
fail("Both version and version_file attributes were specified")
args += ["--version=@" + ctx.file.version_file.path]
files += [ctx.file.version_file]
elif ctx.attr.version:
args += ["--version=" + ctx.attr.version]
else:
fail("Neither version_file nor version attribute was specified")
if ctx.attr.description_file:
if ctx.attr.description:
fail("Both description and description_file attributes were specified")
args += ["--description=@" + ctx.file.description_file.path]
files += [ctx.file.description_file]
elif ctx.attr.description:
args += ["--description=" + ctx.attr.description]
else:
fail("Neither description_file nor description attribute was specified")
# Built using can also be specified by a file or inlined (but is not mandatory)
if ctx.attr.built_using_file:
if ctx.attr.built_using:
fail("Both build_using and built_using_file attributes were specified")
args += ["--built_using=@" + ctx.file.built_using_file.path]
files += [ctx.file.built_using_file]
elif ctx.attr.built_using:
args += ["--built_using=" + ctx.attr.built_using]
if ctx.attr.priority:
args += ["--priority=" + ctx.attr.priority]
if ctx.attr.section:
args += ["--section=" + ctx.attr.section]
if ctx.attr.homepage:
args += ["--homepage=" + ctx.attr.homepage]
args += ["--distribution=" + ctx.attr.distribution]
args += ["--urgency=" + ctx.attr.urgency]
args += ["--depends=" + d for d in ctx.attr.depends]
args += ["--suggests=" + d for d in ctx.attr.suggests]
args += ["--enhances=" + d for d in ctx.attr.enhances]
args += ["--conflicts=" + d for d in ctx.attr.conflicts]
args += ["--pre_depends=" + d for d in ctx.attr.predepends]
args += ["--recommends=" + d for d in ctx.attr.recommends]
ctx.actions.run(
executable = ctx.executable.make_deb,
arguments = args,
inputs = files,
outputs = [ctx.outputs.deb, ctx.outputs.changes],
mnemonic = "MakeDeb",
)
ctx.actions.run_shell(
command = "ln -s %s %s" % (ctx.outputs.deb.basename, ctx.outputs.out.path),
inputs = [ctx.outputs.deb],
outputs = [ctx.outputs.out],
)
# A rule for creating a tar file, see README.md
_real_pkg_tar = rule(
implementation = _pkg_tar_impl,
attrs = {
"strip_prefix": attr.string(),
"package_dir": attr.string(default = "/"),
"deps": attr.label_list(allow_files = tar_filetype),
"srcs": attr.label_list(allow_files = True),
"files": attr.label_keyed_string_dict(allow_files = True),
"mode": attr.string(default = "0555"),
"modes": attr.string_dict(),
"owner": attr.string(default = "0.0"),
"ownername": attr.string(default = "."),
"owners": attr.string_dict(),
"ownernames": attr.string_dict(),
"extension": attr.string(default = "tar"),
"symlinks": attr.string_dict(),
"empty_files": attr.string_list(),
"include_runfiles": attr.bool(),
"empty_dirs": attr.string_list(),
"remap_paths": attr.string_dict(),
# Implicit dependencies.
"build_tar": attr.label(
default = Label("//tools/build_defs/pkg:build_tar"),
cfg = "host",
executable = True,
allow_files = True,
),
},
outputs = {
"out": "%{name}.%{extension}",
},
)
def pkg_tar(**kwargs):
# Compatibility with older versions of pkg_tar that define files as
# a flat list of labels.
if "srcs" not in kwargs:
if "files" in kwargs:
if not hasattr(kwargs["files"], "items"):
label = "%s//%s:%s" % (native.repository_name(), native.package_name(), kwargs["name"])
print("%s: you provided a non dictionary to the pkg_tar `files` attribute. " % (label,) +
"This attribute was renamed to `srcs`. " +
"Consider renaming it in your BUILD file.")
kwargs["srcs"] = kwargs.pop("files")
_real_pkg_tar(**kwargs)
# A rule for creating a deb file, see README.md
pkg_deb = rule(
implementation = _pkg_deb_impl,
attrs = {
"data": attr.label(mandatory = True, allow_single_file = tar_filetype),
"package": attr.string(mandatory = True),
"architecture": attr.string(default = "all"),
"distribution": attr.string(default = "unstable"),
"urgency": attr.string(default = "medium"),
"maintainer": attr.string(mandatory = True),
"preinst": attr.label(allow_single_file = True),
"postinst": attr.label(allow_single_file = True),
"prerm": attr.label(allow_single_file = True),
"postrm": attr.label(allow_single_file = True),
"conffiles_file": attr.label(allow_single_file = True),
"conffiles": attr.string_list(default = []),
"version_file": attr.label(allow_single_file = True),
"version": attr.string(),
"description_file": attr.label(allow_single_file = True),
"description": attr.string(),
"built_using_file": attr.label(allow_single_file = True),
"built_using": attr.string(),
"priority": attr.string(),
"section": attr.string(),
"homepage": attr.string(),
"depends": attr.string_list(default = []),
"suggests": attr.string_list(default = []),
"enhances": attr.string_list(default = []),
"conflicts": attr.string_list(default = []),
"predepends": attr.string_list(default = []),
"recommends": attr.string_list(default = []),
# Implicit dependencies.
"make_deb": attr.label(
default = Label("//tools/build_defs/pkg:make_deb"),
cfg = "host",
executable = True,
allow_files = True,
),
},
outputs = {
"out": "%{name}.deb",
"deb": "%{package}_%{version}_%{architecture}.deb",
"changes": "%{package}_%{version}_%{architecture}.changes",
},
)
```
avg_line_length: 39.413907 | max_line_length: 105 | alphanum_fraction: 0.599933

hexsha: 09738fe0662bf08e0fbc7a336fd7aee7b05c65ad | size: 4,882 | ext: py | lang: Python
max_stars: path=huey/contrib/djhuey/management/commands/run_huey.py, repo=winkidney/huey, head=cda66da19e8a92d98453b53e106c3a74c20f640b, licenses=["MIT"], count=null, events=null → null
max_issues: path=huey/contrib/djhuey/management/commands/run_huey.py, repo=winkidney/huey, head=cda66da19e8a92d98453b53e106c3a74c20f640b, licenses=["MIT"], count=null, events=null → null
max_forks: path=huey/contrib/djhuey/management/commands/run_huey.py, repo=winkidney/huey, head=cda66da19e8a92d98453b53e106c3a74c20f640b, licenses=["MIT"], count=1, events=2019-10-22T13:04:23.000Z → 2019-10-22T13:04:23.000Z
content:
```python
import imp
import sys
from importlib import import_module
from optparse import make_option
import django
from django.conf import settings
from django.core.management.base import BaseCommand
try:
from django.apps import apps as django_apps
HAS_DJANGO_APPS = True
except ImportError:
# Django 1.6
HAS_DJANGO_APPS = False
from huey.consumer import Consumer
from huey.bin.huey_consumer import get_loglevel
from huey.bin.huey_consumer import setup_logger
class CompatParser(object):
"""Converts argeparse arguments to optparse for Django < 1.8 compatibility."""
def __init__(self, command):
self.command = command
def add_argument(self, *args, **kwargs):
if 'type' in kwargs:
# Convert `type=int` to `type="int"`, etc.
kwargs['type'] = kwargs['type'].__name__
self.command.option_list += (make_option(*args, **kwargs),)
class Command(BaseCommand):
"""
Queue consumer. Example usage::
To start the consumer (note you must export the settings module):
django-admin.py run_huey
"""
help = "Run the queue consumer"
def __init__(self, *args, **kwargs):
if django.VERSION < (1, 8):
self.option_list = BaseCommand.option_list
parser = CompatParser(self)
self.add_arguments(parser)
def add_arguments(self, parser):
parser.add_argument(
'--workers', '-w',
dest='workers',
type=int,
help='Number of worker threads/processes/greenlets')
parser.add_argument(
'--worker-type', '-k',
dest='worker_type',
help='worker execution model (thread, greenlet, process).',
default='thread',
choices=['greenlet', 'thread', 'process', 'gevent'])
parser.add_argument(
'--delay', '-d',
dest='initial_delay',
type=float,
help='Delay between polling requests')
parser.add_argument(
'--max_delay', '-m',
dest='max_delay',
type=float,
help='Maximum delay between polling requests')
parser.add_argument(
'--no-periodic', '-n',
default=True,
dest='periodic',
action='store_false',
help='Do not enqueue periodic commands')
def autodiscover_appconfigs(self):
"""Use Django app registry to pull out potential apps with tasks.py module."""
module_name = 'tasks'
for config in django_apps.get_app_configs():
app_path = config.module.__path__
try:
fp, path, description = imp.find_module(module_name, app_path)
except ImportError:
continue
else:
import_path = '%s.%s' % (config.name, module_name)
imp.load_module(import_path, fp, path, description)
def autodiscover_old(self):
# this is to find modules named <commands.py> in a django project's
# installed apps directories
module_name = 'tasks'
for app in settings.INSTALLED_APPS:
try:
import_module(app)
app_path = sys.modules[app].__path__
except AttributeError:
continue
try:
imp.find_module(module_name, app_path)
except ImportError:
continue
import_module('%s.%s' % (app, module_name))
app_path = sys.modules['%s.%s' % (app, module_name)]
def autodiscover(self):
"""Switch between Django 1.7 style and old style app importing."""
if HAS_DJANGO_APPS:
self.autodiscover_appconfigs()
else:
self.autodiscover_old()
def handle(self, *args, **options):
from huey.contrib.djhuey import HUEY
consumer_options = {}
if isinstance(settings.HUEY, dict):
consumer_options.update(settings.HUEY.get('consumer', {}))
if options['workers'] is not None:
consumer_options['workers'] = options['workers']
if options['worker_type'] is not None:
consumer_options['worker_type'] = options['worker_type']
if options['periodic'] is not None:
consumer_options['periodic'] = options['periodic']
if options['initial_delay'] is not None:
consumer_options['initial_delay'] = options['initial_delay']
if options['max_delay'] is not None:
consumer_options['max_delay'] = options['max_delay']
self.autodiscover()
loglevel = get_loglevel(consumer_options.pop('loglevel', None))
logfile = consumer_options.pop('logfile', None)
setup_logger(loglevel, logfile, consumer_options['worker_type'])
consumer = Consumer(HUEY, **consumer_options)
consumer.run()
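# Illustrative sketch (not part of the original run_huey.py): handle() above
# merges the 'consumer' section of settings.HUEY with the CLI flags, so a
# hypothetical Django settings fragment feeding it could look like this.
# The values are assumptions for illustration, not huey defaults.
EXAMPLE_HUEY_CONSUMER_SETTINGS = {
    'consumer': {
        'workers': 4,             # overridden by `run_huey --workers N`
        'worker_type': 'thread',  # thread / greenlet / process
        'periodic': True,         # set False to skip periodic tasks
        'initial_delay': 0.1,     # seconds between polls, overridden by -d
        'max_delay': 10.0,        # overridden by -m
    },
}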
| 33.210884
| 86
| 0.601803
|
05f6685e5b307624aa8f6a1c47e5d5db16b893c3
| 3,509
|
py
|
Python
|
payment_handler/payment_handler/settings.py
|
KartashevaAnna/Payment-platform
|
c229894de5dd2b0f745b1d363fac2fcc9372413c
|
[
"MIT"
] | null | null | null |
payment_handler/payment_handler/settings.py
|
KartashevaAnna/Payment-platform
|
c229894de5dd2b0f745b1d363fac2fcc9372413c
|
[
"MIT"
] | null | null | null |
payment_handler/payment_handler/settings.py
|
KartashevaAnna/Payment-platform
|
c229894de5dd2b0f745b1d363fac2fcc9372413c
|
[
"MIT"
] | null | null | null |
"""
Django settings for payment_handler project.
Generated by 'django-admin startproject' using Django 4.0.3.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/4.0/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-v#yu2-$w-$de57cwp7u6$!j=8y(q51_ht5*&fp-w3p6cr)v)tc'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'handler.apps.HandlerConfig',
'users.apps.UsersConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.locale.LocaleMiddleware',
]
ROOT_URLCONF = 'payment_handler.urls'
TEMPLATES_DIR = os.path.join(BASE_DIR, 'templates')
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [TEMPLATES_DIR],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'payment_handler.wsgi.application'
# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
| 26.992308
| 91
| 0.706184
|
937f50d33e0cd9c4458756f303b58f94cac7b033
| 701
|
py
|
Python
|
app/schemas/form_element_list_value.py
|
attachemd/backend_v03
|
c14b1b9e1c00187f5df8c3a3a4407eb5938aa314
|
[
"MIT"
] | null | null | null |
app/schemas/form_element_list_value.py
|
attachemd/backend_v03
|
c14b1b9e1c00187f5df8c3a3a4407eb5938aa314
|
[
"MIT"
] | null | null | null |
app/schemas/form_element_list_value.py
|
attachemd/backend_v03
|
c14b1b9e1c00187f5df8c3a3a4407eb5938aa314
|
[
"MIT"
] | null | null | null |
from typing import Optional
from pydantic import BaseModel
# Shared properties
class FormElementListValueBase(BaseModel):
name: str
form_element_field_id: str
# Properties to receive via API on creation
class FormElementListValueCreate(FormElementListValueBase):
pass
class FormElementListValueCreateForRoute(BaseModel):
name: str
# form_element_field_id: str
# Properties to receive via API on update
class FormElementListValueUpdate(BaseModel):
pass
class FormElementListValueInDBBase(FormElementListValueBase):
class Config:
orm_mode = True
# Additional properties to return via API
class FormElementListValue(FormElementListValueInDBBase):
pass
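# Illustrative sketch (not part of the original schema module): a quick
# round-trip through the Create schema defined above, assuming pydantic v1
# semantics (which is what `orm_mode = True` implies).  The IDs are made up.
if __name__ == "__main__":
    payload = {"name": "Option A", "form_element_field_id": "field-123"}
    obj = FormElementListValueCreate(**payload)  # type-validated on construction
    print(obj.dict())  # -> {'name': 'Option A', 'form_element_field_id': 'field-123'}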
| 20.617647
| 61
| 0.794579
|
5d59d0381323360d4c9a0d1d003ec666aa826285
| 16,024
|
py
|
Python
|
detectron2_ofa/engine/hooks.py
|
qdmy/Adelaidet-Quantization
|
e88cf41c62dc3944d2bd57ffc1d365535b0a1c4b
|
[
"Apache-2.0"
] | null | null | null |
detectron2_ofa/engine/hooks.py
|
qdmy/Adelaidet-Quantization
|
e88cf41c62dc3944d2bd57ffc1d365535b0a1c4b
|
[
"Apache-2.0"
] | null | null | null |
detectron2_ofa/engine/hooks.py
|
qdmy/Adelaidet-Quantization
|
e88cf41c62dc3944d2bd57ffc1d365535b0a1c4b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import datetime
import logging
import os
import tempfile
import time
from collections import Counter
from gpuinfo import GPUInfo
import torch
from fvcore.common.checkpoint import PeriodicCheckpointer as _PeriodicCheckpointer
from fvcore.common.file_io import PathManager
from fvcore.common.timer import Timer
from fvcore.nn.precise_bn import get_bn_modules, update_bn_stats
import detectron2_ofa.utils.comm as comm
from detectron2_ofa.evaluation.testing import flatten_results_dict
from detectron2_ofa.utils.events import EventStorage, EventWriter
from .train_loop import HookBase
__all__ = [
"CallbackHook",
"IterationTimer",
"PeriodicWriter",
"PeriodicCheckpointer",
"LRScheduler",
"AutogradProfiler",
"EvalHook",
"PreciseBN",
]
"""
Implement some common hooks.
"""
class CallbackHook(HookBase):
"""
Create a hook using callback functions provided by the user.
"""
def __init__(self, *, before_train=None, after_train=None, before_step=None, after_step=None):
"""
Each argument is a function that takes one argument: the trainer.
"""
self._before_train = before_train
self._before_step = before_step
self._after_step = after_step
self._after_train = after_train
def before_train(self):
if self._before_train:
self._before_train(self.trainer)
def after_train(self):
if self._after_train:
self._after_train(self.trainer)
# The functions may be closures that hold reference to the trainer
# Therefore, delete them to avoid circular reference.
del self._before_train, self._after_train
del self._before_step, self._after_step
def before_step(self):
if self._before_step:
self._before_step(self.trainer)
def after_step(self):
if self._after_step:
self._after_step(self.trainer)
class IterationTimer(HookBase):
"""
Track the time spent for each iteration (each run_step call in the trainer).
Print a summary in the end of training.
This hook uses the time between the call to its :meth:`before_step`
and :meth:`after_step` methods.
Under the convention that :meth:`before_step` of all hooks should only
take negligible amount of time, the :class:`IterationTimer` hook should be
placed at the beginning of the list of hooks to obtain accurate timing.
"""
def __init__(self, warmup_iter=3):
"""
Args:
warmup_iter (int): the number of iterations at the beginning to exclude
from timing.
"""
self._warmup_iter = warmup_iter
self._step_timer = Timer()
def before_train(self):
self._start_time = time.perf_counter()
self._total_timer = Timer()
self._total_timer.pause()
def after_train(self):
logger = logging.getLogger(__name__)
total_time = time.perf_counter() - self._start_time
total_time_minus_hooks = self._total_timer.seconds()
hook_time = total_time - total_time_minus_hooks
num_iter = self.trainer.iter + 1 - self.trainer.start_iter - self._warmup_iter
if num_iter > 0 and total_time_minus_hooks > 0:
# Speed is meaningful only after warmup
# NOTE this format is parsed by grep in some scripts
logger.info(
"Overall training speed: {} iterations in {} ({:.4f} s / it)".format(
num_iter,
str(datetime.timedelta(seconds=int(total_time_minus_hooks))),
total_time_minus_hooks / num_iter,
)
)
logger.info(
"Total training time: {} ({} on hooks)".format(
str(datetime.timedelta(seconds=int(total_time))),
str(datetime.timedelta(seconds=int(hook_time))),
)
)
def before_step(self):
self._step_timer.reset()
self._total_timer.resume()
def after_step(self):
# +1 because we're in after_step
iter_done = self.trainer.iter - self.trainer.start_iter + 1
if iter_done >= self._warmup_iter:
sec = self._step_timer.seconds()
self.trainer.storage.put_scalars(time=sec)
else:
self._start_time = time.perf_counter()
self._total_timer.reset()
self._total_timer.pause()
class PeriodicWriter(HookBase):
"""
Write events to EventStorage periodically.
It is executed every ``period`` iterations and after the last iteration.
"""
def __init__(self, writers, period=20):
"""
Args:
writers (list[EventWriter]): a list of EventWriter objects
period (int):
"""
self._writers = writers
for w in writers:
assert isinstance(w, EventWriter), w
self._period = period
self._logger = logging.getLogger(__name__)
def after_step(self):
if (self.trainer.iter + 1) % self._period == 0 or (
self.trainer.iter == self.trainer.max_iter - 1
):
for writer in self._writers:
writer.write()
percent, memory = GPUInfo.gpu_usage()
self._logger.info("precent: {}, memory: {}".format(percent, memory))
def after_train(self):
for writer in self._writers:
writer.close()
class PeriodicCheckpointer(_PeriodicCheckpointer, HookBase):
"""
Same as :class:`detectron2.checkpoint.PeriodicCheckpointer`, but as a hook.
Note that when used as a hook,
it is unable to save additional data other than what's defined
by the given `checkpointer`.
It is executed every ``period`` iterations and after the last iteration.
"""
def before_train(self):
self.max_iter = self.trainer.max_iter
def after_step(self):
# No way to use **kwargs
self.step(self.trainer.iter)
class LRScheduler(HookBase):
"""
A hook which executes a torch builtin LR scheduler and summarizes the LR.
It is executed after every iteration.
"""
def __init__(self, optimizer, scheduler):
"""
Args:
optimizer (torch.optim.Optimizer):
scheduler (torch.optim._LRScheduler)
"""
self._optimizer = optimizer
self._scheduler = scheduler
# NOTE: some heuristics on what LR to summarize
# summarize the param group with most parameters
largest_group = max(len(g["params"]) for g in optimizer.param_groups)
if largest_group == 1:
# If all groups have one parameter,
# then find the most common initial LR, and use it for summary
lr_count = Counter([g["lr"] for g in optimizer.param_groups])
lr = lr_count.most_common()[0][0]
for i, g in enumerate(optimizer.param_groups):
if g["lr"] == lr:
self._best_param_group_id = i
break
else:
for i, g in enumerate(optimizer.param_groups):
if len(g["params"]) == largest_group:
self._best_param_group_id = i
break
def after_step(self):
lr = self._optimizer.param_groups[self._best_param_group_id]["lr"]
self.trainer.storage.put_scalar("lr", lr, smoothing_hint=False)
self._scheduler.step()
class DoubleForwardLRScheduler(HookBase):
"""
A hook which executes a torch builtin LR scheduler and summarizes the LR.
It is executed after every iteration.
"""
def __init__(self, optimizer, scale_factor=0.5):
"""
Args:
optimizer (torch.optim.Optimizer):
scheduler (torch.optim._LRScheduler)
"""
self._optimizer = optimizer
self._scale_factor = scale_factor
self.num_groups = len(self._optimizer.param_groups)
self.original_lr = [0.1] * self.num_groups
def change_lr(self):
# backup learning rate
for i, g in enumerate(self._optimizer.param_groups):
self.original_lr[i] = g["lr"]
g["lr"] = self.original_lr[i] * 0.5
def recover_lr(self):
for i, g in enumerate(self._optimizer.param_groups):
g["lr"] = self.original_lr[i]
class AutogradProfiler(HookBase):
"""
A hook which runs `torch.autograd.profiler.profile`.
Examples:
.. code-block:: python
hooks.AutogradProfiler(
lambda trainer: trainer.iter > 10 and trainer.iter < 20, self.cfg.OUTPUT_DIR
)
The above example will run the profiler for iteration 10~20 and dump
results to ``OUTPUT_DIR``. We did not profile the first few iterations
because they are typically slower than the rest.
The result files can be loaded in the ``chrome://tracing`` page in chrome browser.
Note:
When used together with NCCL on older version of GPUs,
autograd profiler may cause deadlock because it unnecessarily allocates
memory on every device it sees. The memory management calls, if
interleaved with NCCL calls, lead to deadlock on GPUs that do not
support `cudaLaunchCooperativeKernelMultiDevice`.
"""
def __init__(self, enable_predicate, output_dir, *, use_cuda=True):
"""
Args:
enable_predicate (callable[trainer -> bool]): a function which takes a trainer,
and returns whether to enable the profiler.
It will be called once every step, and can be used to select which steps to profile.
output_dir (str): the output directory to dump tracing files.
use_cuda (bool): same as in `torch.autograd.profiler.profile`.
"""
self._enable_predicate = enable_predicate
self._use_cuda = use_cuda
self._output_dir = output_dir
def before_step(self):
if self._enable_predicate(self.trainer):
self._profiler = torch.autograd.profiler.profile(use_cuda=self._use_cuda)
self._profiler.__enter__()
else:
self._profiler = None
def after_step(self):
if self._profiler is None:
return
self._profiler.__exit__(None, None, None)
out_file = os.path.join(
self._output_dir, "profiler-trace-iter{}.json".format(self.trainer.iter)
)
if "://" not in out_file:
self._profiler.export_chrome_trace(out_file)
else:
# Support non-posix filesystems
with tempfile.TemporaryDirectory(prefix="detectron2_ofa_profiler") as d:
tmp_file = os.path.join(d, "tmp.json")
self._profiler.export_chrome_trace(tmp_file)
with open(tmp_file) as f:
content = f.read()
with PathManager.open(out_file, "w") as f:
f.write(content)
class EvalHook(HookBase):
"""
Run an evaluation function periodically, and at the end of training.
It is executed every ``eval_period`` iterations and after the last iteration.
"""
def __init__(self, eval_period, eval_function):
"""
Args:
eval_period (int): the period to run `eval_function`.
eval_function (callable): a function which takes no arguments, and
returns a nested dict of evaluation metrics.
Note:
This hook must be enabled in all or none workers.
If you would like only certain workers to perform evaluation,
give other workers a no-op function (`eval_function=lambda: None`).
"""
self._period = eval_period
self._func = eval_function
def after_step(self):
next_iter = self.trainer.iter + 1
is_final = next_iter == self.trainer.max_iter
if is_final or (self._period > 0 and next_iter % self._period == 0):
results = self._func()
if results:
assert isinstance(
results, dict
), "Eval function must return a dict. Got {} instead.".format(results)
flattened_results = flatten_results_dict(results)
for k, v in flattened_results.items():
try:
v = float(v)
except Exception:
raise ValueError(
"[EvalHook] eval_function should return a nested dict of float. "
"Got '{}: {}' instead.".format(k, v)
)
self.trainer.storage.put_scalars(**flattened_results, smoothing_hint=False)
# Evaluation may take different time among workers.
# A barrier make them start the next iteration together.
comm.synchronize()
def after_train(self):
# func is likely a closure that holds reference to the trainer
# therefore we clean it to avoid circular reference in the end
del self._func
class PreciseBN(HookBase):
"""
The standard implementation of BatchNorm uses EMA in inference, which is
sometimes suboptimal.
This class computes the true average of statistics rather than the moving average,
    and puts true averages into every BN layer in the given model.
It is executed every ``period`` iterations and after the last iteration.
"""
def __init__(self, period, model, data_loader, num_iter):
"""
Args:
period (int): the period this hook is run, or 0 to not run during training.
The hook will always run in the end of training.
model (nn.Module): a module whose all BN layers in training mode will be
updated by precise BN.
Note that user is responsible for ensuring the BN layers to be
updated are in training mode when this hook is triggered.
data_loader (iterable): it will produce data to be run by `model(data)`.
num_iter (int): number of iterations used to compute the precise
statistics.
"""
self._logger = logging.getLogger(__name__)
if len(get_bn_modules(model)) == 0:
self._logger.info(
"PreciseBN is disabled because model does not contain BN layers in training mode."
)
self._disabled = True
return
self._model = model
self._data_loader = data_loader
self._num_iter = num_iter
self._period = period
self._disabled = False
self._data_iter = None
def after_step(self):
next_iter = self.trainer.iter + 1
is_final = next_iter == self.trainer.max_iter
if is_final or (self._period > 0 and next_iter % self._period == 0):
self.update_stats()
def update_stats(self):
"""
Update the model with precise statistics. Users can manually call this method.
"""
if self._disabled:
return
if self._data_iter is None:
self._data_iter = iter(self._data_loader)
num_iter = 0
def data_loader():
nonlocal num_iter
while True:
num_iter += 1
if num_iter % 100 == 0:
self._logger.info(
"Running precise-BN ... {}/{} iterations.".format(num_iter, self._num_iter)
)
# This way we can reuse the same iterator
yield next(self._data_iter)
with EventStorage(): # capture events in a new storage to discard them
self._logger.info(
"Running precise-BN for {} iterations... ".format(self._num_iter)
+ "Note that this could produce different statistics every time."
)
update_bn_stats(self._model, data_loader(), self._num_iter)
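# Illustrative sketch (not part of the original module): a plausible way to
# assemble several of the hooks defined above for a trainer.  `optimizer`,
# `scheduler` and `writers` are assumed to come from the surrounding training
# code; `writers` must be EventWriter instances, as PeriodicWriter asserts.
def build_example_hooks(optimizer, scheduler, writers, eval_period=1000):
    return [
        IterationTimer(warmup_iter=3),        # first, so timing excludes other hooks
        LRScheduler(optimizer, scheduler),    # logs the LR and steps the scheduler
        EvalHook(eval_period, lambda: {"bbox": {"AP": 0.0}}),  # hypothetical eval fn
        PeriodicWriter(writers, period=20),   # last, so it flushes all logged scalars
    ]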
| 34.9869
| 100
| 0.616201
|
f11e21bba694182e04d46eb66544183181b5822a
| 981
|
py
|
Python
|
utils/rgbycc.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | 28
|
2021-01-27T00:41:40.000Z
|
2022-02-14T10:11:51.000Z
|
utils/rgbycc.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | null | null | null |
utils/rgbycc.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | 6
|
2021-02-03T01:42:08.000Z
|
2021-12-03T17:47:19.000Z
|
import torch
def rgb2ycc(samples, round=False, batch=False):
if not batch:
samples = samples.reshape(1, *samples.shape)
k = torch.tensor([[0.299, 0.587, 0.114], [-0.1687, -0.3313, 0.5], [0.5, -0.4187, -0.0813]])
b = torch.tensor([0, 128, 128])
samples = (torch.matmul(samples.permute(0, 2, 3, 1), k.T) + b).permute(0, 3, 1, 2)
if round:
samples = torch.round(samples)
if not batch:
samples = samples.reshape(*samples.shape[1:])
return samples
def ycc2rgb(samples, round=False, batch=False):
if not batch:
samples = samples.reshape(1, *samples.shape)
k = torch.tensor([[1, 0, 1.402], [1, -0.34414, -0.71414], [1, 1.772, 0]])
b = torch.tensor([-179.456, 135.45984, -226.816])
samples = (torch.matmul(samples.permute(0, 2, 3, 1), k.T) + b).permute(0, 3, 1, 2)
if round:
samples = torch.round(samples)
if not batch:
samples = samples.reshape(*samples.shape[1:])
return samples
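# Illustrative sketch (not part of the original module): round-trip a random
# channels-first image through the two conversions above.  The transform is
# only approximately invertible in float arithmetic (and lossy with round=True).
if __name__ == "__main__":
    img = torch.rand(3, 8, 8) * 255           # single image, CHW, values in 0..255
    ycc = rgb2ycc(img)                         # batch=False adds/removes the batch dim internally
    back = ycc2rgb(ycc)
    print(torch.max(torch.abs(back - img)))   # expected to be a small residual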
| 35.035714
| 95
| 0.59633
|
5b88f0bac404ab4e9276a2ba15d026e07123d2a8
| 5,700
|
py
|
Python
|
modules/VTK/wrapping/tests/statismoTests/TestDataManager.py
|
tom-albrecht/statismo
|
e7825afadb1accc4902d911d5f00a8c4bd383a31
|
[
"BSD-3-Clause"
] | null | null | null |
modules/VTK/wrapping/tests/statismoTests/TestDataManager.py
|
tom-albrecht/statismo
|
e7825afadb1accc4902d911d5f00a8c4bd383a31
|
[
"BSD-3-Clause"
] | null | null | null |
modules/VTK/wrapping/tests/statismoTests/TestDataManager.py
|
tom-albrecht/statismo
|
e7825afadb1accc4902d911d5f00a8c4bd383a31
|
[
"BSD-3-Clause"
] | null | null | null |
#
# This file is part of the statismo library.
#
# Author: Marcel Luethi (marcel.luethi@unibas.ch)
#
# Copyright (c) 2011 University of Basel
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of the project's author nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#
import unittest
import tempfile
import os.path
import vtk
import statismo
from statismoTestUtils import DATADIR, getDataFiles, read_vtkpd
class Test(unittest.TestCase):
def setUp(self):
self.datafiles = getDataFiles(DATADIR)
ref = read_vtkpd(self.datafiles[0])
self.representer = statismo.vtkStandardMeshRepresenter.Create(ref)
def tearDown(self):
pass
def testName(self):
pass
def testAddDataset(self):
datamanager = statismo.DataManager_vtkPD.Create(self.representer)
datasets = map(read_vtkpd, self.datafiles)
for (dataset, filename) in zip(datasets, self.datafiles):
datamanager.AddDataset(dataset, filename)
self.assertEqual(datamanager.GetNumberOfSamples(),len(self.datafiles))
for (i, sampleData) in enumerate(datamanager.GetData()):
self.assertEqual(sampleData.GetDatasetURI(),self.datafiles[i])
def testLoadSave(self):
datamanager = statismo.DataManager_vtkPD.Create(self.representer)
datasets = map(read_vtkpd, self.datafiles)
for (dataset, filename) in zip(datasets, self.datafiles):
datamanager.AddDataset(dataset, filename)
tmpfile = tempfile.mktemp(suffix="h5")
representer = statismo.vtkStandardMeshRepresenter.Create()
datamanager.Save(tmpfile)
datamanager_new = statismo.DataManager_vtkPD.Load(representer, tmpfile)
self.assertEqual(datamanager.GetNumberOfSamples(), datamanager_new.GetNumberOfSamples())
sampleSet = datamanager.GetData()
newSampleSet = datamanager_new.GetData()
for (sample, newSample) in zip(sampleSet, newSampleSet):
self.assertTrue((sample.GetSampleVector() == newSample.GetSampleVector()).all() == True)
def testLoadSaveSurrogateData(self):
datamanager = statismo.DataManagerWithSurrogates_vtkPD.Create(self.representer, os.path.join(DATADIR, "..", "hand_images", "surrogates", "hand_surrogates_types.txt"))
ds_filename = os.path.join(DATADIR, "hand-1.vtk")
ds = read_vtkpd(ds_filename)
surrogate_filename = os.path.join(DATADIR, "..", "hand_images", "surrogates", "hand-1_surrogates.txt")
datamanager.AddDatasetWithSurrogates(ds, ds_filename, surrogate_filename)
tmpfile = tempfile.mktemp(suffix="h5")
datamanager.Save(tmpfile)
representer = statismo.vtkStandardMeshRepresenter.Create()
datamanager_new = statismo.DataManagerWithSurrogates_vtkPD.Load(representer, tmpfile)
self.assertEqual(datamanager.GetNumberOfSamples(), datamanager_new.GetNumberOfSamples())
sampleSet = datamanager.GetData()
newSampleSet = datamanager_new.GetData()
for (sample, newSample) in zip(sampleSet, newSampleSet):
self.assertTrue((sample.GetSampleVector() == newSample.GetSampleVector()).all() == True)
def testCrossValidation(self):
datamanager = statismo.DataManager_vtkPD.Create(self.representer)
datasets = map(read_vtkpd, self.datafiles)
for (dataset, filename) in zip(datasets, self.datafiles):
datamanager.AddDataset(dataset, filename)
cvFolds = datamanager.GetCrossValidationFolds(3, True)
self.assertEqual(len(cvFolds), 3)
training_data = cvFolds[0].GetTrainingData()
test_data = cvFolds[0].GetTestingData()
self.assertTrue(len(training_data) + len(test_data) == datamanager.GetNumberOfSamples())
containsSameElement = set(training_data).isdisjoint(test_data)
self.assertTrue(containsSameElement, "a dataset is both in the test and training data")
suite = unittest.TestLoader().loadTestsFromTestCase(Test)
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| 40.714286
| 175
| 0.697368
|
bfa0c85425f0bdeceb330e8145ffd634ab44aa96
| 665
|
py
|
Python
|
migrate_dashboard.py
|
chetanmeh/Miscellany
|
4b203fb27bfad11d337e6491eddc00d66a93fa35
|
[
"BSD-3-Clause"
] | null | null | null |
migrate_dashboard.py
|
chetanmeh/Miscellany
|
4b203fb27bfad11d337e6491eddc00d66a93fa35
|
[
"BSD-3-Clause"
] | null | null | null |
migrate_dashboard.py
|
chetanmeh/Miscellany
|
4b203fb27bfad11d337e6491eddc00d66a93fa35
|
[
"BSD-3-Clause"
] | null | null | null |
# currently only for screenboards
# TODO: add timeboards
from datadog import initialize, api
old_api = "*****"
old_app = "*****"
screenboard_id = ****
options = {
'api_key': old_api,
'app_key': old_app
}
initialize(**options)
screenboard = api.Screenboard.get(screenboard_id)
print(screenboard)
new_api = '*****'
new_app = '*****'
options = {
'api_key': new_api,
'app_key': new_app
}
initialize(**options)
new = api.Screenboard.create(
board_title=screenboard['board_title'],
widgets=screenboard['widgets'],
template_variables=screenboard['template_variables'],
height=screenboard['height'],
width=screenboard['width']
)
print(new)
| 15.465116
| 54
| 0.690226
|
9ff955dc84377f8341d344f3c668917df67c78c9
| 2,542
|
py
|
Python
|
pontoon/base/urls.py
|
Br3nda/pontoon
|
f8749dcb25c75954761e67344a21d19f70900e70
|
[
"BSD-3-Clause"
] | null | null | null |
pontoon/base/urls.py
|
Br3nda/pontoon
|
f8749dcb25c75954761e67344a21d19f70900e70
|
[
"BSD-3-Clause"
] | 795
|
2017-11-02T12:26:10.000Z
|
2020-06-01T13:34:22.000Z
|
pontoon/base/urls.py
|
Br3nda/pontoon
|
f8749dcb25c75954761e67344a21d19f70900e70
|
[
"BSD-3-Clause"
] | null | null | null |
from django.conf.urls import url
from django.views.generic import RedirectView, TemplateView
import views
urlpatterns = [
# Home
url(r'^$', views.home, name='pontoon.home'),
# Terms
url(r'^terms/$',
TemplateView.as_view(template_name='terms.html'),
name='pontoon.terms'),
# TRANSLATE URLs
# Legacy: Translate project's page
url(r'^locale/(?P<locale>[A-Za-z0-9\-\@\.]+)/project/(?P<slug>.+)' +
'/page/(?P<page>.+)/$',
RedirectView.as_view(url="/%(locale)s/%(slug)s/%(page)s/", permanent=True)),
# Legacy: Translate project
url(r'^locale/(?P<locale>[A-Za-z0-9\-\@\.]+)/project/(?P<slug>.+)/$',
RedirectView.as_view(url="/%(locale)s/%(slug)s/", permanent=True)),
# AJAX: Get locale details
url(r'^teams/(?P<locale>[A-Za-z0-9\-\@\.]+)/projects/$',
views.locale_projects,
name='pontoon.locale.projects'),
# AJAX: Get locale-project pages/paths with stats
url(r'^(?P<locale>[A-Za-z0-9\-\@\.]+)/(?P<slug>[\w-]+)/parts/$',
views.locale_project_parts,
name='pontoon.locale.project.parts'),
# AJAX: Get authors and time range data
url(r'^(?P<locale>[A-Za-z0-9\-\@\.]+)/(?P<slug>[\w-]+)/(?P<part>.+)/authors-and-time-range/$',
views.authors_and_time_range,
name='pontoon.authors.and.time.range'),
# Translate project
url(r'^(?P<locale>[A-Za-z0-9\-\@\.]+)/(?P<slug>[\w-]+)/(?P<part>.+)/$',
views.translate,
name='pontoon.translate'),
# Download translation memory
url(r'^(?P<locale>[A-Za-z0-9\-\@\.]+)/(?P<slug>[\w-]+)/(?P<filename>.+)\.tmx$',
views.download_translation_memory,
name='pontoon.download_tmx'),
# AJAX
url(r'^get-entities/', views.entities,
name='pontoon.entities'),
url(r'^update/', views.update_translation,
name='pontoon.update'),
url(r'^get-history/', views.get_translation_history,
name='pontoon.get_history'),
url(r'^unapprove-translation/', views.unapprove_translation,
name='pontoon.unapprove_translation'),
url(r'^reject-translation/', views.reject_translation,
name='pontoon.reject_translation'),
url(r'^unreject-translation/', views.unreject_translation,
name='pontoon.unreject_translation'),
url(r'^other-locales/', views.get_translations_from_other_locales,
name='pontoon.other_locales'),
url(r'^download/', views.download,
name='pontoon.download'),
url(r'^upload/', views.upload,
name='pontoon.upload'),
]
| 35.802817
| 98
| 0.608969
|
82034c4b6cdb21e1376173bbe0349269565ba713
| 276
|
py
|
Python
|
py3_oop/observer.py
|
KT12/Training
|
ac4de382a1387ccfe51404eb3302cc518762a781
|
[
"MIT"
] | 1
|
2017-08-17T04:44:53.000Z
|
2017-08-17T04:44:53.000Z
|
py3_oop/observer.py
|
KT12/training
|
ac4de382a1387ccfe51404eb3302cc518762a781
|
[
"MIT"
] | null | null | null |
py3_oop/observer.py
|
KT12/training
|
ac4de382a1387ccfe51404eb3302cc518762a781
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Aug 8 15:52:58 2017
@author: kt12
"""
from abc import ABCMeta, abstractmethod
class Observer(metaclass=ABCMeta):
@abstractmethod
def update(self, *args, **kwargs):
pass
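# Illustrative sketch (not part of the original module): a minimal concrete
# observer showing what the abstract contract above expects from subclasses.
class PrintObserver(Observer):
    """Hypothetical observer that simply echoes whatever it is notified with."""
    def update(self, *args, **kwargs):
        print('notified with', args, kwargs)

if __name__ == '__main__':
    PrintObserver().update('price_changed', old=10, new=12)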
| 17.25
| 39
| 0.637681
|
1fa28fb2d4b87105899957df771454035fd795d7
| 355
|
py
|
Python
|
services/fuse/tests/test_crunchstat.py
|
tomclegg/arvados
|
138eedbea2658ef397c7a58ce48d6f06077ba140
|
[
"Apache-2.0"
] | null | null | null |
services/fuse/tests/test_crunchstat.py
|
tomclegg/arvados
|
138eedbea2658ef397c7a58ce48d6f06077ba140
|
[
"Apache-2.0"
] | null | null | null |
services/fuse/tests/test_crunchstat.py
|
tomclegg/arvados
|
138eedbea2658ef397c7a58ce48d6f06077ba140
|
[
"Apache-2.0"
] | null | null | null |
import subprocess
from integration_test import IntegrationTest
class CrunchstatTest(IntegrationTest):
def test_crunchstat(self):
output = subprocess.check_output(
['./bin/arv-mount',
'--crunchstat-interval', '1',
self.mnt,
'--exec', 'echo', 'ok'])
self.assertEqual("ok\n", output)
| 25.357143
| 44
| 0.597183
|
a98c32c3e2071f37a7ca165b42ed872cd96e60df
| 1,385
|
py
|
Python
|
tests/test_cli.py
|
0jdxt/scrmbl
|
9935bcd7a6010b04364c81c163d89bb394300161
|
[
"MIT"
] | 95
|
2018-10-05T19:52:53.000Z
|
2021-10-04T14:59:55.000Z
|
tests/test_cli.py
|
0jdxt/scrmbl
|
9935bcd7a6010b04364c81c163d89bb394300161
|
[
"MIT"
] | 7
|
2018-10-05T16:36:43.000Z
|
2018-10-07T13:41:52.000Z
|
tests/test_cli.py
|
0jdxt/scrmbl
|
9935bcd7a6010b04364c81c163d89bb394300161
|
[
"MIT"
] | 5
|
2018-10-06T06:50:06.000Z
|
2018-10-11T16:29:49.000Z
|
import re
from click.testing import CliRunner
from scrmbl.cli import cli
def test_hello_world():
runner = CliRunner()
result = runner.invoke(cli, ['Hello, world!'])
assert result.exit_code == 0
assert result.output.split('\r')[-1] == 'Hello, world!\n'
def test_input_stream():
with open('tests/lipsum.txt', 'r') as fin:
data = fin.read()
runner = CliRunner()
result = runner.invoke(cli, ['-s', '0'], input=data)
assert result.exit_code == 0
lines = iter(result.output.split('\r'))
# only retrieve longest lines i.e. full lines
filt = []
prev, curr = None, next(lines)
while curr:
if prev and len(curr) < len(prev):
filt.append(prev)
try:
prev, curr = curr, next(lines)
except StopIteration:
curr = None
for line in filt:
assert line.split('\n')[0] in data
def test_invalid_cmd():
runner = CliRunner()
result = runner.invoke(cli, ['-c', '-'], input='abcdefg')
assert result.exit_code == 2
assert 'Usage:' in result.output
assert 'Error' in result.output
def test_charset():
runner = CliRunner()
result = runner.invoke(cli, ['-c', 'tests/chars.txt', 'test'])
assert result.exit_code == 0
for line in result.output.split('\r'):
if line:
assert re.match(r'^[tes]{0,4}[abcdefg]?$', line)
| 25.181818
| 66
| 0.59639
|
8260da1ad97e96ab709f0ba3e810244ffe9ff7d6
| 5,972
|
py
|
Python
|
threeML/minimizer/ROOT_minimizer.py
|
fkunzweiler/threeML
|
29779394ac7e50ddb5e28411fe1a5125431e612f
|
[
"BSD-3-Clause"
] | null | null | null |
threeML/minimizer/ROOT_minimizer.py
|
fkunzweiler/threeML
|
29779394ac7e50ddb5e28411fe1a5125431e612f
|
[
"BSD-3-Clause"
] | null | null | null |
threeML/minimizer/ROOT_minimizer.py
|
fkunzweiler/threeML
|
29779394ac7e50ddb5e28411fe1a5125431e612f
|
[
"BSD-3-Clause"
] | null | null | null |
import ROOT
import numpy as np
from threeML.minimizer.minimization import LocalMinimizer, FitFailed, CannotComputeCovariance
from threeML.io.dict_with_pretty_print import DictWithPrettyPrint
# These are the status returned by Minuit
# status = 1 : Covariance was made pos defined
# status = 2 : Hesse is invalid
# status = 3 : Edm is above max
# status = 4 : Reached call limit
# status = 5 : Any other failure
_status_translation = {1: 'Covariance was made pos. defined',
2: 'Hesse is invalid',
3: 'Edm is above maximum',
4: 'Reached call limit',
5: 'Unknown failure'}
# Status for HESSE
# status += 100*hesseStatus where hesse status is:
# status = 1 : hesse failed
# status = 2 : matrix inversion failed
# status = 3 : matrix is not pos defined
_hesse_status_translation = {100: 'HESSE failed',
200: 'Covariance matrix inversion failed',
300: 'Covariance matrix is not positive defined'}
class FuncWrapper(ROOT.TPyMultiGenFunction):
def __init__(self, function, dimensions):
ROOT.TPyMultiGenFunction.__init__(self, self)
self.function = function
self.dimensions = int(dimensions)
def NDim(self):
return self.dimensions
def DoEval(self, args):
new_args = map(lambda i:args[i],range(self.dimensions))
return self.function(*new_args)
class ROOTMinimizer(LocalMinimizer):
valid_setup_keys = ('ftol', 'max_function_calls', 'strategy')
def __init__(self, function, parameters, verbosity=0, setup_dict=None):
super(ROOTMinimizer, self).__init__(function, parameters, verbosity, setup_dict)
def _setup(self, user_setup_dict):
# Defaults
setup_dict = {'ftol': 1.0,
'max_function_calls': 100000,
'strategy': 1}
# Update defaults if needed
if user_setup_dict is not None:
for key in user_setup_dict:
setup_dict[key] = user_setup_dict[key]
# Setup the minimizer algorithm
self.functor = FuncWrapper(self.function, self.Npar)
self.minimizer = ROOT.Minuit2.Minuit2Minimizer("Minimize")
self.minimizer.Clear()
self.minimizer.SetMaxFunctionCalls(setup_dict['max_function_calls'])
self.minimizer.SetPrintLevel(self.verbosity)
self.minimizer.SetErrorDef(0.5)
self.minimizer.SetStrategy(setup_dict['strategy'])
self.minimizer.SetTolerance(setup_dict['ftol'])
self.minimizer.SetFunction(self.functor)
self.minimizer.SetPrintLevel(int(self.verbosity))
# Set up the parameters in internal reference
for i, (par_name, (cur_value, cur_delta, cur_min, cur_max)) in enumerate(self._internal_parameters.items()):
if cur_min is not None and cur_max is not None:
# Variable with lower and upper limit
self.minimizer.SetLimitedVariable(i, par_name, cur_value, cur_delta, cur_min, cur_max)
elif cur_min is not None and cur_max is None:
# Lower limited
self.minimizer.SetLowerLimitedVariable(i, par_name, cur_value, cur_delta, cur_min)
elif cur_min is None and cur_max is not None:
# upper limited
self.minimizer.SetUpperLimitedVariable(i, par_name, cur_value, cur_delta, cur_max)
else:
# No limits
self.minimizer.SetVariable(i, par_name, cur_value, cur_delta)
def _minimize(self, compute_covar=True):
# Minimize with MIGRAD
success = self.minimizer.Minimize()
if not success:
# Get status
status = self.minimizer.Status()
if status in _status_translation:
msg = "MIGRAD did not converge. Reason: %s (status: %i)" % (_status_translation[status], status)
else:
msg = "MIGRAD failed with status %i " \
"(see https://root.cern.ch/root/html/ROOT__Minuit2__Minuit2Minimizer.html)" % status
raise FitFailed(msg)
# Gather results
minimum = self.minimizer.MinValue()
best_fit_values = np.array(map(lambda x: x[0], zip(self.minimizer.X(), range(self.Npar))))
return best_fit_values, minimum
def _compute_covariance_matrix(self, best_fit_values):
# Gather the current status so we can offset it later
status_before_hesse = self.minimizer.Status()
# Use Hesse to compute the covariance matrix accurately
self.minimizer.Hesse()
# Gather the current status and remove the offset so that we get the HESSE status
status_after_hesse = self.minimizer.Status() - status_before_hesse
if status_after_hesse > 0:
failure_reason = _hesse_status_translation[status_after_hesse]
raise CannotComputeCovariance("HESSE failed. Reason: %s (status: %i)" % (failure_reason,
status_after_hesse))
# Gather the covariance matrix and return it
covariance_matrix = np.zeros((self.Npar, self.Npar))
for i in range(self.Npar):
for j in range(self.Npar):
covariance_matrix[i,j] = self.minimizer.CovMatrix(i,j)
return covariance_matrix
def _get_errors(self):
# Re-implement this in order to use MINOS
errors = DictWithPrettyPrint()
for i, par_name in enumerate(self.parameters):
err_low = ROOT.Double(0)
err_up = ROOT.Double(0)
self.minimizer.GetMinosError(i, err_low, err_up)
errors[par_name] = (err_low, err_up)
return errors
# GetMinosError(unsigned
# int
# i, double & errLow, double & errUp, int = 0)
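# Illustrative sketch (not part of the original module): Minuit2 reports the
# HESSE outcome on top of the MIGRAD status (total = migrad + 100 * hesse),
# which is why _compute_covariance_matrix() subtracts the pre-HESSE status.
# A tiny decoder using only the translation tables defined above:
def describe_minuit_status(migrad_status, status_after_hesse):
    migrad_msg = _status_translation.get(
        migrad_status, 'MIGRAD converged (or unknown status)')
    hesse_msg = _hesse_status_translation.get(
        status_after_hesse - migrad_status, 'HESSE succeeded (or unknown status)')
    return migrad_msg, hesse_msg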
| 31.267016
| 116
| 0.621902
|
6d0f078f5369e0d248a64f2f5b6746c83d5bd9ef
| 2,457
|
py
|
Python
|
src/engine/SCons/Tool/nasm.py
|
datalogics-staylor/scons
|
4c48deb6947066e53aac7d86621a7ec17f3b4034
|
[
"MIT"
] | 3
|
2017-01-06T09:26:23.000Z
|
2017-03-04T04:13:20.000Z
|
src/engine/SCons/Tool/nasm.py
|
datalogics-staylor/scons
|
4c48deb6947066e53aac7d86621a7ec17f3b4034
|
[
"MIT"
] | 2
|
2015-10-27T20:17:24.000Z
|
2016-08-04T21:49:56.000Z
|
src/engine/SCons/Tool/nasm.py
|
datalogics-staylor/scons
|
4c48deb6947066e53aac7d86621a7ec17f3b4034
|
[
"MIT"
] | 4
|
2015-03-31T16:09:15.000Z
|
2021-08-04T12:41:47.000Z
|
"""SCons.Tool.nasm
Tool-specific initialization for nasm, the famous Netwide Assembler.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import SCons.Defaults
import SCons.Tool
import SCons.Util
ASSuffixes = ['.s', '.asm', '.ASM']
ASPPSuffixes = ['.spp', '.SPP']
if SCons.Util.case_sensitive_suffixes('.s', '.S'):
ASPPSuffixes.extend(['.S'])
else:
ASSuffixes.extend(['.S'])
def generate(env):
"""Add Builders and construction variables for nasm to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in ASSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
for suffix in ASPPSuffixes:
static_obj.add_action(suffix, SCons.Defaults.ASPPAction)
static_obj.add_emitter(suffix, SCons.Defaults.StaticObjectEmitter)
env['AS'] = 'nasm'
env['ASFLAGS'] = SCons.Util.CLVar('')
env['ASPPFLAGS'] = '$ASFLAGS'
env['ASCOM'] = '$AS $ASFLAGS -o $TARGET $SOURCES'
env['ASPPCOM'] = '$CC $ASPPFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -c -o $TARGET $SOURCES'
def exists(env):
return env.Detect('nasm')
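# Illustrative sketch (not part of the original tool module): how a project's
# SConstruct would typically pull this tool in; 'hello.asm' is a hypothetical
# source file, assembled via the $ASCOM command line defined above.
#
#   env = Environment(tools=['default', 'nasm'])
#   env.Object('hello.asm')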
| 36.671642
| 100
| 0.73545
|
30a5d41745652fec361dbffddeecc9ee5cd9c4d2
| 10,120
|
py
|
Python
|
gan_train.py
|
nipdep/STGAN
|
c72ba6cb9d23d33accc0cfa1958a2005db3ed490
|
[
"MIT"
] | null | null | null |
gan_train.py
|
nipdep/STGAN
|
c72ba6cb9d23d33accc0cfa1958a2005db3ed490
|
[
"MIT"
] | null | null | null |
gan_train.py
|
nipdep/STGAN
|
c72ba6cb9d23d33accc0cfa1958a2005db3ed490
|
[
"MIT"
] | null | null | null |
##! usr/bin/python3
# %%
import config
from src.model.stldisc_model import define_style_descriminator, StyleNet
from src.model.gan_model import define_cnt_descriminator, define_gan, define_generator
from src.support.loss_functions import pairWiseRankingLoss
#from src.model.wavelet_gan_model import define_cnt_descriminator, define_gan, define_generator
import os
import logging
import time
import random
from datetime import datetime
from livelossplot import outputs
import tensorflow as tf
import numpy as np
from numpy import load, zeros, ones
from numpy.random import randint
from sklearn.utils import shuffle
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.initializers import RandomNormal
from tensorflow.keras.callbacks import TensorBoard
from matplotlib import pyplot
from tensorflow.python.autograph.pyct import transformer
from livelossplot import PlotLosses
from livelossplot.outputs import MatplotlibPlot
#%%
# set logger
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
#tensorboard logger
# logdir = config.LOG_DIR+ "/gan_" + datetime.now().strftime("%Y%m%d-%H%M%S")
# tensorboard_callback = TensorBoard(log_dir=logdir, histogram_freq=1, profile_batch=1)
run_opts = tf.compat.v1.RunOptions(report_tensor_allocations_upon_oom = True)
# def pairWiseRankingLoss(y_ref, y_style, label):
# m = tf.cast(tf.broadcast_to(config.LOSS_THD, shape=[y_ref.shape[0], ]), dtype=tf.float32)
# u = tf.cast(tf.broadcast_to(0, shape=[y_ref.shape[0], ]), dtype=tf.float32)
# i = tf.cast(tf.broadcast_to(1, shape=[y_ref.shape[0], ]), dtype=tf.float32)
# y = tf.cast(label, dtype=tf.float32)
# dist = tf.math.abs(tf.keras.losses.cosine_similarity(y_ref,y_style))
# loss = tf.math.multiply(y,dist) + tf.math.multiply((i-y),tf.reduce_max(tf.stack([u,m-dist]), axis=0))
# return tf.cast(tf.reduce_mean(loss), dtype=tf.float32)
# def mixLoss(ref_img, gen_img):
# one = tf.cast(tf.broadcast_to(1, shape=ref_img.shape), dtype=tf.float32)
# two = tf.cast(tf.broadcast_to(2, shape=ref_img.shape), dtype=tf.float32)
# rescaled_ref_img = tf.abs(tf.divide(tf.add(one, ref_img), two))
# rescaled_gen_img = tf.abs(tf.divide(tf.add(one, gen_img), two))
# l1_loss = tf.norm(ref_img-gen_img, ord=1, axis=0)/ref_img.shape[0]
# ms_ssim_loss = tf.reduce_mean(tf.image.ssim_multiscale(rescaled_ref_img, rescaled_gen_img, max_val=1, filter_size=3))
# alpha = tf.cast(config.GEN_LOSS_ALPHA, dtype=tf.float32)
# total_loss = alpha*ms_ssim_loss + (1-alpha)*l1_loss
# return tf.cast(total_loss, dtype=tf.float32)
def ganLoss(dss_loss, dsc_loss, gen_loss):
gan_alpha = config.GAN_ALPHA
gan_beta = config.GAN_BETA
one = 1
tot_loss = 0.6*dss_loss+0.4*dsc_loss+gen_loss
return tot_loss
def add_cnt_loss(dis_loss, gen_loss):
return dis_loss + config.LAMBDAC*gen_loss
def add_style_loss(dis_loss, gen_loss):
return dis_loss + config.LAMBDAS*gen_loss
@tf.function
def train_step(cnt_in, style_in, trans_in, cnt_real, style_real, Xds_stl, Xds_trn, yds, Xdc_cnt, Xdc_trn, ydc):
with tf.GradientTape() as gen_tape, tf.GradientTape() as discs_tape, tf.GradientTape() as discc_tape:
gen_out, dss_out, dst_out, cnt_out = gan_model([cnt_in, style_in])
dss_loss = pairWiseRankingLoss(dss_out, dst_out, style_real)
dsc_loss = dscLoss(cnt_real, cnt_out)
gen_loss = tf.cast(tf.math.abs(cntLoss(trans_in, gen_out)), dtype=tf.float32)
ref_out, trans_out = ds_model([Xds_stl, Xds_trn])
ds_loss = pairWiseRankingLoss(ref_out, trans_out, yds)
logits = dc_model([Xdc_cnt, Xdc_trn])
dc_loss = dscLoss(ydc, logits)
# total_style_loss = add_style_loss(ds_loss, dss_loss)
# total_cnt_loss = add_cnt_loss(dc_loss, dsc_loss)
total_gen_loss = ganLoss(dss_loss, dsc_loss, gen_loss)
#total_gen_loss = mixLoss(trans_in, gen_out)
generator_grads = gen_tape.gradient(total_gen_loss, g_model.trainable_variables)
cnt_disc_grads = discc_tape.gradient(dc_loss, dc_model.trainable_variables)
style_disc_grads = discs_tape.gradient(ds_loss, ds_base_model.trainable_variables)
gen_opt.apply_gradients(zip(generator_grads, g_model.trainable_variables))
dc_opt.apply_gradients(zip(cnt_disc_grads, dc_model.trainable_variables))
ds_opt.apply_gradients(zip(style_disc_grads, ds_base_model.trainable_variables))
return total_gen_loss, dc_loss, ds_loss
def load_pixel_metrics(filename):
full_mat = np.load(filename)
style_pixels = (full_mat['style']-127.5)/127.5
content_pixels = (full_mat['cotent']-127.5)/127.5
transfer_mat = (full_mat['transfers']-127.5)/127.5
return style_pixels, content_pixels, transfer_mat
def generate_real_samples(dataset, n_samples, patch_shape):
style, content, trans = dataset
cnt_idxs = random.sample(range(style.shape[1]), n_samples)
style_idxs = np.random.randint(0, style.shape[0], n_samples)
cnt_pixels = content[cnt_idxs]
style_pixels = style[style_idxs]
mat_pixels = trans[style_idxs, cnt_idxs, ...]
y_dc = ones((n_samples, patch_shape, patch_shape, 1))
y_ds = ones((n_samples))
return [cnt_pixels, style_pixels, mat_pixels], y_dc, y_ds
def generate_fake_samples(g_model, samples, patch_shape):
cnt_img, style_img = samples
X = g_model([cnt_img, style_img])
y_dc = zeros((len(X), patch_shape, patch_shape, 1))
y_ds = zeros((len(X)))
return X, y_dc, y_ds
def summarize_performance(step, g_model, dataset, n_samples=3):
[X_cnt, X_stl, X_trn], _, _ = generate_real_samples(dataset, n_samples, 1)
X_fake, _, _ = generate_fake_samples(g_model, [X_cnt, X_stl], 1)
#rescale pixels values
X_cnt = (X_cnt+1)/2.0
X_stl = (X_stl+1)/2.0
X_trn = (X_trn+1)/2.0
X_fake = (X_fake+1)/2.0
# plot samples
for i in range(n_samples):
pyplot.subplot(4, n_samples, 1 + i)
pyplot.axis('off')
pyplot.imshow(X_cnt[i])
for i in range(n_samples):
pyplot.subplot(4, n_samples, 1 + n_samples + i)
pyplot.axis('off')
pyplot.imshow(X_stl[i])
for i in range(n_samples):
pyplot.subplot(4, n_samples, 1 + 2*n_samples + i)
pyplot.axis('off')
pyplot.imshow(X_trn[i])
for i in range(n_samples):
pyplot.subplot(4, n_samples, 1 + 3*n_samples + i)
pyplot.axis('off')
pyplot.imshow(X_fake[i])
# save result image
filename = f'plot_g{step+1}.png'
pyplot.savefig(os.path.join(config.GAN_LOG_DIR,filename))
pyplot.close()
# save model checkpoint
# model_filename = f'model_{step+1}.h5'
# g_model.save(os.path.join(config.GAN_LOG_DIR,model_filename))
# logger.info(f">> Saved : {filename} , {model_filename} ")
log_dict = {'epoch' : [], 'dss_loss' : [], 'dsc_loss' : [], 'gen_loss' : []}
def train(g_model, dataset, n_epoch=100, batch_size=16):
n_patch = dc_model.output_shape[1]
batch_per_epoch = (dataset[1].shape[0]*(dataset[1].shape[1]//2))//batch_size
n_steps = n_epoch*batch_per_epoch
plotlosses = PlotLosses(outputs=[MatplotlibPlot()], groups={'dss model' : ['dss_loss'], 'dsc model' : ['dsc_loss'], 'gan model' : ['gen_loss']})
save_interval = 10
log_interval = 1
for i in range(n_steps):
[X_cnt, X_stl, X_trn], ydc_real, yds_real = generate_real_samples(dataset, batch_size, n_patch)
X_fake_trn, ydc_fake, yds_fake = generate_fake_samples(g_model, [X_cnt, X_stl], n_patch)
# train style descriminator
usXds_stl = np.concatenate((X_stl, X_stl))
usXds_trn = np.concatenate((X_trn, X_fake_trn))
usysd = np.concatenate((yds_real, yds_fake))
Xds_stl, Xds_trn, yds = shuffle(usXds_stl, usXds_trn, usysd)
#train content descriminator
usXdc_cnt = np.concatenate((X_cnt, X_cnt))
usXdc_trn = np.concatenate((X_trn, X_fake_trn))
usydc = np.concatenate((ydc_real, ydc_fake))
Xdc_cnt, Xdc_trn, ydc = shuffle(usXdc_cnt, usXdc_trn, usydc)
#train GAN model
gan_loss, dc_loss, ds_loss = train_step(X_cnt, X_stl, X_trn, ydc_real, yds_real, Xds_stl, Xds_trn, yds, Xdc_cnt, Xdc_trn, ydc)
#logger.info(f'[{i}/{n_steps}] : style descriminator total loss : {ds_loss} \n content descriminator total loss : {dc_loss} \n GAN total loss : {gan_total_loss} | GAN dss loss : {gan_dss_loss} | GAN dsc loss : {gan_dsc_loss}')
# print(f'[{i}/{n_steps}] : style descriminator total loss : {ds_loss} \n content descriminator total loss : {dc_loss} \n GAN total loss : {gan_loss}')
if i % 100 == 0:
log_dict['epoch'].append(i//100)
log_dict['dss_loss'].append(ds_loss)
log_dict['dsc_loss'].append(dc_loss)
log_dict['gen_loss'].append(gan_loss)
plotlosses.update({
'dss_loss' : ds_loss,
'dsc_loss' : dc_loss,
'gen_loss' : gan_loss,
})
plotlosses.send()
# if (i+1) % (batch_per_epoch*save_interval) == 0:
# summarize_performance(i, g_model, dataset)
if i % 1000 == 0:
summarize_performance(i, g_model, dataset)
# if i == config.GAN_BP:
# break
#%%
if __name__ == "__main__":
#load dataset
dataset = load_pixel_metrics(config.GAN_DATASET_DIR)
dscLoss = tf.keras.losses.BinaryCrossentropy()
cntLoss = tf.keras.losses.MeanAbsoluteError()
    # `train_steps` was undefined in the original script (a NameError at run
    # time); derive it the same way train() computes n_steps, assuming the
    # 100 epochs and batch size passed to train() below.
    batch_per_epoch = (dataset[1].shape[0] * (dataset[1].shape[1] // 2)) // config.GAN_BATCH_SIZE
    train_steps = 100 * batch_per_epoch
    lr_fn = tf.optimizers.schedules.PolynomialDecay(1e-4, train_steps, 1e-5, 2)
opt = tf.optimizers.Adam(lr_fn)
gen_opt = tf.keras.optimizers.Adam(1e-4)
ds_opt = tf.keras.optimizers.Adam(lr_fn)
dc_opt = tf.keras.optimizers.Adam(lr_fn)
#init models
g_model = define_generator(config.GAN_LATENT_SIZE, config.IMAGE_SHAPE)
dc_model = define_cnt_descriminator()
ds_base_model = define_style_descriminator(16, config.IMAGE_SHAPE)
ds_model = StyleNet(ds_base_model)
gan_model = define_gan(g_model, dc_model, ds_model)
#train model
train(g_model, dataset, 100, config.GAN_BATCH_SIZE)
# %%
| 42.521008
| 234
| 0.695652
|
a91973f7e98aa014d7b9ccbdb6b7d79641a9ccfd
| 803
|
py
|
Python
|
venv/bin/rst2odt.py
|
sbassam/nub-summarizer
|
4798d13d5a959b41b769c1a0ef2ccc994eed6737
|
[
"Apache-2.0"
] | null | null | null |
venv/bin/rst2odt.py
|
sbassam/nub-summarizer
|
4798d13d5a959b41b769c1a0ef2ccc994eed6737
|
[
"Apache-2.0"
] | 5
|
2021-04-30T21:25:13.000Z
|
2022-03-12T00:43:14.000Z
|
venv/bin/rst2odt.py
|
sbassam/nub-summarizer
|
4798d13d5a959b41b769c1a0ef2ccc994eed6737
|
[
"Apache-2.0"
] | null | null | null |
#!/Users/soroush/Documents/nub-summarizer/venv/bin/python
# $Id: rst2odt.py 5839 2009-01-07 19:09:28Z dkuhlman $
# Author: Dave Kuhlman <dkuhlman@rexx.com>
# Copyright: This module has been placed in the public domain.
"""
A front end to the Docutils Publisher, producing OpenOffice documents.
"""
import sys
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline_to_binary, default_description
from docutils.writers.odf_odt import Writer, Reader
description = ('Generates OpenDocument/OpenOffice/ODF documents from '
'standalone reStructuredText sources. ' + default_description)
writer = Writer()
reader = Reader()
output = publish_cmdline_to_binary(reader=reader, writer=writer,
description=description)
| 25.903226
| 78
| 0.755915
|
47a28c3e7cc809c8bb10acdf3cdd7ece6ee409a6
| 2,826
|
py
|
Python
|
client/verta/verta/data_types/_float_histogram.py
|
stefan-petrov-toptal/modeldb
|
a8a9b9da6ed964c91351230b2f0d2703c75794de
|
[
"Apache-2.0"
] | null | null | null |
client/verta/verta/data_types/_float_histogram.py
|
stefan-petrov-toptal/modeldb
|
a8a9b9da6ed964c91351230b2f0d2703c75794de
|
[
"Apache-2.0"
] | null | null | null |
client/verta/verta/data_types/_float_histogram.py
|
stefan-petrov-toptal/modeldb
|
a8a9b9da6ed964c91351230b2f0d2703c75794de
|
[
"Apache-2.0"
] | 1
|
2021-05-04T13:52:09.000Z
|
2021-05-04T13:52:09.000Z
|
# -*- coding: utf-8 -*-
from __future__ import division
import collections
from ..external import six
from .._internal_utils import arg_handler
from .._internal_utils.importer import maybe_dependency
from . import _VertaDataType
class FloatHistogram(_VertaDataType):
"""
Representation of a float histogram.
Parameters
----------
bucket_limits : list of float
Boundary values between buckets.
data : list of int
Counts for each bucket.
Examples
--------
.. code-block:: python
from verta.data_types import FloatHistogram
data = FloatHistogram(
bucket_limits=[1, 13, 25, 37, 49, 61],
data=[15, 53, 91, 34, 7],
)
run.log_attribute("age_histogram", data)
"""
_TYPE_NAME = "floatHistogram"
_VERSION = "v1"
@arg_handler.args_to_builtin(ignore_self=True)
def __init__(self, bucket_limits, data):
self._scipy_spatial = maybe_dependency("scipy.spatial")
if self._scipy_spatial is None:
raise ImportError("scipy is not installed; try `pip install scipy`")
if len(bucket_limits) != len(data) + 1:
raise ValueError(
"length of `bucket_limits` must be 1 greater than length of `data`"
)
if not arg_handler.contains_only_numbers(bucket_limits):
raise TypeError("`bucket_limits` must contain only numbers")
if not list(bucket_limits) == sorted(bucket_limits):
raise ValueError("`bucket_limits` must be in ascending order")
if not all(isinstance(count, six.integer_types) for count in data):
raise TypeError("`data` must contain all integers")
self._bucket_limits = bucket_limits
self._data = data
def _as_dict(self):
return self._as_dict_inner(
{
"bucketLimits": self._bucket_limits,
"data": self._data,
}
)
@classmethod
def _from_dict_inner(cls, d):
data = d[cls._TYPE_NAME]
return cls(bucket_limits=data["bucketLimits"], data=data["data"])
def dist(self, other):
if not isinstance(other, type(self)):
raise TypeError(
"`other` must be type {}, not {}".format(type(self), type(other))
)
if self._bucket_limits != other._bucket_limits:
raise ValueError(
"bucket limits must match (self: {}, other: {})".format(
self._bucket_limits, other._bucket_limits,
)
)
return self._scipy_spatial.distance.cosine(
self.normalized_data(),
other.normalized_data(),
)
def normalized_data(self):
total = sum(self._data)
return [x / total for x in self._data]
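# Illustrative sketch (not part of the original module): dist() above is the
# cosine distance between the two count vectors after each is normalized to
# sum to 1.  The same computation without scipy, on made-up bucket counts:
def example_histogram_distance():
    a = [15, 53, 91, 34, 7]
    b = [10, 60, 80, 40, 10]
    na = [x / sum(a) for x in a]   # mirrors FloatHistogram.normalized_data()
    nb = [x / sum(b) for x in b]
    dot = sum(x * y for x, y in zip(na, nb))
    norm = (sum(x * x for x in na) ** 0.5) * (sum(x * x for x in nb) ** 0.5)
    return 1 - dot / norm          # scipy's definition of cosine distance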
| 29.4375
| 83
| 0.600495
|
d0068682d47e95e696ab3d58cd250984aff77789
| 64
|
py
|
Python
|
src/states/game_level_data/__init__.py
|
wiktorowski211/Fruit-Viking
|
d45c35a56425c4d42a2ee59586bd559037cf07db
|
[
"MIT"
] | null | null | null |
src/states/game_level_data/__init__.py
|
wiktorowski211/Fruit-Viking
|
d45c35a56425c4d42a2ee59586bd559037cf07db
|
[
"MIT"
] | 2
|
2019-05-22T09:06:16.000Z
|
2019-05-28T21:38:04.000Z
|
src/states/game_level_data/__init__.py
|
wiktorowski211/Fruit-Viking
|
d45c35a56425c4d42a2ee59586bd559037cf07db
|
[
"MIT"
] | 1
|
2019-03-21T13:02:29.000Z
|
2019-03-21T13:02:29.000Z
|
from .remains import Remains
from .background import Background
| 21.333333
| 34
| 0.84375
|
b197f75abe5b56b65ff2b087bc7aa1ee242267fa
| 52,871
|
py
|
Python
|
astropy/wcs/tests/test_wcs.py
|
emirkmo/astropy
|
d96cd45b25ae55117d1bcc9c40e83a82037fc815
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/wcs/tests/test_wcs.py
|
emirkmo/astropy
|
d96cd45b25ae55117d1bcc9c40e83a82037fc815
|
[
"BSD-3-Clause"
] | null | null | null |
astropy/wcs/tests/test_wcs.py
|
emirkmo/astropy
|
d96cd45b25ae55117d1bcc9c40e83a82037fc815
|
[
"BSD-3-Clause"
] | null | null | null |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import io
import os
from contextlib import nullcontext
from datetime import datetime
from packaging.version import Version
import pytest
import numpy as np
from numpy.testing import (
assert_allclose, assert_array_almost_equal, assert_array_almost_equal_nulp,
assert_array_equal)
from astropy import wcs
from astropy.wcs import _wcs # noqa
from astropy import units as u
from astropy.utils.data import (
get_pkg_data_filenames, get_pkg_data_contents, get_pkg_data_filename)
from astropy.utils.misc import NumpyRNGContext
from astropy.utils.exceptions import (
AstropyUserWarning, AstropyWarning, AstropyDeprecationWarning)
from astropy.tests.helper import assert_quantity_allclose
from astropy.io import fits
from astropy.coordinates import SkyCoord
from astropy.nddata import Cutout2D
_WCSLIB_VER = Version(_wcs.__version__)
# NOTE: User can choose to use system wcslib instead of bundled.
def ctx_for_v71_dateref_warnings():
if _WCSLIB_VER >= Version('7.1') and _WCSLIB_VER < Version('7.3'):
ctx = pytest.warns(
wcs.FITSFixedWarning,
match=r"'datfix' made the change 'Set DATE-REF to '1858-11-17' from MJD-REF'\.")
else:
ctx = nullcontext()
return ctx
class TestMaps:
def setup(self):
# get the list of the hdr files that we want to test
self._file_list = list(get_pkg_data_filenames(
"data/maps", pattern="*.hdr"))
def test_consistency(self):
# Check to see that we actually have the list we expect, so that we
# do not get in a situation where the list is empty or incomplete and
# the tests still seem to pass correctly.
# how many do we expect to see?
n_data_files = 28
assert len(self._file_list) == n_data_files, (
"test_spectra has wrong number data files: found {}, expected "
" {}".format(len(self._file_list), n_data_files))
def test_maps(self):
for filename in self._file_list:
# use the base name of the file, so we get more useful messages
# for failing tests.
filename = os.path.basename(filename)
# Now find the associated file in the installed wcs test directory.
header = get_pkg_data_contents(
os.path.join("data", "maps", filename), encoding='binary')
# finally run the test.
wcsobj = wcs.WCS(header)
world = wcsobj.wcs_pix2world([[97, 97]], 1)
assert_array_almost_equal(world, [[285.0, -66.25]], decimal=1)
pix = wcsobj.wcs_world2pix([[285.0, -66.25]], 1)
assert_array_almost_equal(pix, [[97, 97]], decimal=0)
class TestSpectra:
def setup(self):
self._file_list = list(get_pkg_data_filenames("data/spectra",
pattern="*.hdr"))
def test_consistency(self):
# Check to see that we actually have the list we expect, so that we
# do not get in a situation where the list is empty or incomplete and
# the tests still seem to pass correctly.
# how many do we expect to see?
n_data_files = 6
assert len(self._file_list) == n_data_files, (
"test_spectra has wrong number data files: found {}, expected "
" {}".format(len(self._file_list), n_data_files))
def test_spectra(self):
for filename in self._file_list:
# use the base name of the file, so we get more useful messages
# for failing tests.
filename = os.path.basename(filename)
# Now find the associated file in the installed wcs test directory.
header = get_pkg_data_contents(
os.path.join("data", "spectra", filename), encoding='binary')
# finally run the test.
if _WCSLIB_VER >= Version('7.4'):
ctx = pytest.warns(
wcs.FITSFixedWarning,
match=r"'datfix' made the change 'Set MJD-OBS to 53925\.853472 from DATE-OBS'\.") # noqa
else:
ctx = nullcontext()
with ctx:
all_wcs = wcs.find_all_wcs(header)
assert len(all_wcs) == 9
def test_fixes():
"""
From github issue #36
"""
header = get_pkg_data_contents('data/nonstandard_units.hdr', encoding='binary')
with pytest.raises(wcs.InvalidTransformError), pytest.warns(wcs.FITSFixedWarning) as w:
wcs.WCS(header, translate_units='dhs')
if Version('7.4') <= _WCSLIB_VER < Version('7.6'):
assert len(w) == 3
assert "'datfix' made the change 'Success'." in str(w.pop().message)
else:
assert len(w) == 2
first_wmsg = str(w[0].message)
assert 'unitfix' in first_wmsg and 'Hz' in first_wmsg and 'M/S' in first_wmsg
assert 'plane angle' in str(w[1].message) and 'm/s' in str(w[1].message)
# Ignore "PV2_2 = 0.209028857410973 invalid keyvalue" warning seen on Windows.
@pytest.mark.filterwarnings(r'ignore:PV2_2')
def test_outside_sky():
"""
From github issue #107
"""
header = get_pkg_data_contents(
'data/outside_sky.hdr', encoding='binary')
w = wcs.WCS(header)
assert np.all(np.isnan(w.wcs_pix2world([[100., 500.]], 0))) # outside sky
assert np.all(np.isnan(w.wcs_pix2world([[200., 200.]], 0))) # outside sky
assert not np.any(np.isnan(w.wcs_pix2world([[1000., 1000.]], 0)))
def test_pix2world():
"""
From github issue #1463
"""
# TODO: write this to test the expected output behavior of pix2world,
# currently this just makes sure it doesn't error out in unexpected ways
# (and compares `wcs.pc` and `result` values?)
filename = get_pkg_data_filename('data/sip2.fits')
with pytest.warns(wcs.FITSFixedWarning) as caught_warnings:
        # this raises a warning that is unimportant for testing pix2world:
# FITSFixedWarning(u'The WCS transformation has more axes (2) than
# the image it is associated with (0)')
ww = wcs.WCS(filename)
# might as well monitor for changing behavior
if Version('7.4') <= _WCSLIB_VER < Version('7.6'):
assert len(caught_warnings) == 2
else:
assert len(caught_warnings) == 1
n = 3
pixels = (np.arange(n) * np.ones((2, n))).T
result = ww.wcs_pix2world(pixels, 0, ra_dec_order=True)
# Catch #2791
ww.wcs_pix2world(pixels[..., 0], pixels[..., 1], 0, ra_dec_order=True)
# assuming that the data of sip2.fits doesn't change
answer = np.array([[0.00024976, 0.00023018],
[0.00023043, -0.00024997]])
assert np.allclose(ww.wcs.pc, answer, atol=1.e-8)
answer = np.array([[202.39265216, 47.17756518],
[202.39335826, 47.17754619],
[202.39406436, 47.1775272]])
assert np.allclose(result, answer, atol=1.e-8, rtol=1.e-10)
def test_load_fits_path():
fits_name = get_pkg_data_filename('data/sip.fits')
with pytest.warns(wcs.FITSFixedWarning):
wcs.WCS(fits_name)
def test_dict_init():
"""
Test that WCS can be initialized with a dict-like object
"""
# Dictionary with no actual WCS, returns identity transform
with ctx_for_v71_dateref_warnings():
w = wcs.WCS({})
xp, yp = w.wcs_world2pix(41., 2., 1)
assert_array_almost_equal_nulp(xp, 41., 10)
assert_array_almost_equal_nulp(yp, 2., 10)
# Valid WCS
hdr = {
'CTYPE1': 'GLON-CAR',
'CTYPE2': 'GLAT-CAR',
'CUNIT1': 'deg',
'CUNIT2': 'deg',
'CRPIX1': 1,
'CRPIX2': 1,
'CRVAL1': 40.,
'CRVAL2': 0.,
'CDELT1': -0.1,
'CDELT2': 0.1
}
if _WCSLIB_VER >= Version('7.1'):
hdr['DATEREF'] = '1858-11-17'
if _WCSLIB_VER >= Version('7.4'):
ctx = pytest.warns(
wcs.wcs.FITSFixedWarning,
match=r"'datfix' made the change 'Set MJDREF to 0\.000000 from DATEREF'\.")
else:
ctx = nullcontext()
with ctx:
w = wcs.WCS(hdr)
xp, yp = w.wcs_world2pix(41., 2., 0)
assert_array_almost_equal_nulp(xp, -10., 10)
assert_array_almost_equal_nulp(yp, 20., 10)
def test_extra_kwarg():
"""
Issue #444
"""
w = wcs.WCS()
with NumpyRNGContext(123456789):
data = np.random.rand(100, 2)
with pytest.raises(TypeError):
w.wcs_pix2world(data, origin=1)
def test_3d_shapes():
"""
Issue #444
"""
w = wcs.WCS(naxis=3)
with NumpyRNGContext(123456789):
data = np.random.rand(100, 3)
result = w.wcs_pix2world(data, 1)
assert result.shape == (100, 3)
result = w.wcs_pix2world(
data[..., 0], data[..., 1], data[..., 2], 1)
assert len(result) == 3
def test_preserve_shape():
w = wcs.WCS(naxis=2)
x = np.random.random((2, 3, 4))
y = np.random.random((2, 3, 4))
xw, yw = w.wcs_pix2world(x, y, 1)
assert xw.shape == (2, 3, 4)
assert yw.shape == (2, 3, 4)
xp, yp = w.wcs_world2pix(x, y, 1)
assert xp.shape == (2, 3, 4)
assert yp.shape == (2, 3, 4)
def test_broadcasting():
w = wcs.WCS(naxis=2)
x = np.random.random((2, 3, 4))
y = 1
xp, yp = w.wcs_world2pix(x, y, 1)
assert xp.shape == (2, 3, 4)
assert yp.shape == (2, 3, 4)
def test_shape_mismatch():
w = wcs.WCS(naxis=2)
x = np.random.random((2, 3, 4))
y = np.random.random((3, 2, 4))
with pytest.raises(ValueError) as exc:
xw, yw = w.wcs_pix2world(x, y, 1)
assert exc.value.args[0] == "Coordinate arrays are not broadcastable to each other"
with pytest.raises(ValueError) as exc:
xp, yp = w.wcs_world2pix(x, y, 1)
assert exc.value.args[0] == "Coordinate arrays are not broadcastable to each other"
# There are some ambiguities that need to be worked around when
# naxis == 1
w = wcs.WCS(naxis=1)
x = np.random.random((42, 1))
xw = w.wcs_pix2world(x, 1)
assert xw.shape == (42, 1)
x = np.random.random((42,))
xw, = w.wcs_pix2world(x, 1)
assert xw.shape == (42,)
def test_invalid_shape():
# Issue #1395
w = wcs.WCS(naxis=2)
xy = np.random.random((2, 3))
with pytest.raises(ValueError) as exc:
w.wcs_pix2world(xy, 1)
assert exc.value.args[0] == 'When providing two arguments, the array must be of shape (N, 2)'
xy = np.random.random((2, 1))
with pytest.raises(ValueError) as exc:
w.wcs_pix2world(xy, 1)
assert exc.value.args[0] == 'When providing two arguments, the array must be of shape (N, 2)'
def test_warning_about_defunct_keywords():
header = get_pkg_data_contents('data/defunct_keywords.hdr', encoding='binary')
if Version('7.4') <= _WCSLIB_VER < Version('7.6'):
n_warn = 5
else:
n_warn = 4
# Make sure the warnings come out every time...
for _ in range(2):
with pytest.warns(wcs.FITSFixedWarning) as w:
wcs.WCS(header)
assert len(w) == n_warn
# 7.4 adds a fifth warning "'datfix' made the change 'Success'."
for item in w[:4]:
assert 'PCi_ja' in str(item.message)
def test_warning_about_defunct_keywords_exception():
header = get_pkg_data_contents('data/defunct_keywords.hdr', encoding='binary')
with pytest.warns(wcs.FITSFixedWarning):
wcs.WCS(header)
def test_to_header_string():
hdrstr = (
"WCSAXES = 2 / Number of coordinate axes ",
"CRPIX1 = 0.0 / Pixel coordinate of reference point ",
"CRPIX2 = 0.0 / Pixel coordinate of reference point ",
"CDELT1 = 1.0 / Coordinate increment at reference point ",
"CDELT2 = 1.0 / Coordinate increment at reference point ",
"CRVAL1 = 0.0 / Coordinate value at reference point ",
"CRVAL2 = 0.0 / Coordinate value at reference point ",
"LATPOLE = 90.0 / [deg] Native latitude of celestial pole ",
)
if _WCSLIB_VER >= Version('7.3'):
hdrstr += (
"MJDREF = 0.0 / [d] MJD of fiducial time ",
)
elif _WCSLIB_VER >= Version('7.1'):
hdrstr += (
"DATEREF = '1858-11-17' / ISO-8601 fiducial time ",
"MJDREFI = 0.0 / [d] MJD of fiducial time, integer part ",
"MJDREFF = 0.0 / [d] MJD of fiducial time, fractional part "
)
hdrstr += ("END", )
header_string = ''.join(hdrstr)
w = wcs.WCS()
h0 = fits.Header.fromstring(w.to_header_string().strip())
if 'COMMENT' in h0:
del h0['COMMENT']
if '' in h0:
del h0['']
h1 = fits.Header.fromstring(header_string.strip())
assert dict(h0) == dict(h1)
def test_to_fits():
    if _WCSLIB_VER < Version('7.1'):
        nrec = 8
    elif _WCSLIB_VER < Version('7.3'):
        nrec = 11
    else:
        nrec = 9
w = wcs.WCS()
header_string = w.to_header()
wfits = w.to_fits()
assert isinstance(wfits, fits.HDUList)
assert isinstance(wfits[0], fits.PrimaryHDU)
assert header_string == wfits[0].header[-nrec:]
def test_to_header_warning():
fits_name = get_pkg_data_filename('data/sip.fits')
with pytest.warns(wcs.FITSFixedWarning):
x = wcs.WCS(fits_name)
with pytest.warns(AstropyWarning, match='A_ORDER') as w:
x.to_header()
assert len(w) == 1
def test_no_comments_in_header():
w = wcs.WCS()
header = w.to_header()
assert w.wcs.alt not in header
assert 'COMMENT' + w.wcs.alt.strip() not in header
assert 'COMMENT' not in header
wkey = 'P'
header = w.to_header(key=wkey)
assert wkey not in header
assert 'COMMENT' not in header
assert 'COMMENT' + w.wcs.alt.strip() not in header
def test_find_all_wcs_crash():
"""
Causes a double free without a recent fix in wcslib_wrap.C
"""
with open(get_pkg_data_filename("data/too_many_pv.hdr")) as fd:
header = fd.read()
# We have to set fix=False here, because one of the fixing tasks is to
# remove redundant SCAMP distortion parameters when SIP distortion
# parameters are also present.
with pytest.raises(wcs.InvalidTransformError), pytest.warns(wcs.FITSFixedWarning):
wcs.find_all_wcs(header, fix=False)
# NOTE: Warning bubbles up from C layer during wcs.validate() and
# is hard to catch, so we just ignore it.
@pytest.mark.filterwarnings("ignore")
def test_validate():
results = wcs.validate(get_pkg_data_filename("data/validate.fits"))
results_txt = sorted(set([x.strip() for x in repr(results).splitlines()]))
if _WCSLIB_VER >= Version('7.6'):
filename = 'data/validate.7.6.txt'
elif _WCSLIB_VER >= Version('7.4'):
filename = 'data/validate.7.4.txt'
elif _WCSLIB_VER >= Version('6.0'):
filename = 'data/validate.6.txt'
elif _WCSLIB_VER >= Version('5.13'):
filename = 'data/validate.5.13.txt'
elif _WCSLIB_VER >= Version('5.0'):
filename = 'data/validate.5.0.txt'
else:
filename = 'data/validate.txt'
with open(get_pkg_data_filename(filename), "r") as fd:
lines = fd.readlines()
assert sorted(set([x.strip() for x in lines])) == results_txt
def test_validate_with_2_wcses():
# From Issue #2053
with pytest.warns(AstropyUserWarning):
results = wcs.validate(get_pkg_data_filename("data/2wcses.hdr"))
assert "WCS key 'A':" in str(results)
def test_crpix_maps_to_crval():
twcs = wcs.WCS(naxis=2)
twcs.wcs.crval = [251.29, 57.58]
twcs.wcs.cdelt = [1, 1]
twcs.wcs.crpix = [507, 507]
twcs.wcs.pc = np.array([[7.7e-6, 3.3e-5], [3.7e-5, -6.8e-6]])
twcs._naxis = [1014, 1014]
twcs.wcs.ctype = ['RA---TAN-SIP', 'DEC--TAN-SIP']
a = np.array(
[[0, 0, 5.33092692e-08, 3.73753773e-11, -2.02111473e-13],
[0, 2.44084308e-05, 2.81394789e-11, 5.17856895e-13, 0.0],
[-2.41334657e-07, 1.29289255e-10, 2.35753629e-14, 0.0, 0.0],
[-2.37162007e-10, 5.43714947e-13, 0.0, 0.0, 0.0],
[-2.81029767e-13, 0.0, 0.0, 0.0, 0.0]]
)
b = np.array(
[[0, 0, 2.99270374e-05, -2.38136074e-10, 7.23205168e-13],
[0, -1.71073858e-07, 6.31243431e-11, -5.16744347e-14, 0.0],
[6.95458963e-06, -3.08278961e-10, -1.75800917e-13, 0.0, 0.0],
[3.51974159e-11, 5.60993016e-14, 0.0, 0.0, 0.0],
[-5.92438525e-13, 0.0, 0.0, 0.0, 0.0]]
)
twcs.sip = wcs.Sip(a, b, None, None, twcs.wcs.crpix)
twcs.wcs.set()
pscale = np.sqrt(wcs.utils.proj_plane_pixel_area(twcs))
# test that CRPIX maps to CRVAL:
assert_allclose(
twcs.wcs_pix2world(*twcs.wcs.crpix, 1), twcs.wcs.crval,
rtol=0.0, atol=1e-6 * pscale
)
# test that CRPIX maps to CRVAL:
assert_allclose(
twcs.all_pix2world(*twcs.wcs.crpix, 1), twcs.wcs.crval,
rtol=0.0, atol=1e-6 * pscale
)
def test_all_world2pix(fname=None, ext=0,
tolerance=1.0e-4, origin=0,
random_npts=25000,
adaptive=False, maxiter=20,
detect_divergence=True):
"""Test all_world2pix, iterative inverse of all_pix2world"""
# Open test FITS file:
if fname is None:
fname = get_pkg_data_filename('data/j94f05bgq_flt.fits')
ext = ('SCI', 1)
if not os.path.isfile(fname):
raise OSError(f"Input file '{fname:s}' to 'test_all_world2pix' not found.")
h = fits.open(fname)
w = wcs.WCS(h[ext].header, h)
h.close()
del h
crpix = w.wcs.crpix
ncoord = crpix.shape[0]
# Assume that CRPIX is at the center of the image and that the image has
# a power-of-2 number of pixels along each axis. Only use the central
# 1/64 for this testing purpose:
naxesi_l = list((7. / 16 * crpix).astype(int))
naxesi_u = list((9. / 16 * crpix).astype(int))
# Generate integer indices of pixels (image grid):
img_pix = np.dstack([i.flatten() for i in
np.meshgrid(*map(range, naxesi_l, naxesi_u))])[0]
    # Generate random data (in image coordinates):
with NumpyRNGContext(123456789):
rnd_pix = np.random.rand(random_npts, ncoord)
# Scale random data to cover the central part of the image
mwidth = 2 * (crpix * 1. / 8)
rnd_pix = crpix - 0.5 * mwidth + (mwidth - 1) * rnd_pix
# Reference pixel coordinates in image coordinate system (CS):
test_pix = np.append(img_pix, rnd_pix, axis=0)
# Reference pixel coordinates in sky CS using forward transformation:
all_world = w.all_pix2world(test_pix, origin)
try:
runtime_begin = datetime.now()
# Apply the inverse iterative process to pixels in world coordinates
# to recover the pixel coordinates in image space.
all_pix = w.all_world2pix(
all_world, origin, tolerance=tolerance, adaptive=adaptive,
maxiter=maxiter, detect_divergence=detect_divergence)
runtime_end = datetime.now()
except wcs.wcs.NoConvergence as e:
runtime_end = datetime.now()
ndiv = 0
if e.divergent is not None:
ndiv = e.divergent.shape[0]
print(f"There are {ndiv} diverging solutions.")
print(f"Indices of diverging solutions:\n{e.divergent}")
print(f"Diverging solutions:\n{e.best_solution[e.divergent]}\n")
print("Mean radius of the diverging solutions: {}"
.format(np.mean(
np.linalg.norm(e.best_solution[e.divergent], axis=1))))
print("Mean accuracy of the diverging solutions: {}\n"
.format(np.mean(
np.linalg.norm(e.accuracy[e.divergent], axis=1))))
else:
print("There are no diverging solutions.")
nslow = 0
if e.slow_conv is not None:
nslow = e.slow_conv.shape[0]
print(f"There are {nslow} slowly converging solutions.")
print(f"Indices of slowly converging solutions:\n{e.slow_conv}")
print(f"Slowly converging solutions:\n{e.best_solution[e.slow_conv]}\n")
else:
print("There are no slowly converging solutions.\n")
print("There are {} converged solutions."
.format(e.best_solution.shape[0] - ndiv - nslow))
print(f"Best solutions (all points):\n{e.best_solution}")
print(f"Accuracy:\n{e.accuracy}\n")
print("\nFinished running 'test_all_world2pix' with errors.\n"
"ERROR: {}\nRun time: {}\n"
.format(e.args[0], runtime_end - runtime_begin))
raise e
# Compute differences between reference pixel coordinates and
# pixel coordinates (in image space) recovered from reference
# pixels in world coordinates:
errors = np.sqrt(np.sum(np.power(all_pix - test_pix, 2), axis=1))
meanerr = np.mean(errors)
maxerr = np.amax(errors)
print("\nFinished running 'test_all_world2pix'.\n"
"Mean error = {:e} (Max error = {:e})\n"
"Run time: {}\n"
.format(meanerr, maxerr, runtime_end - runtime_begin))
    assert maxerr < 2.0 * tolerance
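# Editorial note (not from the original test suite): thanks to its keyword
# arguments this test can also be exercised interactively against other data,
# e.g. test_all_world2pix(fname='my_image.fits', ext=('SCI', 1), adaptive=True),
# where 'my_image.fits' is a hypothetical file containing a distorted WCS.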
def test_scamp_sip_distortion_parameters():
"""
Test parsing of WCS parameters with redundant SIP and SCAMP distortion
parameters.
"""
header = get_pkg_data_contents('data/validate.fits', encoding='binary')
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(header)
# Just check that this doesn't raise an exception.
w.all_pix2world(0, 0, 0)
def test_fixes2():
"""
From github issue #1854
"""
header = get_pkg_data_contents(
'data/nonstandard_units.hdr', encoding='binary')
with pytest.raises(wcs.InvalidTransformError):
wcs.WCS(header, fix=False)
def test_unit_normalization():
"""
From github issue #1918
"""
header = get_pkg_data_contents(
'data/unit.hdr', encoding='binary')
w = wcs.WCS(header)
assert w.wcs.cunit[2] == 'm/s'
def test_footprint_to_file(tmpdir):
"""
From github issue #1912
"""
# Arbitrary keywords from real data
hdr = {'CTYPE1': 'RA---ZPN', 'CRUNIT1': 'deg',
'CRPIX1': -3.3495999e+02, 'CRVAL1': 3.185790700000e+02,
'CTYPE2': 'DEC--ZPN', 'CRUNIT2': 'deg',
'CRPIX2': 3.0453999e+03, 'CRVAL2': 4.388538000000e+01,
'PV2_1': 1., 'PV2_3': 220., 'NAXIS1': 2048, 'NAXIS2': 1024}
w = wcs.WCS(hdr)
testfile = str(tmpdir.join('test.txt'))
w.footprint_to_file(testfile)
with open(testfile, 'r') as f:
lines = f.readlines()
assert len(lines) == 4
assert lines[2] == 'ICRS\n'
assert 'color=green' in lines[3]
w.footprint_to_file(testfile, coordsys='FK5', color='red')
with open(testfile, 'r') as f:
lines = f.readlines()
assert len(lines) == 4
assert lines[2] == 'FK5\n'
assert 'color=red' in lines[3]
with pytest.raises(ValueError):
w.footprint_to_file(testfile, coordsys='FOO')
del hdr['NAXIS1']
del hdr['NAXIS2']
w = wcs.WCS(hdr)
with pytest.warns(AstropyUserWarning):
w.footprint_to_file(testfile)
# Ignore the FITSFixedWarning about keyrecords following the END keyrecord
# being ignored, which comes from src/astropy_wcs.c. Only a blind catch like
# this seems to work when pytest warnings are turned into exceptions.
@pytest.mark.filterwarnings('ignore')
def test_validate_faulty_wcs():
"""
From github issue #2053
"""
h = fits.Header()
# Illegal WCS:
h['RADESYSA'] = 'ICRS'
h['PV2_1'] = 1.0
hdu = fits.PrimaryHDU([[0]], header=h)
hdulist = fits.HDUList([hdu])
# Check that this doesn't raise a NameError exception
wcs.validate(hdulist)
def test_error_message():
header = get_pkg_data_contents(
'data/invalid_header.hdr', encoding='binary')
with pytest.raises(wcs.InvalidTransformError):
# Both lines are in here, because 0.4 calls .set within WCS.__init__,
# whereas 0.3 and earlier did not.
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(header, _do_set=False)
w.all_pix2world([[536.0, 894.0]], 0)
def test_out_of_bounds():
# See #2107
header = get_pkg_data_contents('data/zpn-hole.hdr', encoding='binary')
w = wcs.WCS(header)
ra, dec = w.wcs_pix2world(110, 110, 0)
assert np.isnan(ra)
assert np.isnan(dec)
ra, dec = w.wcs_pix2world(0, 0, 0)
assert not np.isnan(ra)
assert not np.isnan(dec)
def test_calc_footprint_1():
fits = get_pkg_data_filename('data/sip.fits')
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(fits)
axes = (1000, 1051)
ref = np.array([[202.39314493, 47.17753352],
[202.71885939, 46.94630488],
[202.94631893, 47.15855022],
[202.72053428, 47.37893142]])
footprint = w.calc_footprint(axes=axes)
assert_allclose(footprint, ref)
def test_calc_footprint_2():
""" Test calc_footprint without distortion. """
fits = get_pkg_data_filename('data/sip.fits')
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(fits)
axes = (1000, 1051)
ref = np.array([[202.39265216, 47.17756518],
[202.7469062, 46.91483312],
[203.11487481, 47.14359319],
[202.76092671, 47.40745948]])
footprint = w.calc_footprint(axes=axes, undistort=False)
assert_allclose(footprint, ref)
def test_calc_footprint_3():
""" Test calc_footprint with corner of the pixel."""
w = wcs.WCS()
w.wcs.ctype = ["GLON-CAR", "GLAT-CAR"]
w.wcs.crpix = [1.5, 5.5]
w.wcs.cdelt = [-0.1, 0.1]
axes = (2, 10)
ref = np.array([[0.1, -0.5],
[0.1, 0.5],
[359.9, 0.5],
[359.9, -0.5]])
footprint = w.calc_footprint(axes=axes, undistort=False, center=False)
assert_allclose(footprint, ref)
def test_sip():
# See #2107
header = get_pkg_data_contents('data/irac_sip.hdr', encoding='binary')
w = wcs.WCS(header)
x0, y0 = w.sip_pix2foc(200, 200, 0)
assert_allclose(72, x0, 1e-3)
assert_allclose(72, y0, 1e-3)
x1, y1 = w.sip_foc2pix(x0, y0, 0)
assert_allclose(200, x1, 1e-3)
assert_allclose(200, y1, 1e-3)
def test_sub_3d_with_sip():
# See #10527
header = get_pkg_data_contents('data/irac_sip.hdr', encoding='binary')
header = fits.Header.fromstring(header)
header['NAXIS'] = 3
header.set('NAXIS3', 64, after=header.index('NAXIS2'))
w = wcs.WCS(header, naxis=2)
assert w.naxis == 2
def test_printwcs(capsys):
"""
Just make sure that it runs
"""
h = get_pkg_data_contents(
'data/spectra/orion-freq-1.hdr', encoding='binary')
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(h)
w.printwcs()
captured = capsys.readouterr()
assert 'WCS Keywords' in captured.out
h = get_pkg_data_contents('data/3d_cd.hdr', encoding='binary')
w = wcs.WCS(h)
w.printwcs()
captured = capsys.readouterr()
assert 'WCS Keywords' in captured.out
def test_invalid_spherical():
header = """
SIMPLE = T / conforms to FITS standard
BITPIX = 8 / array data type
WCSAXES = 2 / no comment
CTYPE1 = 'RA---TAN' / TAN (gnomic) projection
CTYPE2 = 'DEC--TAN' / TAN (gnomic) projection
EQUINOX = 2000.0 / Equatorial coordinates definition (yr)
LONPOLE = 180.0 / no comment
LATPOLE = 0.0 / no comment
CRVAL1 = 16.0531567459 / RA of reference point
CRVAL2 = 23.1148929108 / DEC of reference point
CRPIX1 = 2129 / X reference pixel
CRPIX2 = 1417 / Y reference pixel
CUNIT1 = 'deg ' / X pixel scale units
CUNIT2 = 'deg ' / Y pixel scale units
CD1_1 = -0.00912247310646 / Transformation matrix
CD1_2 = -0.00250608809647 / no comment
CD2_1 = 0.00250608809647 / no comment
CD2_2 = -0.00912247310646 / no comment
IMAGEW = 4256 / Image width, in pixels.
IMAGEH = 2832 / Image height, in pixels.
"""
f = io.StringIO(header)
header = fits.Header.fromtextfile(f)
w = wcs.WCS(header)
x, y = w.wcs_world2pix(211, -26, 0)
assert np.isnan(x) and np.isnan(y)
def test_no_iteration():
# Regression test for #3066
w = wcs.WCS(naxis=2)
with pytest.raises(TypeError) as exc:
iter(w)
assert exc.value.args[0] == "'WCS' object is not iterable"
class NewWCS(wcs.WCS):
pass
w = NewWCS(naxis=2)
with pytest.raises(TypeError) as exc:
iter(w)
assert exc.value.args[0] == "'NewWCS' object is not iterable"
@pytest.mark.skipif('_wcs.__version__[0] < "5"',
reason="TPV only works with wcslib 5.x or later")
def test_sip_tpv_agreement():
sip_header = get_pkg_data_contents(
os.path.join("data", "siponly.hdr"), encoding='binary')
tpv_header = get_pkg_data_contents(
os.path.join("data", "tpvonly.hdr"), encoding='binary')
with pytest.warns(wcs.FITSFixedWarning):
w_sip = wcs.WCS(sip_header)
w_tpv = wcs.WCS(tpv_header)
assert_array_almost_equal(
w_sip.all_pix2world([w_sip.wcs.crpix], 1),
w_tpv.all_pix2world([w_tpv.wcs.crpix], 1))
w_sip2 = wcs.WCS(w_sip.to_header())
w_tpv2 = wcs.WCS(w_tpv.to_header())
assert_array_almost_equal(
w_sip.all_pix2world([w_sip.wcs.crpix], 1),
w_sip2.all_pix2world([w_sip.wcs.crpix], 1))
assert_array_almost_equal(
w_tpv.all_pix2world([w_sip.wcs.crpix], 1),
w_tpv2.all_pix2world([w_sip.wcs.crpix], 1))
assert_array_almost_equal(
w_sip2.all_pix2world([w_sip.wcs.crpix], 1),
w_tpv2.all_pix2world([w_tpv.wcs.crpix], 1))
@pytest.mark.skipif('_wcs.__version__[0] < "5"',
reason="TPV only works with wcslib 5.x or later")
def test_tpv_copy():
# See #3904
tpv_header = get_pkg_data_contents(
os.path.join("data", "tpvonly.hdr"), encoding='binary')
with pytest.warns(wcs.FITSFixedWarning):
w_tpv = wcs.WCS(tpv_header)
ra, dec = w_tpv.wcs_pix2world([0, 100, 200], [0, -100, 200], 0)
assert ra[0] != ra[1] and ra[1] != ra[2]
assert dec[0] != dec[1] and dec[1] != dec[2]
def test_hst_wcs():
path = get_pkg_data_filename("data/dist_lookup.fits.gz")
with fits.open(path) as hdulist:
# wcslib will complain about the distortion parameters if they
# weren't correctly deleted from the header
w = wcs.WCS(hdulist[1].header, hdulist)
# Check pixel scale and area
assert_quantity_allclose(
w.proj_plane_pixel_scales(), [1.38484378e-05, 1.39758488e-05] * u.deg)
assert_quantity_allclose(
w.proj_plane_pixel_area(), 1.93085492e-10 * (u.deg * u.deg))
# Exercise the main transformation functions, mainly just for
# coverage
w.p4_pix2foc([0, 100, 200], [0, -100, 200], 0)
w.det2im([0, 100, 200], [0, -100, 200], 0)
w.cpdis1 = w.cpdis1
w.cpdis2 = w.cpdis2
w.det2im1 = w.det2im1
w.det2im2 = w.det2im2
w.sip = w.sip
w.cpdis1.cdelt = w.cpdis1.cdelt
w.cpdis1.crpix = w.cpdis1.crpix
w.cpdis1.crval = w.cpdis1.crval
w.cpdis1.data = w.cpdis1.data
assert w.sip.a_order == 4
assert w.sip.b_order == 4
assert w.sip.ap_order == 0
assert w.sip.bp_order == 0
assert_array_equal(w.sip.crpix, [2048., 1024.])
wcs.WCS(hdulist[1].header, hdulist)
def test_cpdis_comments():
path = get_pkg_data_filename("data/dist_lookup.fits.gz")
f = fits.open(path)
w = wcs.WCS(f[1].header, f)
hdr = w.to_fits()[0].header
f.close()
wcscards = list(hdr['CPDIS*'].cards) + list(hdr['DP*'].cards)
wcsdict = {k: (v, c) for k, v, c in wcscards}
refcards = [
('CPDIS1', 'LOOKUP', 'Prior distortion function type'),
('DP1.EXTVER', 1.0, 'Version number of WCSDVARR extension'),
('DP1.NAXES', 2.0, 'Number of independent variables in CPDIS function'),
('DP1.AXIS.1', 1.0, 'Axis number of the 1st variable in a CPDIS function'),
('DP1.AXIS.2', 2.0, 'Axis number of the 2nd variable in a CPDIS function'),
('CPDIS2', 'LOOKUP', 'Prior distortion function type'),
('DP2.EXTVER', 2.0, 'Version number of WCSDVARR extension'),
('DP2.NAXES', 2.0, 'Number of independent variables in CPDIS function'),
('DP2.AXIS.1', 1.0, 'Axis number of the 1st variable in a CPDIS function'),
('DP2.AXIS.2', 2.0, 'Axis number of the 2nd variable in a CPDIS function'),
]
assert len(wcsdict) == len(refcards)
for k, v, c in refcards:
assert wcsdict[k] == (v, c)
def test_d2im_comments():
path = get_pkg_data_filename("data/ie6d07ujq_wcs.fits")
f = fits.open(path)
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(f[0].header, f)
f.close()
wcscards = list(w.to_fits()[0].header['D2IM*'].cards)
wcsdict = {k: (v, c) for k, v, c in wcscards}
refcards = [
('D2IMDIS1', 'LOOKUP', 'Detector to image correction type'),
('D2IM1.EXTVER', 1.0, 'Version number of WCSDVARR extension'),
('D2IM1.NAXES', 2.0, 'Number of independent variables in D2IM function'),
('D2IM1.AXIS.1', 1.0, 'Axis number of the 1st variable in a D2IM function'),
('D2IM1.AXIS.2', 2.0, 'Axis number of the 2nd variable in a D2IM function'),
('D2IMDIS2', 'LOOKUP', 'Detector to image correction type'),
('D2IM2.EXTVER', 2.0, 'Version number of WCSDVARR extension'),
('D2IM2.NAXES', 2.0, 'Number of independent variables in D2IM function'),
('D2IM2.AXIS.1', 1.0, 'Axis number of the 1st variable in a D2IM function'),
('D2IM2.AXIS.2', 2.0, 'Axis number of the 2nd variable in a D2IM function'),
# ('D2IMERR1', 0.049, 'Maximum error of D2IM correction for axis 1'),
# ('D2IMERR2', 0.035, 'Maximum error of D2IM correction for axis 2'),
# ('D2IMEXT', 'iref$y7b1516hi_d2i.fits', ''),
]
assert len(wcsdict) == len(refcards)
for k, v, c in refcards:
assert wcsdict[k] == (v, c)
def test_sip_broken():
# This header caused wcslib to segfault because it has a SIP
# specification in a non-default keyword
hdr = get_pkg_data_contents("data/sip-broken.hdr")
wcs.WCS(hdr)
def test_no_truncate_crval():
"""
Regression test for https://github.com/astropy/astropy/issues/4612
"""
w = wcs.WCS(naxis=3)
w.wcs.crval = [50, 50, 2.12345678e11]
w.wcs.cdelt = [1e-3, 1e-3, 1e8]
w.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'FREQ']
w.wcs.set()
header = w.to_header()
for ii in range(3):
assert header[f'CRVAL{ii + 1}'] == w.wcs.crval[ii]
assert header[f'CDELT{ii + 1}'] == w.wcs.cdelt[ii]
def test_no_truncate_crval_try2():
"""
Regression test for https://github.com/astropy/astropy/issues/4612
"""
w = wcs.WCS(naxis=3)
w.wcs.crval = [50, 50, 2.12345678e11]
w.wcs.cdelt = [1e-5, 1e-5, 1e5]
w.wcs.ctype = ['RA---SIN', 'DEC--SIN', 'FREQ']
w.wcs.cunit = ['deg', 'deg', 'Hz']
w.wcs.crpix = [1, 1, 1]
w.wcs.restfrq = 2.34e11
w.wcs.set()
header = w.to_header()
for ii in range(3):
assert header[f'CRVAL{ii + 1}'] == w.wcs.crval[ii]
assert header[f'CDELT{ii + 1}'] == w.wcs.cdelt[ii]
def test_no_truncate_crval_p17():
"""
Regression test for https://github.com/astropy/astropy/issues/5162
"""
w = wcs.WCS(naxis=2)
w.wcs.crval = [50.1234567890123456, 50.1234567890123456]
w.wcs.cdelt = [1e-3, 1e-3]
w.wcs.ctype = ['RA---TAN', 'DEC--TAN']
w.wcs.set()
header = w.to_header()
assert header['CRVAL1'] != w.wcs.crval[0]
assert header['CRVAL2'] != w.wcs.crval[1]
header = w.to_header(relax=wcs.WCSHDO_P17)
assert header['CRVAL1'] == w.wcs.crval[0]
assert header['CRVAL2'] == w.wcs.crval[1]
def test_no_truncate_using_compare():
"""
Regression test for https://github.com/astropy/astropy/issues/4612
This one uses WCS.wcs.compare and some slightly different values
"""
w = wcs.WCS(naxis=3)
w.wcs.crval = [2.409303333333E+02, 50, 2.12345678e11]
w.wcs.cdelt = [1e-3, 1e-3, 1e8]
w.wcs.ctype = ['RA---TAN', 'DEC--TAN', 'FREQ']
w.wcs.set()
w2 = wcs.WCS(w.to_header())
w.wcs.compare(w2.wcs)
def test_passing_ImageHDU():
"""
Passing ImageHDU or PrimaryHDU and comparing it with
wcs initialized from header. For #4493.
"""
path = get_pkg_data_filename('data/validate.fits')
with fits.open(path) as hdulist:
with pytest.warns(wcs.FITSFixedWarning):
wcs_hdu = wcs.WCS(hdulist[0])
wcs_header = wcs.WCS(hdulist[0].header)
assert wcs_hdu.wcs.compare(wcs_header.wcs)
wcs_hdu = wcs.WCS(hdulist[1])
wcs_header = wcs.WCS(hdulist[1].header)
assert wcs_hdu.wcs.compare(wcs_header.wcs)
def test_inconsistent_sip():
"""
Test for #4814
"""
hdr = get_pkg_data_contents("data/sip-broken.hdr")
ctx = ctx_for_v71_dateref_warnings()
with ctx:
w = wcs.WCS(hdr)
with pytest.warns(AstropyWarning):
newhdr = w.to_header(relax=None)
# CTYPE should not include "-SIP" if relax is None
with ctx:
wnew = wcs.WCS(newhdr)
assert all(not ctyp.endswith('-SIP') for ctyp in wnew.wcs.ctype)
newhdr = w.to_header(relax=False)
assert 'A_0_2' not in newhdr
# CTYPE should not include "-SIP" if relax is False
with ctx:
wnew = wcs.WCS(newhdr)
assert all(not ctyp.endswith('-SIP') for ctyp in wnew.wcs.ctype)
with pytest.warns(AstropyWarning):
newhdr = w.to_header(key="C")
assert 'A_0_2' not in newhdr
# Test writing header with a different key
with ctx:
wnew = wcs.WCS(newhdr, key='C')
assert all(not ctyp.endswith('-SIP') for ctyp in wnew.wcs.ctype)
with pytest.warns(AstropyWarning):
newhdr = w.to_header(key=" ")
# Test writing a primary WCS to header
with ctx:
wnew = wcs.WCS(newhdr)
assert all(not ctyp.endswith('-SIP') for ctyp in wnew.wcs.ctype)
# Test that "-SIP" is kept into CTYPE if relax=True and
# "-SIP" was in the original header
newhdr = w.to_header(relax=True)
with ctx:
wnew = wcs.WCS(newhdr)
assert all(ctyp.endswith('-SIP') for ctyp in wnew.wcs.ctype)
assert 'A_0_2' in newhdr
# Test that SIP coefficients are also written out.
assert wnew.sip is not None
# ######### broken header ###########
# Test that "-SIP" is added to CTYPE if relax=True and
# "-SIP" was not in the original header but SIP coefficients
# are present.
with ctx:
w = wcs.WCS(hdr)
w.wcs.ctype = ['RA---TAN', 'DEC--TAN']
newhdr = w.to_header(relax=True)
with ctx:
wnew = wcs.WCS(newhdr)
assert all(ctyp.endswith('-SIP') for ctyp in wnew.wcs.ctype)
def test_bounds_check():
"""Test for #4957"""
w = wcs.WCS(naxis=2)
w.wcs.ctype = ["RA---CAR", "DEC--CAR"]
w.wcs.cdelt = [10, 10]
w.wcs.crval = [-90, 90]
w.wcs.crpix = [1, 1]
w.wcs.bounds_check(False, False)
ra, dec = w.wcs_pix2world(300, 0, 0)
assert_allclose(ra, -180)
assert_allclose(dec, -30)
def test_naxis():
w = wcs.WCS(naxis=2)
w.wcs.crval = [1, 1]
w.wcs.cdelt = [0.1, 0.1]
w.wcs.crpix = [1, 1]
w._naxis = [1000, 500]
assert w.pixel_shape == (1000, 500)
assert w.array_shape == (500, 1000)
w.pixel_shape = (99, 59)
assert w._naxis == [99, 59]
w.array_shape = (45, 23)
assert w._naxis == [23, 45]
assert w.pixel_shape == (23, 45)
w.pixel_shape = None
assert w.pixel_bounds is None
def test_sip_with_altkey():
"""
Test that when creating a WCS object using a key, CTYPE with
that key is looked at and not the primary CTYPE.
fix for #5443.
"""
with fits.open(get_pkg_data_filename('data/sip.fits')) as f:
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(f[0].header)
# create a header with two WCSs.
h1 = w.to_header(relax=True, key='A')
h2 = w.to_header(relax=False)
h1['CTYPE1A'] = "RA---SIN-SIP"
h1['CTYPE2A'] = "DEC--SIN-SIP"
h1.update(h2)
with ctx_for_v71_dateref_warnings():
w = wcs.WCS(h1, key='A')
assert (w.wcs.ctype == np.array(['RA---SIN-SIP', 'DEC--SIN-SIP'])).all()
def test_to_fits_1():
"""
Test to_fits() with LookupTable distortion.
"""
fits_name = get_pkg_data_filename('data/dist.fits')
with pytest.warns(AstropyDeprecationWarning):
w = wcs.WCS(fits_name)
wfits = w.to_fits()
assert isinstance(wfits, fits.HDUList)
assert isinstance(wfits[0], fits.PrimaryHDU)
assert isinstance(wfits[1], fits.ImageHDU)
def test_keyedsip():
"""
Test sip reading with extra key.
"""
hdr_name = get_pkg_data_filename('data/sip-broken.hdr')
header = fits.Header.fromfile(hdr_name)
del header["CRPIX1"]
del header["CRPIX2"]
w = wcs.WCS(header=header, key="A")
assert isinstance(w.sip, wcs.Sip)
assert w.sip.crpix[0] == 2048
assert w.sip.crpix[1] == 1026
def test_zero_size_input():
with fits.open(get_pkg_data_filename('data/sip.fits')) as f:
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(f[0].header)
inp = np.zeros((0, 2))
assert_array_equal(inp, w.all_pix2world(inp, 0))
assert_array_equal(inp, w.all_world2pix(inp, 0))
inp = [], [1]
result = w.all_pix2world([], [1], 0)
assert_array_equal(inp[0], result[0])
assert_array_equal(inp[1], result[1])
result = w.all_world2pix([], [1], 0)
assert_array_equal(inp[0], result[0])
assert_array_equal(inp[1], result[1])
def test_scalar_inputs():
"""
Issue #7845
"""
wcsobj = wcs.WCS(naxis=1)
result = wcsobj.all_pix2world(2, 1)
assert_array_equal(result, [np.array(2.)])
assert result[0].shape == ()
result = wcsobj.all_pix2world([2], 1)
assert_array_equal(result, [np.array([2.])])
assert result[0].shape == (1,)
# Ignore RuntimeWarning raised on s390.
@pytest.mark.filterwarnings('ignore:.*invalid value encountered in.*')
def test_footprint_contains():
"""
Test WCS.footprint_contains(skycoord)
"""
header = """
WCSAXES = 2 / Number of coordinate axes
CRPIX1 = 1045.0 / Pixel coordinate of reference point
CRPIX2 = 1001.0 / Pixel coordinate of reference point
PC1_1 = -0.00556448550786 / Coordinate transformation matrix element
PC1_2 = -0.001042120133257 / Coordinate transformation matrix element
PC2_1 = 0.001181477028705 / Coordinate transformation matrix element
PC2_2 = -0.005590809742987 / Coordinate transformation matrix element
CDELT1 = 1.0 / [deg] Coordinate increment at reference point
CDELT2 = 1.0 / [deg] Coordinate increment at reference point
CUNIT1 = 'deg' / Units of coordinate increment and value
CUNIT2 = 'deg' / Units of coordinate increment and value
CTYPE1 = 'RA---TAN' / TAN (gnomonic) projection + SIP distortions
CTYPE2 = 'DEC--TAN' / TAN (gnomonic) projection + SIP distortions
CRVAL1 = 250.34971683647 / [deg] Coordinate value at reference point
CRVAL2 = 2.2808772582495 / [deg] Coordinate value at reference point
LONPOLE = 180.0 / [deg] Native longitude of celestial pole
LATPOLE = 2.2808772582495 / [deg] Native latitude of celestial pole
RADESYS = 'ICRS' / Equatorial coordinate system
MJD-OBS = 58612.339199259 / [d] MJD of observation matching DATE-OBS
DATE-OBS= '2019-05-09T08:08:26.816Z' / ISO-8601 observation date matching MJD-OB
NAXIS = 2 / NAXIS
NAXIS1 = 2136 / length of first array dimension
NAXIS2 = 2078 / length of second array dimension
""" # noqa
header = fits.Header.fromstring(header.strip(), '\n')
test_wcs = wcs.WCS(header)
hasCoord = test_wcs.footprint_contains(SkyCoord(254, 2, unit='deg'))
assert hasCoord
hasCoord = test_wcs.footprint_contains(SkyCoord(240, 2, unit='deg'))
assert not hasCoord
hasCoord = test_wcs.footprint_contains(SkyCoord(24, 2, unit='deg'))
assert not hasCoord
def test_cunit():
# Initializing WCS
w1 = wcs.WCS(naxis=2)
w2 = wcs.WCS(naxis=2)
w3 = wcs.WCS(naxis=2)
w4 = wcs.WCS(naxis=2)
# Initializing the values of cunit
w1.wcs.cunit = ['deg', 'm/s']
w2.wcs.cunit = ['km/h', 'km/h']
w3.wcs.cunit = ['deg', 'm/s']
w4.wcs.cunit = ['deg', 'deg']
# Equality checking a cunit with itself
assert w1.wcs.cunit == w1.wcs.cunit
assert not w1.wcs.cunit != w1.wcs.cunit
# Equality checking of two different cunit object having same values
assert w1.wcs.cunit == w3.wcs.cunit
assert not w1.wcs.cunit != w3.wcs.cunit
# Equality checking of two different cunit object having the same first unit
# but different second unit (see #9154)
assert not w1.wcs.cunit == w4.wcs.cunit
assert w1.wcs.cunit != w4.wcs.cunit
# Inequality checking of two different cunit object having different values
assert not w1.wcs.cunit == w2.wcs.cunit
assert w1.wcs.cunit != w2.wcs.cunit
# Inequality checking of cunit with a list of literals
assert not w1.wcs.cunit == [1, 2, 3]
assert w1.wcs.cunit != [1, 2, 3]
# Inequality checking with some characters
assert not w1.wcs.cunit == ['a', 'b', 'c']
assert w1.wcs.cunit != ['a', 'b', 'c']
# Comparison is not implemented TypeError will raise
with pytest.raises(TypeError):
w1.wcs.cunit < w2.wcs.cunit
class TestWcsWithTime:
def setup(self):
if _WCSLIB_VER >= Version('7.1'):
fname = get_pkg_data_filename('data/header_with_time_wcslib71.fits')
else:
fname = get_pkg_data_filename('data/header_with_time.fits')
self.header = fits.Header.fromfile(fname)
with pytest.warns(wcs.FITSFixedWarning):
self.w = wcs.WCS(self.header, key='A')
    def test_keywords2wcsprm(self):
""" Make sure Wcsprm is populated correctly from the header."""
ctype = [self.header[val] for val in self.header["CTYPE*"]]
crval = [self.header[val] for val in self.header["CRVAL*"]]
crpix = [self.header[val] for val in self.header["CRPIX*"]]
cdelt = [self.header[val] for val in self.header["CDELT*"]]
cunit = [self.header[val] for val in self.header["CUNIT*"]]
assert list(self.w.wcs.ctype) == ctype
assert list(self.w.wcs.axis_types) == [2200, 2201, 3300, 0]
assert_allclose(self.w.wcs.crval, crval)
assert_allclose(self.w.wcs.crpix, crpix)
assert_allclose(self.w.wcs.cdelt, cdelt)
assert list(self.w.wcs.cunit) == cunit
naxis = self.w.naxis
assert naxis == 4
pc = np.zeros((naxis, naxis), dtype=np.float64)
for i in range(1, 5):
for j in range(1, 5):
if i == j:
pc[i-1, j-1] = self.header.get(f'PC{i}_{j}A', 1)
else:
pc[i-1, j-1] = self.header.get(f'PC{i}_{j}A', 0)
assert_allclose(self.w.wcs.pc, pc)
char_keys = ['timesys', 'trefpos', 'trefdir', 'plephem', 'timeunit',
'dateref', 'dateobs', 'datebeg', 'dateavg', 'dateend']
for key in char_keys:
assert getattr(self.w.wcs, key) == self.header.get(key, "")
num_keys = ['mjdref', 'mjdobs', 'mjdbeg', 'mjdend',
'jepoch', 'bepoch', 'tstart', 'tstop', 'xposure',
'timsyer', 'timrder', 'timedel', 'timepixr',
'timeoffs', 'telapse', 'czphs', 'cperi']
for key in num_keys:
if key.upper() == 'MJDREF':
hdrv = [self.header.get('MJDREFIA', np.nan),
self.header.get('MJDREFFA', np.nan)]
else:
hdrv = self.header.get(key, np.nan)
assert_allclose(getattr(self.w.wcs, key), hdrv)
def test_transforms(self):
assert_allclose(self.w.all_pix2world(*self.w.wcs.crpix, 1),
self.w.wcs.crval)
def test_invalid_coordinate_masking():
# Regression test for an issue which caused all coordinates to be set to NaN
# after a transformation rather than just the invalid ones as reported by
# WCSLIB. A specific example of this is that when considering an all-sky
# spectral cube with a spectral axis that is not correlated with the sky
# axes, if transforming pixel coordinates that did not fall 'in' the sky,
# the spectral world value was also masked even though that coordinate
# was valid.
w = wcs.WCS(naxis=3)
w.wcs.ctype = 'VELO_LSR', 'GLON-CAR', 'GLAT-CAR'
w.wcs.crval = -20, 0, 0
w.wcs.crpix = 1, 1441, 241
w.wcs.cdelt = 1.3, -0.125, 0.125
px = [-10, -10, 20]
py = [-10, 10, 20]
pz = [-10, 10, 20]
wx, wy, wz = w.wcs_pix2world(px, py, pz, 0)
# Before fixing this, wx used to return np.nan for the first element
assert_allclose(wx, [-33, -33, 6])
assert_allclose(wy, [np.nan, 178.75, 177.5])
assert_allclose(wz, [np.nan, -28.75, -27.5])
def test_no_pixel_area():
w = wcs.WCS(naxis=3)
# Pixel area cannot be computed
with pytest.raises(ValueError, match='Pixel area is defined only for 2D pixels'):
w.proj_plane_pixel_area()
# Pixel scales still possible
assert_quantity_allclose(w.proj_plane_pixel_scales(), 1)
def test_distortion_header(tmpdir):
"""
Test that plate distortion model is correctly described by `wcs.to_header()`
and preserved when creating a Cutout2D from the image, writing it to FITS,
and reading it back from the file.
"""
path = get_pkg_data_filename("data/dss.14.29.56-62.41.05.fits.gz")
cen = np.array((50, 50))
siz = np.array((20, 20))
with fits.open(path) as hdulist:
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(hdulist[0].header)
cut = Cutout2D(hdulist[0].data, position=cen, size=siz, wcs=w)
# This converts the DSS plate solution model with AMD[XY]n coefficients into a
# Template Polynomial Distortion model (TPD.FWD.n coefficients);
# not testing explicitly for the header keywords here.
if _WCSLIB_VER < Version("7.4"):
with pytest.warns(AstropyWarning, match="WCS contains a TPD distortion model in CQDIS"):
w0 = wcs.WCS(w.to_header_string())
with pytest.warns(AstropyWarning, match="WCS contains a TPD distortion model in CQDIS"):
w1 = wcs.WCS(cut.wcs.to_header_string())
if _WCSLIB_VER >= Version("7.1"):
pytest.xfail("TPD coefficients incomplete with WCSLIB >= 7.1 < 7.4")
else:
w0 = wcs.WCS(w.to_header_string())
w1 = wcs.WCS(cut.wcs.to_header_string())
assert w.pixel_to_world(0, 0).separation(w0.pixel_to_world(0, 0)) < 1.e-3 * u.mas
assert w.pixel_to_world(*cen).separation(w0.pixel_to_world(*cen)) < 1.e-3 * u.mas
assert w.pixel_to_world(*cen).separation(w1.pixel_to_world(*(siz / 2))) < 1.e-3 * u.mas
cutfile = str(tmpdir.join('cutout.fits'))
fits.writeto(cutfile, cut.data, cut.wcs.to_header())
with fits.open(cutfile) as hdulist:
w2 = wcs.WCS(hdulist[0].header)
assert w.pixel_to_world(*cen).separation(w2.pixel_to_world(*(siz / 2))) < 1.e-3 * u.mas
def test_pixlist_wcs_colsel():
"""
Test selection of a specific pixel list WCS using ``colsel``. See #11412.
"""
hdr_file = get_pkg_data_filename('data/chandra-pixlist-wcs.hdr')
hdr = fits.Header.fromtextfile(hdr_file)
with pytest.warns(wcs.FITSFixedWarning):
w = wcs.WCS(hdr, keysel=['image', 'pixel'], colsel=[11, 12])
assert w.naxis == 2
assert list(w.wcs.ctype) == ['RA---TAN', 'DEC--TAN']
assert np.allclose(w.wcs.crval, [229.38051931869, -58.81108068885])
assert np.allclose(w.wcs.pc, [[1, 0], [0, 1]])
assert np.allclose(w.wcs.cdelt, [-0.00013666666666666, 0.00013666666666666])
assert np.allclose(w.wcs.lonpole, 180.)
| 34.556209
| 109
| 0.611583
|
a16ff8963370672c5012f98f105587cb0a6cd943
| 153
|
py
|
Python
|
chainer_dense_fusion/__init__.py
|
knorth55/chainer-dense-fusion
|
8ff53173d7071fc2cfcd05b1e0b2c544aeed090b
|
[
"MIT"
] | 22
|
2019-01-31T23:50:30.000Z
|
2021-09-13T09:41:00.000Z
|
chainer_dense_fusion/__init__.py
|
knorth55/chainer-dense-fusion
|
8ff53173d7071fc2cfcd05b1e0b2c544aeed090b
|
[
"MIT"
] | 4
|
2019-07-31T14:40:06.000Z
|
2022-03-16T13:32:45.000Z
|
chainer_dense_fusion/__init__.py
|
knorth55/chainer-dense-fusion
|
8ff53173d7071fc2cfcd05b1e0b2c544aeed090b
|
[
"MIT"
] | 3
|
2019-08-30T09:18:45.000Z
|
2020-03-03T16:07:51.000Z
|
from chainer_dense_fusion import datasets # NOQA
from chainer_dense_fusion import links # NOQA
from chainer_dense_fusion import visualizations # NOQA
| 38.25
| 55
| 0.843137
|
d6f97a3835549c1b0ec876b2f0c8b1f086360b7e
| 1,455
|
py
|
Python
|
bsp/gd32/tools/sdk_dist.py
|
BreederBai/rt-thread
|
53ed0314982556dfa9c5db75d4f3e02485d16ab5
|
[
"Apache-2.0"
] | 4
|
2018-01-17T03:12:59.000Z
|
2020-04-13T06:55:07.000Z
|
bsp/gd32/tools/sdk_dist.py
|
BreederBai/rt-thread
|
53ed0314982556dfa9c5db75d4f3e02485d16ab5
|
[
"Apache-2.0"
] | 1
|
2018-12-20T00:02:50.000Z
|
2018-12-20T00:02:50.000Z
|
bsp/gd32/tools/sdk_dist.py
|
BreederBai/rt-thread
|
53ed0314982556dfa9c5db75d4f3e02485d16ab5
|
[
"Apache-2.0"
] | 1
|
2022-01-12T07:33:32.000Z
|
2022-01-12T07:33:32.000Z
|
import os
import sys
import shutil
cwd_path = os.getcwd()
sys.path.append(os.path.join(os.path.dirname(cwd_path), 'rt-thread', 'tools'))
def bsp_update_board_kconfig(dist_dir):
# change board/kconfig path
if not os.path.isfile(os.path.join(dist_dir, 'board/Kconfig')):
return
with open(os.path.join(dist_dir, 'board/Kconfig'), 'r') as f:
data = f.readlines()
with open(os.path.join(dist_dir, 'board/Kconfig'), 'w') as f:
for line in data:
if line.find('../libraries/gd32_drivers/Kconfig') != -1:
position = line.find('../libraries/gd32_drivers/Kconfig')
line = line[0:position] + 'libraries/gd32_drivers/Kconfig"\n'
f.write(line)
# BSP dist function
def dist_do_building(BSP_ROOT, dist_dir):
from mkdist import bsp_copy_files
import rtconfig
print("=> copy gd32 bsp library")
library_dir = os.path.join(dist_dir, 'libraries')
library_path = os.path.join(os.path.dirname(BSP_ROOT), 'libraries')
bsp_copy_files(os.path.join(library_path, rtconfig.BSP_LIBRARY_TYPE),
os.path.join(library_dir, rtconfig.BSP_LIBRARY_TYPE))
print("=> copy bsp drivers")
bsp_copy_files(os.path.join(library_path, 'gd32_drivers'), os.path.join(library_dir, 'gd32_drivers'))
shutil.copyfile(os.path.join(library_path, 'Kconfig'), os.path.join(library_dir, 'Kconfig'))
bsp_update_board_kconfig(dist_dir)
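# Illustrative before/after (hypothetical Kconfig line) for
# bsp_update_board_kconfig():
#   original : source "../libraries/gd32_drivers/Kconfig"
#   rewritten: source "libraries/gd32_drivers/Kconfig"
# i.e. the relative '../' prefix is dropped so that the distributed tree
# resolves the driver Kconfig from its own libraries/ directory.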
| 38.289474
| 105
| 0.671478
|
59c3011f4da19f5c3839838d44d289adecbc8ef0
| 647
|
py
|
Python
|
tests/test_utils.py
|
hongningsg/Machine-Learning-and-Deep-Learning
|
84233a77ac8e736ad4fb26fb8c82995c4de64047
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
hongningsg/Machine-Learning-and-Deep-Learning
|
84233a77ac8e736ad4fb26fb8c82995c4de64047
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
hongningsg/Machine-Learning-and-Deep-Learning
|
84233a77ac8e736ad4fb26fb8c82995c4de64047
|
[
"MIT"
] | null | null | null |
from utils import normalize
import unittest
import numpy as np
class TestUtils(unittest.TestCase):
def test_normalize(self):
X = np.array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
normalized_X = normalize(X)
np.testing.assert_array_almost_equal(normalized_X, np.array([[-0.63299316, -0.04124145, 0.55051026],
[ 2.36700684, 2.95875855, 3.55051026],
[ 5.36700684, 5.95875855, 6.55051026]]))
if __name__ == '__main__':
unittest.main()
| 32.35
| 110
| 0.474498
|
2f14e73edafe8f9ef6bb13093faa8707270d34c4
| 1,647
|
py
|
Python
|
examples/plot_time_series.py
|
prakashabhale/fluidfoam
|
69b156b42b80169ff4c757554cba4f2784a79f82
|
[
"CECILL-B"
] | 64
|
2018-03-08T21:52:48.000Z
|
2022-03-31T08:11:17.000Z
|
examples/plot_time_series.py
|
CyrilleBonamy/fluidfoam
|
2befa35722aa7aa13ac7d48884461614d608262b
|
[
"CECILL-B"
] | 14
|
2019-12-12T10:56:10.000Z
|
2022-03-31T15:32:47.000Z
|
examples/plot_time_series.py
|
CyrilleBonamy/fluidfoam
|
2befa35722aa7aa13ac7d48884461614d608262b
|
[
"CECILL-B"
] | 21
|
2019-09-25T21:34:43.000Z
|
2022-03-31T14:49:12.000Z
|
"""
Read and Plot a time series of OpenFoam scalar field
====================================================
This example reads and plots a time series of an OpenFoam scalar field
"""
###############################################################################
# Gets the time directories
# -------------------------
#
# .. note:: Tries if directory is a number and adds it in the time array
import os
import numpy as np
sol = '../output_samples/box/'
dir_list = os.listdir(sol)
time_list = []
for directory in dir_list:
try:
float(directory)
time_list.append(directory)
    except ValueError:
        pass
time_list.sort(key=float)
time_list = np.array(time_list)
###############################################################################
# Reads a scalar value at a given position for different times
# ------------------------------------------------------------
#
# .. note:: It reads the scalar field p at position 20 and stores it in the
# numpy array time_series
# import readscalar function from fluidfoam package
from fluidfoam import readscalar
sol = '../output_samples/box/'
time_series = np.empty(0)
for timename in time_list:
p = readscalar(sol, timename, 'p')
time_series = np.append(time_series, p[20])
###############################################################################
# Now plots the time series
# -------------------------
#
import matplotlib.pyplot as plt
plt.figure()
# Converts strings to float for plot
time_list = [float(i) for i in time_list]
plt.plot(time_list, time_series)
# Setting axis labels
plt.xlabel('t (s)')
plt.ylabel('p (Pa)')
# add grid
plt.grid()
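# Optional (not part of the original example): outside of a sphinx-gallery
# build the figure is not rendered automatically; plt.show() or
# plt.savefig('time_series.png') could be called here to display or save it.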
| 24.954545
| 79
| 0.53309
|
1c05712c28478b2d8dcb98b92beaaa738c31bbe8
| 26,819
|
py
|
Python
|
Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/tensorflow/contrib/estimator/python/estimator/rnn.py
|
JustinACoder/H22-GR3-UnrealAI
|
361eb9ef1147f8a2991e5f98c4118cd823184adf
|
[
"MIT"
] | 6
|
2022-02-04T18:12:24.000Z
|
2022-03-21T23:57:12.000Z
|
Lib/site-packages/tensorflow/contrib/estimator/python/estimator/rnn.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
Lib/site-packages/tensorflow/contrib/estimator/python/estimator/rnn.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-08T03:53:23.000Z
|
2022-02-08T03:53:23.000Z
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Recurrent Neural Network estimators."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib.estimator.python.estimator import extenders
from tensorflow.contrib.feature_column.python.feature_column import sequence_feature_column as seq_fc
from tensorflow.python.estimator import estimator
from tensorflow.python.estimator.canned import head as head_lib
from tensorflow.python.estimator.canned import optimizers
from tensorflow.python.feature_column import feature_column as feature_column_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.layers import core as core_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import rnn
from tensorflow.python.ops import rnn_cell
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops.losses import losses
from tensorflow.python.summary import summary
from tensorflow.python.training import optimizer as optimizer_lib
from tensorflow.python.training import training_util
# The defaults are historical artifacts of the initial implementation, but seem
# reasonable choices.
_DEFAULT_LEARNING_RATE = 0.05
_DEFAULT_CLIP_NORM = 5.0
_CELL_TYPES = {'basic_rnn': rnn_cell.BasicRNNCell,
'lstm': rnn_cell.BasicLSTMCell,
'gru': rnn_cell.GRUCell}
# Indicates no value was provided by the user to a kwarg.
USE_DEFAULT = object()
def _single_rnn_cell(num_units, cell_type):
cell_type = _CELL_TYPES.get(cell_type, cell_type)
if not cell_type or not issubclass(cell_type, rnn_cell.RNNCell):
raise ValueError('Supported cell types are {}; got {}'.format(
list(_CELL_TYPES.keys()), cell_type))
return cell_type(num_units=num_units)
def _make_rnn_cell_fn(num_units, cell_type='basic_rnn'):
"""Convenience function to create `rnn_cell_fn` for canned RNN Estimators.
Args:
num_units: Iterable of integer number of hidden units per RNN layer.
cell_type: A subclass of `tf.nn.rnn_cell.RNNCell` or a string specifying
the cell type. Supported strings are: `'basic_rnn'`, `'lstm'`, and
`'gru'`.
Returns:
A function that takes a single argument, an instance of
`tf.estimator.ModeKeys`, and returns an instance derived from
`tf.nn.rnn_cell.RNNCell`.
Raises:
ValueError: If cell_type is not supported.
"""
def rnn_cell_fn(mode):
# Unused. Part of the rnn_cell_fn interface since user specified functions
# may need different behavior across modes (e.g. dropout).
del mode
cells = [_single_rnn_cell(n, cell_type) for n in num_units]
if len(cells) == 1:
return cells[0]
return rnn_cell.MultiRNNCell(cells)
return rnn_cell_fn
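# Usage sketch (hedged; it only mirrors the docstring above): build a
# two-layer LSTM cell function and instantiate it. The mode argument is
# ignored by this particular cell_fn.
#
#   cell_fn = _make_rnn_cell_fn(num_units=[32, 16], cell_type='lstm')
#   cell = cell_fn(None)  # MultiRNNCell of two BasicLSTMCells (32 and 16 units)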
def _concatenate_context_input(sequence_input, context_input):
"""Replicates `context_input` across all timesteps of `sequence_input`.
Expands dimension 1 of `context_input` then tiles it `sequence_length` times.
This value is appended to `sequence_input` on dimension 2 and the result is
returned.
Args:
sequence_input: A `Tensor` of dtype `float32` and shape `[batch_size,
padded_length, d0]`.
context_input: A `Tensor` of dtype `float32` and shape `[batch_size, d1]`.
Returns:
A `Tensor` of dtype `float32` and shape `[batch_size, padded_length,
d0 + d1]`.
Raises:
ValueError: If `sequence_input` does not have rank 3 or `context_input` does
not have rank 2.
"""
seq_rank_check = check_ops.assert_rank(
sequence_input,
3,
message='sequence_input must have rank 3',
data=[array_ops.shape(sequence_input)])
seq_type_check = check_ops.assert_type(
sequence_input,
dtypes.float32,
message='sequence_input must have dtype float32; got {}.'.format(
sequence_input.dtype))
ctx_rank_check = check_ops.assert_rank(
context_input,
2,
message='context_input must have rank 2',
data=[array_ops.shape(context_input)])
ctx_type_check = check_ops.assert_type(
context_input,
dtypes.float32,
message='context_input must have dtype float32; got {}.'.format(
context_input.dtype))
with ops.control_dependencies(
[seq_rank_check, seq_type_check, ctx_rank_check, ctx_type_check]):
padded_length = array_ops.shape(sequence_input)[1]
tiled_context_input = array_ops.tile(
array_ops.expand_dims(context_input, 1),
array_ops.concat([[1], [padded_length], [1]], 0))
return array_ops.concat([sequence_input, tiled_context_input], 2)
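# Shape sketch (illustrative): with sequence_input of shape
# [batch_size=2, padded_length=5, d0=8] and context_input of shape [2, 3],
# the returned tensor has shape [2, 5, 11]; the 3 context features are
# repeated at every one of the 5 timesteps.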
def _select_last_activations(activations, sequence_lengths):
"""Selects the nth set of activations for each n in `sequence_length`.
Returns a `Tensor` of shape `[batch_size, k]`. If `sequence_length` is not
`None`, then `output[i, :] = activations[i, sequence_length[i] - 1, :]`. If
`sequence_length` is `None`, then `output[i, :] = activations[i, -1, :]`.
Args:
activations: A `Tensor` with shape `[batch_size, padded_length, k]`.
sequence_lengths: A `Tensor` with shape `[batch_size]` or `None`.
Returns:
A `Tensor` of shape `[batch_size, k]`.
"""
with ops.name_scope(
'select_last_activations', values=[activations, sequence_lengths]):
activations_shape = array_ops.shape(activations)
batch_size = activations_shape[0]
padded_length = activations_shape[1]
output_units = activations_shape[2]
if sequence_lengths is None:
sequence_lengths = padded_length
start_indices = math_ops.to_int64(
math_ops.range(batch_size) * padded_length)
last_indices = start_indices + sequence_lengths - 1
reshaped_activations = array_ops.reshape(
activations, [batch_size * padded_length, output_units])
last_activations = array_ops.gather(reshaped_activations, last_indices)
last_activations.set_shape([activations.shape[0], activations.shape[2]])
return last_activations
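# Illustrative sketch with hypothetical values: activations of shape [2, 3, 4]
# filled with 0..23 and sequence lengths [2, 3] select activations[0, 1, :]
# (i.e. [4, 5, 6, 7]) for the first example and activations[1, 2, :]
# (i.e. [20, 21, 22, 23]) for the second.
def _example_select_last_activations():
  activations = array_ops.reshape(
      math_ops.to_float(math_ops.range(2 * 3 * 4)), [2, 3, 4])
  sequence_lengths = math_ops.to_int64([2, 3])
  return _select_last_activations(activations, sequence_lengths)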
def _rnn_logit_fn_builder(output_units, rnn_cell_fn, sequence_feature_columns,
context_feature_columns, input_layer_partitioner):
"""Function builder for a rnn logit_fn.
Args:
output_units: An int indicating the dimension of the logit layer.
rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
returns an object of type `tf.nn.rnn_cell.RNNCell`.
sequence_feature_columns: An iterable containing the `FeatureColumn`s
that represent sequential input.
context_feature_columns: An iterable containing the `FeatureColumn`s
that represent contextual input.
input_layer_partitioner: Partitioner for input layer.
Returns:
A logit_fn (see below).
Raises:
ValueError: If output_units is not an int.
"""
if not isinstance(output_units, int):
raise ValueError('output_units must be an int. Given type: {}'.format(
type(output_units)))
def rnn_logit_fn(features, mode):
"""Recurrent Neural Network logit_fn.
Args:
features: This is the first item returned from the `input_fn`
passed to `train`, `evaluate`, and `predict`. This should be a
single `Tensor` or `dict` of same.
mode: Optional. Specifies if this training, evaluation or prediction. See
`ModeKeys`.
Returns:
A `Tensor` representing the logits.
"""
with variable_scope.variable_scope(
'sequence_input_layer',
values=tuple(six.itervalues(features)),
partitioner=input_layer_partitioner):
sequence_input, sequence_length = seq_fc.sequence_input_layer(
features=features, feature_columns=sequence_feature_columns)
summary.histogram('sequence_length', sequence_length)
if context_feature_columns:
context_input = feature_column_lib.input_layer(
features=features,
feature_columns=context_feature_columns)
sequence_input = _concatenate_context_input(sequence_input,
context_input)
cell = rnn_cell_fn(mode)
# Ignore output state.
rnn_outputs, _ = rnn.dynamic_rnn(
cell=cell,
inputs=sequence_input,
sequence_length=sequence_length,
dtype=dtypes.float32,
time_major=False)
last_activations = _select_last_activations(rnn_outputs, sequence_length)
with variable_scope.variable_scope('logits', values=(rnn_outputs,)):
logits = core_layers.dense(
last_activations,
units=output_units,
activation=None,
kernel_initializer=init_ops.glorot_uniform_initializer())
return logits
return rnn_logit_fn
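# Illustrative sketch (hypothetical feature name, assuming `seq_fc` exposes
# `sequence_numeric_column` as referenced in the docstrings above): building a
# logit_fn for a single sequence-numeric column and a 1-dimensional logit layer.
# Calling the returned function requires a features dict containing the
# 'price_history' key and a mode from `tf.estimator.ModeKeys`.
def _example_rnn_logit_fn_builder():
  price_history = seq_fc.sequence_numeric_column('price_history')
  return _rnn_logit_fn_builder(
      output_units=1,
      rnn_cell_fn=_make_rnn_cell_fn(num_units=[16]),
      sequence_feature_columns=[price_history],
      context_feature_columns=None,
      input_layer_partitioner=None)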
def _rnn_model_fn(features,
labels,
mode,
head,
rnn_cell_fn,
sequence_feature_columns,
context_feature_columns,
optimizer='Adagrad',
input_layer_partitioner=None,
config=None):
"""Recurrent Neural Net model_fn.
Args:
features: dict of `Tensor` and `SparseTensor` objects returned from
`input_fn`.
labels: `Tensor` of shape [batch_size, 1] or [batch_size] with labels.
mode: Defines whether this is training, evaluation or prediction.
See `ModeKeys`.
head: A `head_lib._Head` instance.
rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
returns an object of type `tf.nn.rnn_cell.RNNCell`.
sequence_feature_columns: Iterable containing `FeatureColumn`s that
represent sequential model inputs.
context_feature_columns: Iterable containing `FeatureColumn`s that
represent model inputs not associated with a specific timestep.
optimizer: String, `tf.Optimizer` object, or callable that creates the
optimizer to use for training. If not specified, will use the Adagrad
optimizer with a default learning rate of 0.05 and gradient clip norm of
5.0.
input_layer_partitioner: Partitioner for input layer. Defaults
to `min_max_variable_partitioner` with `min_slice_size` 64 << 20.
config: `RunConfig` object to configure the runtime settings.
Returns:
An `EstimatorSpec` instance.
Raises:
ValueError: If mode or optimizer is invalid, or features has the wrong type.
"""
if not isinstance(features, dict):
raise ValueError('features should be a dictionary of `Tensor`s. '
'Given type: {}'.format(type(features)))
# If user does not provide an optimizer instance, use the optimizer specified
# by the string with default learning rate and gradient clipping.
if not isinstance(optimizer, optimizer_lib.Optimizer):
optimizer = optimizers.get_optimizer_instance(
optimizer, learning_rate=_DEFAULT_LEARNING_RATE)
optimizer = extenders.clip_gradients_by_norm(optimizer, _DEFAULT_CLIP_NORM)
num_ps_replicas = config.num_ps_replicas if config else 0
partitioner = partitioned_variables.min_max_variable_partitioner(
max_partitions=num_ps_replicas)
with variable_scope.variable_scope(
'rnn',
values=tuple(six.itervalues(features)),
partitioner=partitioner):
input_layer_partitioner = input_layer_partitioner or (
partitioned_variables.min_max_variable_partitioner(
max_partitions=num_ps_replicas,
min_slice_size=64 << 20))
logit_fn = _rnn_logit_fn_builder(
output_units=head.logits_dimension,
rnn_cell_fn=rnn_cell_fn,
sequence_feature_columns=sequence_feature_columns,
context_feature_columns=context_feature_columns,
input_layer_partitioner=input_layer_partitioner)
logits = logit_fn(features=features, mode=mode)
def _train_op_fn(loss):
"""Returns the op to optimize the loss."""
return optimizer.minimize(
loss,
global_step=training_util.get_global_step())
return head.create_estimator_spec(
features=features,
mode=mode,
labels=labels,
train_op_fn=_train_op_fn,
logits=logits)
def _assert_rnn_cell_fn(rnn_cell_fn, num_units, cell_type):
"""Assert arguments are valid and return rnn_cell_fn."""
if rnn_cell_fn and (num_units or cell_type != USE_DEFAULT):
raise ValueError(
'num_units and cell_type must not be specified when using rnn_cell_fn'
)
if not rnn_cell_fn:
if cell_type == USE_DEFAULT:
cell_type = 'basic_rnn'
rnn_cell_fn = _make_rnn_cell_fn(num_units, cell_type)
return rnn_cell_fn
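# Illustrative sketch of the argument contract enforced above: supply either
# `num_units` (optionally with `cell_type`) or a custom `rnn_cell_fn`, never both.
def _example_assert_rnn_cell_fn():
  # Valid: derive the cell_fn from layer sizes and a cell type string.
  derived = _assert_rnn_cell_fn(
      rnn_cell_fn=None, num_units=[32, 16], cell_type='gru')
  # Valid: pass a custom cell_fn and leave num_units/cell_type at their defaults.
  custom = _assert_rnn_cell_fn(
      rnn_cell_fn=lambda mode: rnn_cell.GRUCell(8),
      num_units=None, cell_type=USE_DEFAULT)
  # Invalid, raises ValueError: a custom cell_fn combined with num_units.
  # _assert_rnn_cell_fn(rnn_cell_fn=custom, num_units=[32], cell_type=USE_DEFAULT)
  return derived, custom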
class RNNClassifier(estimator.Estimator):
"""A classifier for TensorFlow RNN models.
Trains a recurrent neural network model to classify instances into one of
multiple classes.
Example:
```python
token_sequence = sequence_categorical_column_with_hash_bucket(...)
token_emb = embedding_column(categorical_column=token_sequence, ...)
estimator = RNNClassifier(
sequence_feature_columns=[token_emb],
num_units=[32, 16], cell_type='lstm')
# Input builders
  def input_fn_train(): # returns x, y
pass
estimator.train(input_fn=input_fn_train, steps=100)
  def input_fn_eval(): # returns x, y
pass
metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
  def input_fn_predict(): # returns x, None
pass
predictions = estimator.predict(input_fn=input_fn_predict)
```
Input of `train` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column` is not `None`, a feature with
`key=weight_column` whose value is a `Tensor`.
* for each `column` in `sequence_feature_columns`:
- a feature with `key=column.name` whose `value` is a `SparseTensor`.
* for each `column` in `context_feature_columns`:
- if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `_WeightedCategoricalColumn`, two features: the first
with `key` the id column name, the second with `key` the weight column
name. Both features' `value` must be a `SparseTensor`.
- if `column` is a `_DenseColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
Loss is calculated by using softmax cross entropy.
@compatibility(eager)
Estimators are not compatible with eager execution.
@end_compatibility
"""
def __init__(self,
sequence_feature_columns,
context_feature_columns=None,
num_units=None,
cell_type=USE_DEFAULT,
rnn_cell_fn=None,
model_dir=None,
n_classes=2,
weight_column=None,
label_vocabulary=None,
optimizer='Adagrad',
loss_reduction=losses.Reduction.SUM_OVER_BATCH_SIZE,
input_layer_partitioner=None,
config=None):
"""Initializes a `RNNClassifier` instance.
Args:
sequence_feature_columns: An iterable containing the `FeatureColumn`s
that represent sequential input. All items in the set should either be
sequence columns (e.g. `sequence_numeric_column`) or constructed from
one (e.g. `embedding_column` with `sequence_categorical_column_*` as
input).
context_feature_columns: An iterable containing the `FeatureColumn`s
for contextual input. The data represented by these columns will be
replicated and given to the RNN at each timestep. These columns must be
instances of classes derived from `_DenseColumn` such as
`numeric_column`, not the sequential variants.
num_units: Iterable of integer number of hidden units per RNN layer. If
set, `cell_type` must also be specified and `rnn_cell_fn` must be
`None`.
cell_type: A subclass of `tf.nn.rnn_cell.RNNCell` or a string specifying
the cell type. Supported strings are: `'basic_rnn'`, `'lstm'`, and
`'gru'`. If set, `num_units` must also be specified and `rnn_cell_fn`
must be `None`.
rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
returns an object of type `tf.nn.rnn_cell.RNNCell` that will be used to
construct the RNN. If set, `num_units` and `cell_type` cannot be set.
This is for advanced users who need additional customization beyond
`num_units` and `cell_type`. Note that `tf.nn.rnn_cell.MultiRNNCell` is
needed for stacked RNNs.
      model_dir: Directory to save model parameters, graph, etc. This can
        also be used to load checkpoints from the directory into an estimator
        to continue training a previously saved model.
n_classes: Number of label classes. Defaults to 2, namely binary
classification. Must be > 1.
weight_column: A string or a `_NumericColumn` created by
`tf.feature_column.numeric_column` defining feature column representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example. If it is a string, it is
used as a key to fetch weight tensor from the `features`. If it is a
`_NumericColumn`, raw tensor is fetched by key `weight_column.key`,
then weight_column.normalizer_fn is applied on it to get weight tensor.
label_vocabulary: A list of strings represents possible label values. If
given, labels must be string type and have any value in
`label_vocabulary`. If it is not given, that means labels are
already encoded as integer or float within [0, 1] for `n_classes=2` and
        encoded as integer values in {0, 1, ..., n_classes-1} for `n_classes` > 2.
Also there will be errors if vocabulary is not provided and labels are
string.
optimizer: An instance of `tf.Optimizer` or string specifying optimizer
type. Defaults to Adagrad optimizer.
loss_reduction: One of `tf.losses.Reduction` except `NONE`. Describes how
to reduce training loss over batch. Defaults to `SUM_OVER_BATCH_SIZE`.
input_layer_partitioner: Optional. Partitioner for input layer. Defaults
to `min_max_variable_partitioner` with `min_slice_size` 64 << 20.
config: `RunConfig` object to configure the runtime settings.
Raises:
ValueError: If `num_units`, `cell_type`, and `rnn_cell_fn` are not
compatible.
"""
rnn_cell_fn = _assert_rnn_cell_fn(rnn_cell_fn, num_units, cell_type)
if n_classes == 2:
head = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss( # pylint: disable=protected-access
weight_column=weight_column,
label_vocabulary=label_vocabulary,
loss_reduction=loss_reduction)
else:
head = head_lib._multi_class_head_with_softmax_cross_entropy_loss( # pylint: disable=protected-access
n_classes,
weight_column=weight_column,
label_vocabulary=label_vocabulary,
loss_reduction=loss_reduction)
def _model_fn(features, labels, mode, config):
return _rnn_model_fn(
features=features,
labels=labels,
mode=mode,
head=head,
rnn_cell_fn=rnn_cell_fn,
sequence_feature_columns=tuple(sequence_feature_columns or []),
context_feature_columns=tuple(context_feature_columns or []),
optimizer=optimizer,
input_layer_partitioner=input_layer_partitioner,
config=config)
super(RNNClassifier, self).__init__(
model_fn=_model_fn, model_dir=model_dir, config=config)
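# Illustrative sketch (hypothetical column name, assuming `seq_fc` exposes
# `sequence_numeric_column`): an RNNClassifier configured through a custom
# `rnn_cell_fn` instead of `num_units`/`cell_type`, per the contract checked by
# `_assert_rnn_cell_fn` above.
def _example_rnn_classifier_with_custom_cell():
  price_history = seq_fc.sequence_numeric_column('price_history')
  return RNNClassifier(
      sequence_feature_columns=[price_history],
      rnn_cell_fn=lambda mode: rnn_cell.GRUCell(16),
      n_classes=3)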
class RNNEstimator(estimator.Estimator):
"""An Estimator for TensorFlow RNN models with user-specified head.
Example:
```python
token_sequence = sequence_categorical_column_with_hash_bucket(...)
token_emb = embedding_column(categorical_column=token_sequence, ...)
estimator = RNNEstimator(
head=tf.contrib.estimator.regression_head(),
sequence_feature_columns=[token_emb],
num_units=[32, 16], cell_type='lstm')
# Or with custom RNN cell:
def rnn_cell_fn(mode):
cells = [ tf.contrib.rnn.LSTMCell(size) for size in [32, 16] ]
if mode == tf.estimator.ModeKeys.TRAIN:
cells = [ tf.contrib.rnn.DropoutWrapper(cell, input_keep_prob=0.5)
for cell in cells ]
return tf.contrib.rnn.MultiRNNCell(cells)
estimator = RNNEstimator(
head=tf.contrib.estimator.regression_head(),
sequence_feature_columns=[token_emb],
rnn_cell_fn=rnn_cell_fn)
# Input builders
  def input_fn_train(): # returns x, y
pass
estimator.train(input_fn=input_fn_train, steps=100)
  def input_fn_eval(): # returns x, y
pass
metrics = estimator.evaluate(input_fn=input_fn_eval, steps=10)
  def input_fn_predict(): # returns x, None
pass
predictions = estimator.predict(input_fn=input_fn_predict)
```
Input of `train` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if the head's `weight_column` is not `None`, a feature with
`key=weight_column` whose value is a `Tensor`.
* for each `column` in `sequence_feature_columns`:
- a feature with `key=column.name` whose `value` is a `SparseTensor`.
* for each `column` in `context_feature_columns`:
- if `column` is a `_CategoricalColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `_WeightedCategoricalColumn`, two features: the first
with `key` the id column name, the second with `key` the weight column
name. Both features' `value` must be a `SparseTensor`.
- if `column` is a `_DenseColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
Loss and predicted output are determined by the specified head.
@compatibility(eager)
Estimators are not compatible with eager execution.
@end_compatibility
"""
def __init__(self,
head,
sequence_feature_columns,
context_feature_columns=None,
num_units=None,
cell_type=USE_DEFAULT,
rnn_cell_fn=None,
model_dir=None,
optimizer='Adagrad',
input_layer_partitioner=None,
config=None):
"""Initializes a `RNNClassifier` instance.
Args:
head: A `_Head` instance constructed with a method such as
`tf.contrib.estimator.multi_label_head`. This specifies the model's
output and loss function to be optimized.
sequence_feature_columns: An iterable containing the `FeatureColumn`s
that represent sequential input. All items in the set should either be
sequence columns (e.g. `sequence_numeric_column`) or constructed from
one (e.g. `embedding_column` with `sequence_categorical_column_*` as
input).
context_feature_columns: An iterable containing the `FeatureColumn`s
for contextual input. The data represented by these columns will be
replicated and given to the RNN at each timestep. These columns must be
instances of classes derived from `_DenseColumn` such as
`numeric_column`, not the sequential variants.
num_units: Iterable of integer number of hidden units per RNN layer. If
set, `cell_type` must also be specified and `rnn_cell_fn` must be
`None`.
cell_type: A subclass of `tf.nn.rnn_cell.RNNCell` or a string specifying
the cell type. Supported strings are: `'basic_rnn'`, `'lstm'`, and
`'gru'`. If set, `num_units` must also be specified and `rnn_cell_fn`
must be `None`.
rnn_cell_fn: A function with one argument, a `tf.estimator.ModeKeys`, and
returns an object of type `tf.nn.rnn_cell.RNNCell` that will be used to
construct the RNN. If set, `num_units` and `cell_type` cannot be set.
This is for advanced users who need additional customization beyond
`num_units` and `cell_type`. Note that `tf.nn.rnn_cell.MultiRNNCell` is
needed for stacked RNNs.
      model_dir: Directory to save model parameters, graph, etc. This can
        also be used to load checkpoints from the directory into an estimator
        to continue training a previously saved model.
optimizer: An instance of `tf.Optimizer` or string specifying optimizer
type. Defaults to Adagrad optimizer.
input_layer_partitioner: Optional. Partitioner for input layer. Defaults
to `min_max_variable_partitioner` with `min_slice_size` 64 << 20.
config: `RunConfig` object to configure the runtime settings.
Raises:
ValueError: If `num_units`, `cell_type`, and `rnn_cell_fn` are not
compatible.
"""
rnn_cell_fn = _assert_rnn_cell_fn(rnn_cell_fn, num_units, cell_type)
def _model_fn(features, labels, mode, config):
return _rnn_model_fn(
features=features,
labels=labels,
mode=mode,
head=head,
rnn_cell_fn=rnn_cell_fn,
sequence_feature_columns=tuple(sequence_feature_columns or []),
context_feature_columns=tuple(context_feature_columns or []),
optimizer=optimizer,
input_layer_partitioner=input_layer_partitioner,
config=config)
super(RNNEstimator, self).__init__(
model_fn=_model_fn, model_dir=model_dir, config=config)
| 42.502377
| 113
| 0.688243
|
58ae4141137cdc44a4c1fcec27bf6c7e77887331
| 5,275
|
py
|
Python
|
project_spring_2020_morgankindel_pkg/main.py
|
morgankindel/project_spring_2020
|
868c7d7c9d4cf405c2ec1d25c4eb07d4a625e063
|
[
"Apache-2.0"
] | null | null | null |
project_spring_2020_morgankindel_pkg/main.py
|
morgankindel/project_spring_2020
|
868c7d7c9d4cf405c2ec1d25c4eb07d4a625e063
|
[
"Apache-2.0"
] | null | null | null |
project_spring_2020_morgankindel_pkg/main.py
|
morgankindel/project_spring_2020
|
868c7d7c9d4cf405c2ec1d25c4eb07d4a625e063
|
[
"Apache-2.0"
] | null | null | null |
#import required packages
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import cv2
from PIL import Image
from os import path
#load input image from Sample images file folder or set new path for new input image
#--create a counter that upticks when a sequentially named image is found:
#--all images must be named img1 -> imgX where X is the total number of images
#--this counter tracks how many images we are matching stored in 'filecount'
def filecount(i):
i=1
pathPresence = True
while pathPresence == True:
pathPresence = path.exists('./Documents/project_spring_2020_burkekindel_pkg/Sample_images/img%d.jpg' %i )
print(pathPresence)
i+=1
print(i)
print(i-2)
    #--the loop overshoots by two (the failed check plus the final i+=1),
    #--so the number of sequentially named images is i-2
    filecount = i-2
    return filecount
#--create a dictionary to hold our identified number of images so they can be easily looped through
#--length of this dictionary will serve as our new filecount as well
def create_dict(i):
imgDict = {}
while i <= filecount:
input_img = cv2.imread('./Documents/project_spring_2020_burkekindel_pkg/Sample_images/img%d.jpg' %i)
#scale input image and set threshold
scale_percent = 8 #percent of original image size
width = int(input_img.shape[1]* scale_percent/100)
height = int(input_img.shape[0]* scale_percent/100)
dim = (width, height)
input_img_resized = cv2.resize(input_img, dim, interpolation = cv2.INTER_AREA)
# input_img_gs = cv2.cvtColor(input_img_resized, cv2.COLOR_BGR2GRAY)
thresh, input_img_bw = cv2.threshold(input_img_resized,5,250,cv2.THRESH_BINARY)
imgDict['%d' %i] = input_img_bw
i+=1
    return imgDict
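#--illustrative wiring sketch: the helpers above are never invoked in the
#--original script, so the matching loop further down would have no imgDict to
#--work with; the two calls below assume the hard-coded Sample_images path
#--actually contains sequentially named img1.jpg ... imgN.jpg
#--(note this rebinds the name filecount from the helper to the integer count,
#--which is what create_dict expects)
filecount = filecount(1)
imgDict = create_dict(1)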
import math
import os
#load reference image file and set output file name
template_img_dir = './Documents/project_spring_2020_burkekindel_pkg/Reference_images'
result_grid_filename = './Documents/project_spring_2020_burkekindel_pkg/grid.jpg'
result_figsize_resolution = 40 # 1 = 100px
#create list variable that lists files in reference images, and length variable for counting iterations
images_list = sorted(os.listdir(template_img_dir))
images_count = len(images_list)
print('Images: ', images_list)
print('Images count: ', images_count)
# Calculate the grid size:
grid_size = math.ceil(math.sqrt(images_count))
# Create plt plot:
fig, axes = plt.subplots(grid_size, grid_size, figsize=(result_figsize_resolution, result_figsize_resolution))
current_file_number = 0
# create an image grid of reference images that will be compared to the input image.
for image_filename in images_list:
x_position = current_file_number % grid_size
y_position = current_file_number // grid_size
plt_image = plt.imread(template_img_dir + '/' + images_list[current_file_number])
axes[x_position, y_position].imshow(plt_image)
axes[0,0].set_title('Bregma: -0.46', fontsize=50)
axes[0,1].set_title('Bregma: -0.58', fontsize=50)
axes[0,2].set_title('Bregma: -0.70', fontsize=50)
axes[1,0].set_title('Bregma: -0.82', fontsize=50)
axes[1,1].set_title('Bregma: -0.94', fontsize=50)
axes[1,2].set_title('Bregma: -1.00', fontsize=50)
axes[2,0].set_title('Bregma: -1.06', fontsize=50)
axes[2,1].set_title('Bregma: -1.12', fontsize=50)
axes[2,2].set_title('Bregma: -1.32', fontsize=50)
print((current_file_number + 1), '/', images_count, ': ', image_filename)
current_file_number += 1
plt.subplots_adjust(left=0.0, right=.9, bottom=0.0, top=.9)
plt.savefig(result_grid_filename)
#save template grid for future reference
template_grid = cv2.imread('./Documents/project_spring_2020_burkekindel_pkg/grid.jpg')
#resize template grid
scale_percent = 25 #percent of original image size
temp_width = int(template_grid.shape[1] * scale_percent/100)
temp_height = int(template_grid.shape[0]* scale_percent/100)
temp_dim = (temp_width, temp_height)
template_grid= cv2.resize(template_grid, temp_dim, interpolation = cv2.INTER_AREA)
#match template grid to sample/input image and frame reference image with most similarity
#--Loop runs the length of imgDict
#--Creates a copy of template_grid each time to draw our rectangle on
#--Spits out image named as the input was minus 'img'
i=1
while i <= len(imgDict):
tempGrid = template_grid.copy()
#--it is very dumb that the thing we compare our images too is called the image and the images we compare are called the template
res = cv2.matchTemplate(tempGrid, imgDict['%d'%i], cv2.TM_CCOEFF)
cv2.imwrite('./Documents/project_spring_2020_burkekindel_pkg/Processed_images/res%d.jpg' %i, res)
min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
top_left = max_loc
    #--the framed rectangle is the size of the matched (resized) input image
    temp_h, temp_w = imgDict['%d'%i].shape[:2]
    bottom_right = (top_left[0] + temp_w, top_left[1] + temp_h)
cv2.rectangle(tempGrid, top_left, bottom_right, 255, 2)
#save framed grid to Processed images file './Processed_images/___.jpg',
#before running code set file name that corresponds to original input image
output_filepath = './Documents/project_spring_2020_burkekindel_pkg/Processed_images/%d.jpg'%i
status = cv2.imwrite(output_filepath, tempGrid)
#double check that file has saved
print('New image has been saved: ', status)
i+=1
print(i)
| 37.678571
| 133
| 0.730047
|
fa141cb72fa209cced62b1277ef56dca8dff6358
| 2,703
|
py
|
Python
|
mapping/models.py
|
joshua-taylor/dataIntegrator
|
dc0cdc6bb0177b6499d49ca2e39b42291656235e
|
[
"MIT"
] | null | null | null |
mapping/models.py
|
joshua-taylor/dataIntegrator
|
dc0cdc6bb0177b6499d49ca2e39b42291656235e
|
[
"MIT"
] | null | null | null |
mapping/models.py
|
joshua-taylor/dataIntegrator
|
dc0cdc6bb0177b6499d49ca2e39b42291656235e
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from django.conf import settings
from django.db import models
import datetime
class Requests(models.Model):
creator = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
)
name = models.CharField(max_length=40, blank=True)
desc = models.CharField(max_length=255, blank=True)
send_to = models.CharField(max_length=2048, blank=False)
status = models.CharField(max_length=10, default='DRAFT', blank=False) # DRAFT ACTIVE CLOSED
deadline_date = models.DateField(("Deadline"), default=datetime.date.today)
created_at = models.DateTimeField(auto_now_add=True)
class Template(models.Model):
Request = models.ForeignKey(
Requests,
on_delete=models.CASCADE,
)
    field_name = models.CharField(max_length=64, blank=False)
type = models.CharField(max_length=20, blank=False)
desc = models.CharField(max_length=255, blank=True)
max_len = models.IntegerField(blank=False)
manditory = models.BooleanField()
created_at = models.DateTimeField(auto_now_add=True)
# This is the field level detail for a response. It links to a request and sits side-by-side with a template.
class Response(models.Model):
Request = models.ForeignKey(
Requests,
on_delete=models.CASCADE,
)
creator = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
)
created_at = models.DateTimeField(auto_now_add=True)
status = models.CharField(max_length=10, default='DRAFT', blank=False) # DRAFT ACTIVE CLOSED
mapped_field_name = models.CharField(max_length=64, blank=False)
    field_name = models.CharField(max_length=64, blank=False)
type = models.CharField(max_length=64, blank=False)
desc = models.CharField(max_length=255, blank=True)
max_len = models.IntegerField(blank=False)
manditory = models.BooleanField()
#This is essentially the header file for a response but it also contains the file itself:
class ResponseFile(models.Model):
Request = models.ForeignKey(
Requests,
on_delete=models.CASCADE,
)
creator = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
)
rows = models.IntegerField(blank=True,null=True)
badRows = models.IntegerField(blank=True,null=True)
documentPath = models.CharField(max_length=255, blank=True,null=True)
document = models.FileField(null=True, blank=True)
type = models.CharField(max_length=10, default='RESPONSE', blank=False) #used to determine if response file or if created by owner RESPONSE or DOWNLOAD
created_at = models.DateTimeField(auto_now_add=True)
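# Illustrative sketch (not part of the original module): how the models above are
# expected to be wired together via the ORM. The `user` argument and all field
# values below are hypothetical.
def _example_create_draft_response(user):
    request = Requests.objects.create(
        creator=user,
        name='Customer data',
        send_to='partner@example.com',
    )
    Template.objects.create(
        Request=request,
        field_name='email',
        type='string',
        desc='Customer email address',
        max_len=255,
        manditory=True,
    )
    return Response.objects.create(
        Request=request,
        creator=user,
        mapped_field_name='email_address',
        field_name='email',
        type='string',
        desc='Customer email address',
        max_len=255,
        manditory=True,
    )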
| 40.343284
| 155
| 0.73067
|
ef4619646f2c43dd99232c6c123605adfcbf3e4b
| 3,008
|
py
|
Python
|
federatedml/protobuf/generated/pipeline_pb2.py
|
fqiang/FATE
|
36a5a41848f78df7be1e520ae804e64bc67d72fb
|
[
"Apache-2.0"
] | 3
|
2021-11-01T01:22:58.000Z
|
2021-12-29T07:54:51.000Z
|
federatedml/protobuf/generated/pipeline_pb2.py
|
fqiang/FATE
|
36a5a41848f78df7be1e520ae804e64bc67d72fb
|
[
"Apache-2.0"
] | 9
|
2020-01-28T23:05:25.000Z
|
2022-02-10T00:31:01.000Z
|
federatedml/protobuf/generated/pipeline_pb2.py
|
fqiang/FATE
|
36a5a41848f78df7be1e520ae804e64bc67d72fb
|
[
"Apache-2.0"
] | 1
|
2022-01-04T07:05:52.000Z
|
2022-01-04T07:05:52.000Z
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pipeline.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='pipeline.proto',
package='com.webank.ai.fate.core.mlmodel.buffer',
syntax='proto3',
serialized_pb=_b('\n\x0epipeline.proto\x12&com.webank.ai.fate.core.mlmodel.buffer\"P\n\x08Pipeline\x12\x15\n\rinference_dsl\x18\x01 \x01(\x0c\x12\x11\n\ttrain_dsl\x18\x02 \x01(\x0c\x12\x1a\n\x12train_runtime_conf\x18\x03 \x01(\x0c\x42\x0f\x42\rPipelineProtob\x06proto3')
)
_PIPELINE = _descriptor.Descriptor(
name='Pipeline',
full_name='com.webank.ai.fate.core.mlmodel.buffer.Pipeline',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='inference_dsl', full_name='com.webank.ai.fate.core.mlmodel.buffer.Pipeline.inference_dsl', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='train_dsl', full_name='com.webank.ai.fate.core.mlmodel.buffer.Pipeline.train_dsl', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='train_runtime_conf', full_name='com.webank.ai.fate.core.mlmodel.buffer.Pipeline.train_runtime_conf', index=2,
number=3, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=58,
serialized_end=138,
)
DESCRIPTOR.message_types_by_name['Pipeline'] = _PIPELINE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Pipeline = _reflection.GeneratedProtocolMessageType('Pipeline', (_message.Message,), dict(
DESCRIPTOR = _PIPELINE,
__module__ = 'pipeline_pb2'
# @@protoc_insertion_point(class_scope:com.webank.ai.fate.core.mlmodel.buffer.Pipeline)
))
_sym_db.RegisterMessage(Pipeline)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('B\rPipelineProto'))
# @@protoc_insertion_point(module_scope)
| 34.976744
| 272
| 0.753657
|
c89331b135ea9fe3dc55b9eaa80c7f946b18aefc
| 535
|
py
|
Python
|
ML-Course-NTU-Lee/hw3/demo/plot.py
|
kaka-lin/ML-Courses
|
d7111fe64db3efae9acafa4532824c9554dbcaf1
|
[
"MIT"
] | null | null | null |
ML-Course-NTU-Lee/hw3/demo/plot.py
|
kaka-lin/ML-Courses
|
d7111fe64db3efae9acafa4532824c9554dbcaf1
|
[
"MIT"
] | null | null | null |
ML-Course-NTU-Lee/hw3/demo/plot.py
|
kaka-lin/ML-Courses
|
d7111fe64db3efae9acafa4532824c9554dbcaf1
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from keras.utils.vis_utils import plot_model
from keras.models import load_model
def main():
parser = argparse.ArgumentParser(prog='plot_model.py',
description='Plot the model.')
parser.add_argument('--model',type=str,default='model/model-7.h5')
args = parser.parse_args()
emotion_classifier = load_model(args.model)
emotion_classifier.summary()
plot_model(emotion_classifier,to_file='model.png')
if __name__ == '__main__':
main()
| 26.75
| 70
| 0.71028
|
38ea999161bc23fb4e4f8b636ebab0f5c1f53ec4
| 181,400
|
py
|
Python
|
python/src/chirpstack_api/as_pb/external/api/application_pb2.py
|
mattocchi/chirpstack-api
|
1db59dd135b4c2dff4a6a6df1884615288d6bd65
|
[
"MIT"
] | null | null | null |
python/src/chirpstack_api/as_pb/external/api/application_pb2.py
|
mattocchi/chirpstack-api
|
1db59dd135b4c2dff4a6a6df1884615288d6bd65
|
[
"MIT"
] | null | null | null |
python/src/chirpstack_api/as_pb/external/api/application_pb2.py
|
mattocchi/chirpstack-api
|
1db59dd135b4c2dff4a6a6df1884615288d6bd65
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: chirpstack-api/as_pb/external/api/application.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='chirpstack-api/as_pb/external/api/application.proto',
package='api',
syntax='proto3',
serialized_options=b'\n!io.chirpstack.api.as.external.apiZ7github.com/brocaar/chirpstack-api/go/v3/as/external/api',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n3chirpstack-api/as_pb/external/api/application.proto\x12\x03\x61pi\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1bgoogle/protobuf/empty.proto\"\xea\x01\n\x0b\x41pplication\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\'\n\x0forganization_id\x18\x04 \x01(\x03R\x0eorganizationID\x12,\n\x12service_profile_id\x18\x05 \x01(\tR\x10serviceProfileID\x12\x15\n\rpayload_codec\x18\x06 \x01(\t\x12\x1e\n\x16payload_encoder_script\x18\x07 \x01(\t\x12\x1e\n\x16payload_decoder_script\x18\x08 \x01(\t\"\xb9\x01\n\x13\x41pplicationListItem\x12\n\n\x02id\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\'\n\x0forganization_id\x18\x04 \x01(\x03R\x0eorganizationID\x12,\n\x12service_profile_id\x18\x05 \x01(\tR\x10serviceProfileID\x12\x1c\n\x14service_profile_name\x18\x06 \x01(\t\"A\n\x18\x43reateApplicationRequest\x12%\n\x0b\x61pplication\x18\x01 \x01(\x0b\x32\x10.api.Application\"\'\n\x19\x43reateApplicationResponse\x12\n\n\x02id\x18\x01 \x01(\x03\"#\n\x15GetApplicationRequest\x12\n\n\x02id\x18\x01 \x01(\x03\"?\n\x16GetApplicationResponse\x12%\n\x0b\x61pplication\x18\x01 \x01(\x0b\x32\x10.api.Application\"A\n\x18UpdateApplicationRequest\x12%\n\x0b\x61pplication\x18\x01 \x01(\x0b\x32\x10.api.Application\"&\n\x18\x44\x65leteApplicationRequest\x12\n\n\x02id\x18\x01 \x01(\x03\"p\n\x16ListApplicationRequest\x12\r\n\x05limit\x18\x01 \x01(\x03\x12\x0e\n\x06offset\x18\x02 \x01(\x03\x12\'\n\x0forganization_id\x18\x03 \x01(\x03R\x0eorganizationID\x12\x0e\n\x06search\x18\x04 \x01(\t\"X\n\x17ListApplicationResponse\x12\x13\n\x0btotal_count\x18\x01 \x01(\x03\x12(\n\x06result\x18\x02 \x03(\x0b\x32\x18.api.ApplicationListItem\"3\n\x15HTTPIntegrationHeader\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"\xe7\x04\n\x0fHTTPIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12+\n\x07headers\x18\x02 \x03(\x0b\x32\x1a.api.HTTPIntegrationHeader\x12&\n\x0fuplink_data_url\x18\x03 \x01(\tR\ruplinkDataURL\x12\x32\n\x15join_notification_url\x18\x04 \x01(\tR\x13joinNotificationURL\x12\x30\n\x14\x61\x63k_notification_url\x18\x05 \x01(\tR\x12\x61\x63kNotificationURL\x12\x34\n\x16\x65rror_notification_url\x18\x06 \x01(\tR\x14\x65rrorNotificationURL\x12\x36\n\x17status_notification_url\x18\x07 \x01(\tR\x15statusNotificationURL\x12:\n\x19location_notification_url\x18\x08 \x01(\tR\x17locationNotificationURL\x12\x35\n\x17tx_ack_notification_url\x18\t \x01(\tR\x14txAckNotificationURL\x12@\n\x1cintegration_notification_url\x18\n \x01(\tR\x1aintegrationNotificationURL\x12!\n\tmarshaler\x18\x0b \x01(\x0e\x32\x0e.api.Marshaler\x12,\n\x12\x65vent_endpoint_url\x18\x0c \x01(\tR\x10\x65ventEndpointURL\"I\n\x1c\x43reateHTTPIntegrationRequest\x12)\n\x0bintegration\x18\x01 \x01(\x0b\x32\x14.api.HTTPIntegration\"B\n\x19GetHTTPIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"G\n\x1aGetHTTPIntegrationResponse\x12)\n\x0bintegration\x18\x01 \x01(\x0b\x32\x14.api.HTTPIntegration\"I\n\x1cUpdateHTTPIntegrationRequest\x12)\n\x0bintegration\x18\x01 \x01(\x0b\x32\x14.api.HTTPIntegration\"E\n\x1c\x44\x65leteHTTPIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"?\n\x16ListIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"9\n\x13IntegrationListItem\x12\"\n\x04kind\x18\x01 
\x01(\x0e\x32\x14.api.IntegrationKind\"X\n\x17ListIntegrationResponse\x12\x13\n\x0btotal_count\x18\x01 \x01(\x03\x12(\n\x06result\x18\x02 \x03(\x0b\x32\x18.api.IntegrationListItem\"\xc8\x01\n\x13InfluxDBIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12\x10\n\x08\x65ndpoint\x18\x02 \x01(\t\x12\n\n\x02\x64\x62\x18\x03 \x01(\t\x12\x10\n\x08username\x18\x04 \x01(\t\x12\x10\n\x08password\x18\x05 \x01(\t\x12\x1d\n\x15retention_policy_name\x18\x06 \x01(\t\x12)\n\tprecision\x18\x07 \x01(\x0e\x32\x16.api.InfluxDBPrecision\"Q\n CreateInfluxDBIntegrationRequest\x12-\n\x0bintegration\x18\x01 \x01(\x0b\x32\x18.api.InfluxDBIntegration\"F\n\x1dGetInfluxDBIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"O\n\x1eGetInfluxDBIntegrationResponse\x12-\n\x0bintegration\x18\x01 \x01(\x0b\x32\x18.api.InfluxDBIntegration\"Q\n UpdateInfluxDBIntegrationRequest\x12-\n\x0bintegration\x18\x01 \x01(\x0b\x32\x18.api.InfluxDBIntegration\"I\n DeleteInfluxDBIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"O\n\x16ThingsBoardIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12\x0e\n\x06server\x18\x02 \x01(\t\"W\n#CreateThingsBoardIntegrationRequest\x12\x30\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1b.api.ThingsBoardIntegration\"I\n GetThingsBoardIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"U\n!GetThingsBoardIntegrationResponse\x12\x30\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1b.api.ThingsBoardIntegration\"W\n#UpdateThingsBoardIntegrationRequest\x12\x30\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1b.api.ThingsBoardIntegration\"L\n#DeleteThingsBoardIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"O\n\x14MyDevicesIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12\x10\n\x08\x65ndpoint\x18\x02 \x01(\t\"S\n!CreateMyDevicesIntegrationRequest\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.MyDevicesIntegration\"G\n\x1eGetMyDevicesIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"Q\n\x1fGetMyDevicesIntegrationResponse\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.MyDevicesIntegration\"S\n!UpdateMyDevicesIntegrationRequest\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.MyDevicesIntegration\"J\n!DeleteMyDevicesIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"\xf3\x05\n\x14LoRaCloudIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12\x13\n\x0bgeolocation\x18\x02 \x01(\x08\x12\x19\n\x11geolocation_token\x18\x03 \x01(\t\x12\x34\n\x16geolocation_buffer_ttl\x18\x04 \x01(\rR\x14geolocationBufferTTL\x12#\n\x1bgeolocation_min_buffer_size\x18\x05 \x01(\r\x12)\n\x10geolocation_tdoa\x18\x06 \x01(\x08R\x0fgeolocationTDOA\x12)\n\x10geolocation_rssi\x18\x07 \x01(\x08R\x0fgeolocationRSSI\x12)\n\x10geolocation_gnss\x18\x08 \x01(\x08R\x0fgeolocationGNSS\x12\x43\n\x1egeolocation_gnss_payload_field\x18\t \x01(\tR\x1bgeolocationGNSSPayloadField\x12>\n\x1cgeolocation_gnss_use_rx_time\x18\n \x01(\x08R\x18geolocationGNSSUseRxTime\x12)\n\x10geolocation_wifi\x18\x0b \x01(\x08R\x0fgeolocationWifi\x12\x43\n\x1egeolocation_wifi_payload_field\x18\x0c \x01(\tR\x1bgeolocationWifiPayloadField\x12\x0b\n\x03\x64\x61s\x18\r \x01(\x08\x12\x11\n\tdas_token\x18\x0e \x01(\t\x12\x16\n\x0e\x64\x61s_modem_port\x18\x0f \x01(\r\x12\"\n\rdas_gnss_port\x18\x10 \x01(\rR\x0b\x64\x61sGNSSPort\x12.\n\x14\x64\x61s_gnss_use_rx_time\x18\x11 
\x01(\x08R\x10\x64\x61sGNSSUseRxTime\x12\'\n\x1f\x64\x61s_streaming_geoloc_workaround\x18\x12 \x01(\x08\"S\n!CreateLoRaCloudIntegrationRequest\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.LoRaCloudIntegration\"G\n\x1eGetLoRaCloudIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"Q\n\x1fGetLoRaCloudIntegrationResponse\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.LoRaCloudIntegration\"S\n!UpdateLoRaCloudIntegrationRequest\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.LoRaCloudIntegration\"J\n!DeleteLoRaCloudIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"\xad\x01\n\x14GCPPubSubIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12!\n\tmarshaler\x18\x02 \x01(\x0e\x32\x0e.api.Marshaler\x12\x18\n\x10\x63redentials_file\x18\x03 \x01(\t\x12\x1d\n\nproject_id\x18\x04 \x01(\tR\tprojectID\x12\x12\n\ntopic_name\x18\x05 \x01(\t\"S\n!CreateGCPPubSubIntegrationRequest\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.GCPPubSubIntegration\"G\n\x1eGetGCPPubSubIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"Q\n\x1fGetGCPPubSubIntegrationResponse\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.GCPPubSubIntegration\"S\n!UpdateGCPPubSubIntegrationRequest\x12.\n\x0bintegration\x18\x01 \x01(\x0b\x32\x19.api.GCPPubSubIntegration\"J\n!DeleteGCPPubSubIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"\xc9\x01\n\x11\x41WSSNSIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12!\n\tmarshaler\x18\x02 \x01(\x0e\x32\x0e.api.Marshaler\x12\x0e\n\x06region\x18\x03 \x01(\t\x12\"\n\raccess_key_id\x18\x04 \x01(\tR\x0b\x61\x63\x63\x65ssKeyID\x12\x19\n\x11secret_access_key\x18\x05 \x01(\t\x12\x1b\n\ttopic_arn\x18\x06 \x01(\tR\x08topicARN\"M\n\x1e\x43reateAWSSNSIntegrationRequest\x12+\n\x0bintegration\x18\x01 \x01(\x0b\x32\x16.api.AWSSNSIntegration\"D\n\x1bGetAWSSNSIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"K\n\x1cGetAWSSNSIntegrationResponse\x12+\n\x0bintegration\x18\x01 \x01(\x0b\x32\x16.api.AWSSNSIntegration\"M\n\x1eUpdateAWSSNSIntegrationRequest\x12+\n\x0bintegration\x18\x01 \x01(\x0b\x32\x16.api.AWSSNSIntegration\"G\n\x1e\x44\x65leteAWSSNSIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"\x97\x01\n\x1a\x41zureServiceBusIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12!\n\tmarshaler\x18\x02 \x01(\x0e\x32\x0e.api.Marshaler\x12\x19\n\x11\x63onnection_string\x18\x03 \x01(\t\x12\x14\n\x0cpublish_name\x18\x04 \x01(\t\"_\n\'CreateAzureServiceBusIntegrationRequest\x12\x34\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1f.api.AzureServiceBusIntegration\"M\n$GetAzureServiceBusIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"]\n%GetAzureServiceBusIntegrationResponse\x12\x34\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1f.api.AzureServiceBusIntegration\"_\n\'UpdateAzureServiceBusIntegrationRequest\x12\x34\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1f.api.AzureServiceBusIntegration\"P\n\'DeleteAzureServiceBusIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"^\n\x16PilotThingsIntegration\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\x12\x0e\n\x06server\x18\x02 \x01(\t\x12\r\n\x05token\x18\x03 \x01(\t\"W\n#CreatePilotThingsIntegrationRequest\x12\x30\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1b.api.PilotThingsIntegration\"I\n 
GetPilotThingsIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"U\n!GetPilotThingsIntegrationResponse\x12\x30\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1b.api.PilotThingsIntegration\"W\n#UpdatePilotThingsIntegrationRequest\x12\x30\n\x0bintegration\x18\x01 \x01(\x0b\x32\x1b.api.PilotThingsIntegration\"L\n#DeletePilotThingsIntegrationRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"X\n/GenerateMQTTIntegrationClientCertificateRequest\x12%\n\x0e\x61pplication_id\x18\x01 \x01(\x03R\rapplicationID\"\x96\x01\n0GenerateMQTTIntegrationClientCertificateResponse\x12\x10\n\x08tls_cert\x18\x01 \x01(\t\x12\x0f\n\x07tls_key\x18\x02 \x01(\t\x12\x0f\n\x07\x63\x61_cert\x18\x03 \x01(\t\x12.\n\nexpires_at\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.Timestamp*\xaf\x01\n\x0fIntegrationKind\x12\x08\n\x04HTTP\x10\x00\x12\x0c\n\x08INFLUXDB\x10\x01\x12\x0f\n\x0bTHINGSBOARD\x10\x02\x12\r\n\tMYDEVICES\x10\x03\x12\r\n\tLORACLOUD\x10\x04\x12\x0e\n\nGCP_PUBSUB\x10\x05\x12\x0b\n\x07\x41WS_SNS\x10\x06\x12\x15\n\x11\x41ZURE_SERVICE_BUS\x10\x07\x12\x10\n\x0cPILOT_THINGS\x10\x08\x12\x0f\n\x0bMQTT_GLOBAL\x10\t*0\n\tMarshaler\x12\x08\n\x04JSON\x10\x00\x12\x0c\n\x08PROTOBUF\x10\x01\x12\x0b\n\x07JSON_V3\x10\x02*?\n\x11InfluxDBPrecision\x12\x06\n\x02NS\x10\x00\x12\x05\n\x01U\x10\x01\x12\x06\n\x02MS\x10\x02\x12\x05\n\x01S\x10\x03\x12\x05\n\x01M\x10\x04\x12\x05\n\x01H\x10\x05\x32\xba\x37\n\x12\x41pplicationService\x12\x65\n\x06\x43reate\x12\x1d.api.CreateApplicationRequest\x1a\x1e.api.CreateApplicationResponse\"\x1c\x82\xd3\xe4\x93\x02\x16\"\x11/api/applications:\x01*\x12^\n\x03Get\x12\x1a.api.GetApplicationRequest\x1a\x1b.api.GetApplicationResponse\"\x1e\x82\xd3\xe4\x93\x02\x18\x12\x16/api/applications/{id}\x12n\n\x06Update\x12\x1d.api.UpdateApplicationRequest\x1a\x16.google.protobuf.Empty\"-\x82\xd3\xe4\x93\x02\'\x1a\"/api/applications/{application.id}:\x01*\x12_\n\x06\x44\x65lete\x12\x1d.api.DeleteApplicationRequest\x1a\x16.google.protobuf.Empty\"\x1e\x82\xd3\xe4\x93\x02\x18*\x16/api/applications/{id}\x12\\\n\x04List\x12\x1b.api.ListApplicationRequest\x1a\x1c.api.ListApplicationResponse\"\x19\x82\xd3\xe4\x93\x02\x13\x12\x11/api/applications\x12\x9f\x01\n\x15\x43reateHTTPIntegration\x12!.api.CreateHTTPIntegrationRequest\x1a\x16.google.protobuf.Empty\"K\x82\xd3\xe4\x93\x02\x45\"@/api/applications/{integration.application_id}/integrations/http:\x01*\x12\x93\x01\n\x12GetHTTPIntegration\x12\x1e.api.GetHTTPIntegrationRequest\x1a\x1f.api.GetHTTPIntegrationResponse\"<\x82\xd3\xe4\x93\x02\x36\x12\x34/api/applications/{application_id}/integrations/http\x12\x9f\x01\n\x15UpdateHTTPIntegration\x12!.api.UpdateHTTPIntegrationRequest\x1a\x16.google.protobuf.Empty\"K\x82\xd3\xe4\x93\x02\x45\x1a@/api/applications/{integration.application_id}/integrations/http:\x01*\x12\x90\x01\n\x15\x44\x65leteHTTPIntegration\x12!.api.DeleteHTTPIntegrationRequest\x1a\x16.google.protobuf.Empty\"<\x82\xd3\xe4\x93\x02\x36*4/api/applications/{application_id}/integrations/http\x12\xab\x01\n\x19\x43reateInfluxDBIntegration\x12%.api.CreateInfluxDBIntegrationRequest\x1a\x16.google.protobuf.Empty\"O\x82\xd3\xe4\x93\x02I\"D/api/applications/{integration.application_id}/integrations/influxdb:\x01*\x12\xa3\x01\n\x16GetInfluxDBIntegration\x12\".api.GetInfluxDBIntegrationRequest\x1a#.api.GetInfluxDBIntegrationResponse\"@\x82\xd3\xe4\x93\x02:\x12\x38/api/applications/{application_id}/integrations/influxdb\x12\xab\x01\n\x19UpdateInfluxDBIntegration\x12%.api.UpdateInfluxDBIntegrationRequest\x1a\x16.google.protobuf.Empty\"O\x82\xd3\
xe4\x93\x02I\x1a\x44/api/applications/{integration.application_id}/integrations/influxdb:\x01*\x12\x9c\x01\n\x19\x44\x65leteInfluxDBIntegration\x12%.api.DeleteInfluxDBIntegrationRequest\x1a\x16.google.protobuf.Empty\"@\x82\xd3\xe4\x93\x02:*8/api/applications/{application_id}/integrations/influxdb\x12\xb4\x01\n\x1c\x43reateThingsBoardIntegration\x12(.api.CreateThingsBoardIntegrationRequest\x1a\x16.google.protobuf.Empty\"R\x82\xd3\xe4\x93\x02L\"G/api/applications/{integration.application_id}/integrations/thingsboard:\x01*\x12\xaf\x01\n\x19GetThingsBoardIntegration\x12%.api.GetThingsBoardIntegrationRequest\x1a&.api.GetThingsBoardIntegrationResponse\"C\x82\xd3\xe4\x93\x02=\x12;/api/applications/{application_id}/integrations/thingsboard\x12\xb4\x01\n\x1cUpdateThingsBoardIntegration\x12(.api.UpdateThingsBoardIntegrationRequest\x1a\x16.google.protobuf.Empty\"R\x82\xd3\xe4\x93\x02L\x1aG/api/applications/{integration.application_id}/integrations/thingsboard:\x01*\x12\xa5\x01\n\x1c\x44\x65leteThingsBoardIntegration\x12(.api.DeleteThingsBoardIntegrationRequest\x1a\x16.google.protobuf.Empty\"C\x82\xd3\xe4\x93\x02=*;/api/applications/{application_id}/integrations/thingsboard\x12\xae\x01\n\x1a\x43reateMyDevicesIntegration\x12&.api.CreateMyDevicesIntegrationRequest\x1a\x16.google.protobuf.Empty\"P\x82\xd3\xe4\x93\x02J\"E/api/applications/{integration.application_id}/integrations/mydevices:\x01*\x12\xa7\x01\n\x17GetMyDevicesIntegration\x12#.api.GetMyDevicesIntegrationRequest\x1a$.api.GetMyDevicesIntegrationResponse\"A\x82\xd3\xe4\x93\x02;\x12\x39/api/applications/{application_id}/integrations/mydevices\x12\xae\x01\n\x1aUpdateMyDevicesIntegration\x12&.api.UpdateMyDevicesIntegrationRequest\x1a\x16.google.protobuf.Empty\"P\x82\xd3\xe4\x93\x02J\x1a\x45/api/applications/{integration.application_id}/integrations/mydevices:\x01*\x12\x9f\x01\n\x1a\x44\x65leteMyDevicesIntegration\x12&.api.DeleteMyDevicesIntegrationRequest\x1a\x16.google.protobuf.Empty\"A\x82\xd3\xe4\x93\x02;*9/api/applications/{application_id}/integrations/mydevices\x12\xae\x01\n\x1a\x43reateLoRaCloudIntegration\x12&.api.CreateLoRaCloudIntegrationRequest\x1a\x16.google.protobuf.Empty\"P\x82\xd3\xe4\x93\x02J\"E/api/applications/{integration.application_id}/integrations/loracloud:\x01*\x12\xa7\x01\n\x17GetLoRaCloudIntegration\x12#.api.GetLoRaCloudIntegrationRequest\x1a$.api.GetLoRaCloudIntegrationResponse\"A\x82\xd3\xe4\x93\x02;\x12\x39/api/applications/{application_id}/integrations/loracloud\x12\xae\x01\n\x1aUpdateLoRaCloudIntegration\x12&.api.UpdateLoRaCloudIntegrationRequest\x1a\x16.google.protobuf.Empty\"P\x82\xd3\xe4\x93\x02J\x1a\x45/api/applications/{integration.application_id}/integrations/loracloud:\x01*\x12\x9f\x01\n\x1a\x44\x65leteLoRaCloudIntegration\x12&.api.DeleteLoRaCloudIntegrationRequest\x1a\x16.google.protobuf.Empty\"A\x82\xd3\xe4\x93\x02;*9/api/applications/{application_id}/integrations/loracloud\x12\xb0\x01\n\x1a\x43reateGCPPubSubIntegration\x12&.api.CreateGCPPubSubIntegrationRequest\x1a\x16.google.protobuf.Empty\"R\x82\xd3\xe4\x93\x02L\"G/api/applications/{integration.application_id}/integrations/gcp-pub-sub:\x01*\x12\xa9\x01\n\x17GetGCPPubSubIntegration\x12#.api.GetGCPPubSubIntegrationRequest\x1a$.api.GetGCPPubSubIntegrationResponse\"C\x82\xd3\xe4\x93\x02=\x12;/api/applications/{application_id}/integrations/gcp-pub-sub\x12\xb0\x01\n\x1aUpdateGCPPubSubIntegration\x12&.api.UpdateGCPPubSubIntegrationRequest\x1a\x16.google.protobuf.Empty\"R\x82\xd3\xe4\x93\x02L\x1aG/api/applications/{integration.application_id}/integrations/gcp-pub-s
ub:\x01*\x12\xa1\x01\n\x1a\x44\x65leteGCPPubSubIntegration\x12&.api.DeleteGCPPubSubIntegrationRequest\x1a\x16.google.protobuf.Empty\"C\x82\xd3\xe4\x93\x02=*;/api/applications/{application_id}/integrations/gcp-pub-sub\x12\xa6\x01\n\x17\x43reateAWSSNSIntegration\x12#.api.CreateAWSSNSIntegrationRequest\x1a\x16.google.protobuf.Empty\"N\x82\xd3\xe4\x93\x02H\"C/api/applications/{integration.application_id}/integrations/aws-sns:\x01*\x12\x9c\x01\n\x14GetAWSSNSIntegration\x12 .api.GetAWSSNSIntegrationRequest\x1a!.api.GetAWSSNSIntegrationResponse\"?\x82\xd3\xe4\x93\x02\x39\x12\x37/api/applications/{application_id}/integrations/aws-sns\x12\xa6\x01\n\x17UpdateAWSSNSIntegration\x12#.api.UpdateAWSSNSIntegrationRequest\x1a\x16.google.protobuf.Empty\"N\x82\xd3\xe4\x93\x02H\x1a\x43/api/applications/{integration.application_id}/integrations/aws-sns:\x01*\x12\x97\x01\n\x17\x44\x65leteAWSSNSIntegration\x12#.api.DeleteAWSSNSIntegrationRequest\x1a\x16.google.protobuf.Empty\"?\x82\xd3\xe4\x93\x02\x39*7/api/applications/{application_id}/integrations/aws-sns\x12\xc2\x01\n CreateAzureServiceBusIntegration\x12,.api.CreateAzureServiceBusIntegrationRequest\x1a\x16.google.protobuf.Empty\"X\x82\xd3\xe4\x93\x02R\"M/api/applications/{integration.application_id}/integrations/azure-service-bus:\x01*\x12\xc1\x01\n\x1dGetAzureServiceBusIntegration\x12).api.GetAzureServiceBusIntegrationRequest\x1a*.api.GetAzureServiceBusIntegrationResponse\"I\x82\xd3\xe4\x93\x02\x43\x12\x41/api/applications/{application_id}/integrations/azure-service-bus\x12\xc2\x01\n UpdateAzureServiceBusIntegration\x12,.api.UpdateAzureServiceBusIntegrationRequest\x1a\x16.google.protobuf.Empty\"X\x82\xd3\xe4\x93\x02R\x1aM/api/applications/{integration.application_id}/integrations/azure-service-bus:\x01*\x12\xb3\x01\n DeleteAzureServiceBusIntegration\x12,.api.DeleteAzureServiceBusIntegrationRequest\x1a\x16.google.protobuf.Empty\"I\x82\xd3\xe4\x93\x02\x43*A/api/applications/{application_id}/integrations/azure-service-bus\x12\xb5\x01\n\x1c\x43reatePilotThingsIntegration\x12(.api.CreatePilotThingsIntegrationRequest\x1a\x16.google.protobuf.Empty\"S\x82\xd3\xe4\x93\x02M\"H/api/applications/{integration.application_id}/integrations/pilot-things:\x01*\x12\xb0\x01\n\x19GetPilotThingsIntegration\x12%.api.GetPilotThingsIntegrationRequest\x1a&.api.GetPilotThingsIntegrationResponse\"D\x82\xd3\xe4\x93\x02>\x12</api/applications/{application_id}/integrations/pilot-things\x12\xb5\x01\n\x1cUpdatePilotThingsIntegration\x12(.api.UpdatePilotThingsIntegrationRequest\x1a\x16.google.protobuf.Empty\"S\x82\xd3\xe4\x93\x02M\x1aH/api/applications/{integration.application_id}/integrations/pilot-things:\x01*\x12\xa6\x01\n\x1c\x44\x65letePilotThingsIntegration\x12(.api.DeletePilotThingsIntegrationRequest\x1a\x16.google.protobuf.Empty\"D\x82\xd3\xe4\x93\x02>*</api/applications/{application_id}/integrations/pilot-things\x12\x86\x01\n\x10ListIntegrations\x12\x1b.api.ListIntegrationRequest\x1a\x1c.api.ListIntegrationResponse\"7\x82\xd3\xe4\x93\x02\x31\x12//api/applications/{application_id}/integrations\x12\xe1\x01\n(GenerateMQTTIntegrationClientCertificate\x12\x34.api.GenerateMQTTIntegrationClientCertificateRequest\x1a\x35.api.GenerateMQTTIntegrationClientCertificateResponse\"H\x82\xd3\xe4\x93\x02\x42\"@/api/applications/{application_id}/integrations/mqtt/certificateB\\\n!io.chirpstack.api.as.external.apiZ7github.com/brocaar/chirpstack-api/go/v3/as/external/apib\x06proto3'
,
dependencies=[google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_INTEGRATIONKIND = _descriptor.EnumDescriptor(
name='IntegrationKind',
full_name='api.IntegrationKind',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='HTTP', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INFLUXDB', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THINGSBOARD', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MYDEVICES', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LORACLOUD', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GCP_PUBSUB', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AWS_SNS', index=6, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AZURE_SERVICE_BUS', index=7, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PILOT_THINGS', index=8, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MQTT_GLOBAL', index=9, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7606,
serialized_end=7781,
)
_sym_db.RegisterEnumDescriptor(_INTEGRATIONKIND)
IntegrationKind = enum_type_wrapper.EnumTypeWrapper(_INTEGRATIONKIND)
_MARSHALER = _descriptor.EnumDescriptor(
name='Marshaler',
full_name='api.Marshaler',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='JSON', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PROTOBUF', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='JSON_V3', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7783,
serialized_end=7831,
)
_sym_db.RegisterEnumDescriptor(_MARSHALER)
Marshaler = enum_type_wrapper.EnumTypeWrapper(_MARSHALER)
_INFLUXDBPRECISION = _descriptor.EnumDescriptor(
name='InfluxDBPrecision',
full_name='api.InfluxDBPrecision',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NS', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='U', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MS', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='S', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='M', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='H', index=5, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=7833,
serialized_end=7896,
)
_sym_db.RegisterEnumDescriptor(_INFLUXDBPRECISION)
InfluxDBPrecision = enum_type_wrapper.EnumTypeWrapper(_INFLUXDBPRECISION)
HTTP = 0
INFLUXDB = 1
THINGSBOARD = 2
MYDEVICES = 3
LORACLOUD = 4
GCP_PUBSUB = 5
AWS_SNS = 6
AZURE_SERVICE_BUS = 7
PILOT_THINGS = 8
MQTT_GLOBAL = 9
JSON = 0
PROTOBUF = 1
JSON_V3 = 2
NS = 0
U = 1
MS = 2
S = 3
M = 4
H = 5
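# Note (editorial, illustrative only): the EnumTypeWrapper objects defined above
# (IntegrationKind, Marshaler, InfluxDBPrecision) provide name/number lookups in
# addition to the module-level constants, e.g.:
#
#   IntegrationKind.Name(THINGSBOARD)   # -> 'THINGSBOARD'
#   Marshaler.Value('PROTOBUF')         # -> 1 (== PROTOBUF)
#   InfluxDBPrecision.Name(NS)          # -> 'NS'
#
# These lookups rely only on the enum descriptors declared in this generated module.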
_APPLICATION = _descriptor.Descriptor(
name='Application',
full_name='api.Application',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='api.Application.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='api.Application.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description', full_name='api.Application.description', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='organization_id', full_name='api.Application.organization_id', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='organizationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='service_profile_id', full_name='api.Application.service_profile_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='serviceProfileID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='payload_codec', full_name='api.Application.payload_codec', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='payload_encoder_script', full_name='api.Application.payload_encoder_script', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='payload_decoder_script', full_name='api.Application.payload_decoder_script', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=153,
serialized_end=387,
)
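# Note (editorial, illustrative only): the _APPLICATION descriptor above declares the
# fields of api.Application (id, name, description, organization_id,
# service_profile_id, payload_codec, payload_encoder_script, payload_decoder_script).
# Assuming the concrete message classes are built later in this generated module, as
# is usual for protoc output, an Application message could be constructed with
# keyword arguments matching those fields (all values below are hypothetical):
#
#   app = Application(
#       name='sensor-fleet',
#       description='demo application',
#       organization_id=1,
#       service_profile_id='example-service-profile-id',
#   )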
_APPLICATIONLISTITEM = _descriptor.Descriptor(
name='ApplicationListItem',
full_name='api.ApplicationListItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='api.ApplicationListItem.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='api.ApplicationListItem.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description', full_name='api.ApplicationListItem.description', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='organization_id', full_name='api.ApplicationListItem.organization_id', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='organizationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='service_profile_id', full_name='api.ApplicationListItem.service_profile_id', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='serviceProfileID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='service_profile_name', full_name='api.ApplicationListItem.service_profile_name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=390,
serialized_end=575,
)
_CREATEAPPLICATIONREQUEST = _descriptor.Descriptor(
name='CreateApplicationRequest',
full_name='api.CreateApplicationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application', full_name='api.CreateApplicationRequest.application', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=577,
serialized_end=642,
)
_CREATEAPPLICATIONRESPONSE = _descriptor.Descriptor(
name='CreateApplicationResponse',
full_name='api.CreateApplicationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='api.CreateApplicationResponse.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=644,
serialized_end=683,
)
_GETAPPLICATIONREQUEST = _descriptor.Descriptor(
name='GetApplicationRequest',
full_name='api.GetApplicationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='api.GetApplicationRequest.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=685,
serialized_end=720,
)
_GETAPPLICATIONRESPONSE = _descriptor.Descriptor(
name='GetApplicationResponse',
full_name='api.GetApplicationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application', full_name='api.GetApplicationResponse.application', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=722,
serialized_end=785,
)
_UPDATEAPPLICATIONREQUEST = _descriptor.Descriptor(
name='UpdateApplicationRequest',
full_name='api.UpdateApplicationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application', full_name='api.UpdateApplicationRequest.application', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=787,
serialized_end=852,
)
_DELETEAPPLICATIONREQUEST = _descriptor.Descriptor(
name='DeleteApplicationRequest',
full_name='api.DeleteApplicationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='api.DeleteApplicationRequest.id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=854,
serialized_end=892,
)
_LISTAPPLICATIONREQUEST = _descriptor.Descriptor(
name='ListApplicationRequest',
full_name='api.ListApplicationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='limit', full_name='api.ListApplicationRequest.limit', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='api.ListApplicationRequest.offset', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='organization_id', full_name='api.ListApplicationRequest.organization_id', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='organizationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='search', full_name='api.ListApplicationRequest.search', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=894,
serialized_end=1006,
)
_LISTAPPLICATIONRESPONSE = _descriptor.Descriptor(
name='ListApplicationResponse',
full_name='api.ListApplicationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='total_count', full_name='api.ListApplicationResponse.total_count', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='api.ListApplicationResponse.result', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1008,
serialized_end=1096,
)
_HTTPINTEGRATIONHEADER = _descriptor.Descriptor(
name='HTTPIntegrationHeader',
full_name='api.HTTPIntegrationHeader',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='api.HTTPIntegrationHeader.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='api.HTTPIntegrationHeader.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1098,
serialized_end=1149,
)
_HTTPINTEGRATION = _descriptor.Descriptor(
name='HTTPIntegration',
full_name='api.HTTPIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.HTTPIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='headers', full_name='api.HTTPIntegration.headers', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='uplink_data_url', full_name='api.HTTPIntegration.uplink_data_url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='uplinkDataURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='join_notification_url', full_name='api.HTTPIntegration.join_notification_url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='joinNotificationURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ack_notification_url', full_name='api.HTTPIntegration.ack_notification_url', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='ackNotificationURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error_notification_url', full_name='api.HTTPIntegration.error_notification_url', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='errorNotificationURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status_notification_url', full_name='api.HTTPIntegration.status_notification_url', index=6,
number=7, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='statusNotificationURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='location_notification_url', full_name='api.HTTPIntegration.location_notification_url', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='locationNotificationURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tx_ack_notification_url', full_name='api.HTTPIntegration.tx_ack_notification_url', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='txAckNotificationURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='integration_notification_url', full_name='api.HTTPIntegration.integration_notification_url', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='integrationNotificationURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='marshaler', full_name='api.HTTPIntegration.marshaler', index=10,
number=11, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='event_endpoint_url', full_name='api.HTTPIntegration.event_endpoint_url', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='eventEndpointURL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1152,
serialized_end=1767,
)
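# Note (editorial, illustrative only): api.HTTPIntegration carries both per-event
# notification URLs (uplink_data_url, join_notification_url, ...) and a single
# event_endpoint_url together with a Marshaler selection. Assuming the message
# classes are generated further down in this module, a minimal sketch of creating
# an HTTP integration request might look like (values are hypothetical):
#
#   integration = HTTPIntegration(
#       application_id=1,
#       event_endpoint_url='https://example.com/events',
#       marshaler=PROTOBUF,
#   )
#   req = CreateHTTPIntegrationRequest(integration=integration)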
_CREATEHTTPINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateHTTPIntegrationRequest',
full_name='api.CreateHTTPIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateHTTPIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1769,
serialized_end=1842,
)
_GETHTTPINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetHTTPIntegrationRequest',
full_name='api.GetHTTPIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetHTTPIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1844,
serialized_end=1910,
)
_GETHTTPINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetHTTPIntegrationResponse',
full_name='api.GetHTTPIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetHTTPIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1912,
serialized_end=1983,
)
_UPDATEHTTPINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateHTTPIntegrationRequest',
full_name='api.UpdateHTTPIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateHTTPIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1985,
serialized_end=2058,
)
_DELETEHTTPINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteHTTPIntegrationRequest',
full_name='api.DeleteHTTPIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteHTTPIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2060,
serialized_end=2129,
)
_LISTINTEGRATIONREQUEST = _descriptor.Descriptor(
name='ListIntegrationRequest',
full_name='api.ListIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.ListIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2131,
serialized_end=2194,
)
_INTEGRATIONLISTITEM = _descriptor.Descriptor(
name='IntegrationListItem',
full_name='api.IntegrationListItem',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='kind', full_name='api.IntegrationListItem.kind', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2196,
serialized_end=2253,
)
_LISTINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='ListIntegrationResponse',
full_name='api.ListIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='total_count', full_name='api.ListIntegrationResponse.total_count', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='result', full_name='api.ListIntegrationResponse.result', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2255,
serialized_end=2343,
)
_INFLUXDBINTEGRATION = _descriptor.Descriptor(
name='InfluxDBIntegration',
full_name='api.InfluxDBIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.InfluxDBIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='endpoint', full_name='api.InfluxDBIntegration.endpoint', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='db', full_name='api.InfluxDBIntegration.db', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='username', full_name='api.InfluxDBIntegration.username', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='password', full_name='api.InfluxDBIntegration.password', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='retention_policy_name', full_name='api.InfluxDBIntegration.retention_policy_name', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='precision', full_name='api.InfluxDBIntegration.precision', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2346,
serialized_end=2546,
)
_CREATEINFLUXDBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateInfluxDBIntegrationRequest',
full_name='api.CreateInfluxDBIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateInfluxDBIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2548,
serialized_end=2629,
)
_GETINFLUXDBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetInfluxDBIntegrationRequest',
full_name='api.GetInfluxDBIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetInfluxDBIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2631,
serialized_end=2701,
)
_GETINFLUXDBINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetInfluxDBIntegrationResponse',
full_name='api.GetInfluxDBIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetInfluxDBIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2703,
serialized_end=2782,
)
_UPDATEINFLUXDBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateInfluxDBIntegrationRequest',
full_name='api.UpdateInfluxDBIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateInfluxDBIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2784,
serialized_end=2865,
)
_DELETEINFLUXDBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteInfluxDBIntegrationRequest',
full_name='api.DeleteInfluxDBIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteInfluxDBIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2867,
serialized_end=2940,
)
_THINGSBOARDINTEGRATION = _descriptor.Descriptor(
name='ThingsBoardIntegration',
full_name='api.ThingsBoardIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.ThingsBoardIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server', full_name='api.ThingsBoardIntegration.server', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=2942,
serialized_end=3021,
)
_CREATETHINGSBOARDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateThingsBoardIntegrationRequest',
full_name='api.CreateThingsBoardIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateThingsBoardIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3023,
serialized_end=3110,
)
_GETTHINGSBOARDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetThingsBoardIntegrationRequest',
full_name='api.GetThingsBoardIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetThingsBoardIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3112,
serialized_end=3185,
)
_GETTHINGSBOARDINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetThingsBoardIntegrationResponse',
full_name='api.GetThingsBoardIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetThingsBoardIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3187,
serialized_end=3272,
)
_UPDATETHINGSBOARDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateThingsBoardIntegrationRequest',
full_name='api.UpdateThingsBoardIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateThingsBoardIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3274,
serialized_end=3361,
)
_DELETETHINGSBOARDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteThingsBoardIntegrationRequest',
full_name='api.DeleteThingsBoardIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteThingsBoardIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3363,
serialized_end=3439,
)
_MYDEVICESINTEGRATION = _descriptor.Descriptor(
name='MyDevicesIntegration',
full_name='api.MyDevicesIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.MyDevicesIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='endpoint', full_name='api.MyDevicesIntegration.endpoint', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3441,
serialized_end=3520,
)
_CREATEMYDEVICESINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateMyDevicesIntegrationRequest',
full_name='api.CreateMyDevicesIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateMyDevicesIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3522,
serialized_end=3605,
)
_GETMYDEVICESINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetMyDevicesIntegrationRequest',
full_name='api.GetMyDevicesIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetMyDevicesIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3607,
serialized_end=3678,
)
_GETMYDEVICESINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetMyDevicesIntegrationResponse',
full_name='api.GetMyDevicesIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetMyDevicesIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3680,
serialized_end=3761,
)
_UPDATEMYDEVICESINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateMyDevicesIntegrationRequest',
full_name='api.UpdateMyDevicesIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateMyDevicesIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3763,
serialized_end=3846,
)
_DELETEMYDEVICESINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteMyDevicesIntegrationRequest',
full_name='api.DeleteMyDevicesIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteMyDevicesIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3848,
serialized_end=3922,
)
_LORACLOUDINTEGRATION = _descriptor.Descriptor(
name='LoRaCloudIntegration',
full_name='api.LoRaCloudIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.LoRaCloudIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation', full_name='api.LoRaCloudIntegration.geolocation', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_token', full_name='api.LoRaCloudIntegration.geolocation_token', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_buffer_ttl', full_name='api.LoRaCloudIntegration.geolocation_buffer_ttl', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationBufferTTL', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_min_buffer_size', full_name='api.LoRaCloudIntegration.geolocation_min_buffer_size', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_tdoa', full_name='api.LoRaCloudIntegration.geolocation_tdoa', index=5,
number=6, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationTDOA', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_rssi', full_name='api.LoRaCloudIntegration.geolocation_rssi', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationRSSI', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_gnss', full_name='api.LoRaCloudIntegration.geolocation_gnss', index=7,
number=8, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationGNSS', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_gnss_payload_field', full_name='api.LoRaCloudIntegration.geolocation_gnss_payload_field', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationGNSSPayloadField', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_gnss_use_rx_time', full_name='api.LoRaCloudIntegration.geolocation_gnss_use_rx_time', index=9,
number=10, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationGNSSUseRxTime', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_wifi', full_name='api.LoRaCloudIntegration.geolocation_wifi', index=10,
number=11, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationWifi', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='geolocation_wifi_payload_field', full_name='api.LoRaCloudIntegration.geolocation_wifi_payload_field', index=11,
number=12, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='geolocationWifiPayloadField', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='das', full_name='api.LoRaCloudIntegration.das', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='das_token', full_name='api.LoRaCloudIntegration.das_token', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='das_modem_port', full_name='api.LoRaCloudIntegration.das_modem_port', index=14,
number=15, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='das_gnss_port', full_name='api.LoRaCloudIntegration.das_gnss_port', index=15,
number=16, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='dasGNSSPort', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='das_gnss_use_rx_time', full_name='api.LoRaCloudIntegration.das_gnss_use_rx_time', index=16,
number=17, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='dasGNSSUseRxTime', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='das_streaming_geoloc_workaround', full_name='api.LoRaCloudIntegration.das_streaming_geoloc_workaround', index=17,
number=18, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=3925,
serialized_end=4680,
)
_CREATELORACLOUDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateLoRaCloudIntegrationRequest',
full_name='api.CreateLoRaCloudIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateLoRaCloudIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4682,
serialized_end=4765,
)
_GETLORACLOUDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetLoRaCloudIntegrationRequest',
full_name='api.GetLoRaCloudIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetLoRaCloudIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4767,
serialized_end=4838,
)
_GETLORACLOUDINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetLoRaCloudIntegrationResponse',
full_name='api.GetLoRaCloudIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetLoRaCloudIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4840,
serialized_end=4921,
)
_UPDATELORACLOUDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateLoRaCloudIntegrationRequest',
full_name='api.UpdateLoRaCloudIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateLoRaCloudIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=4923,
serialized_end=5006,
)
_DELETELORACLOUDINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteLoRaCloudIntegrationRequest',
full_name='api.DeleteLoRaCloudIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteLoRaCloudIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5008,
serialized_end=5082,
)
_GCPPUBSUBINTEGRATION = _descriptor.Descriptor(
name='GCPPubSubIntegration',
full_name='api.GCPPubSubIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GCPPubSubIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='marshaler', full_name='api.GCPPubSubIntegration.marshaler', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='credentials_file', full_name='api.GCPPubSubIntegration.credentials_file', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='project_id', full_name='api.GCPPubSubIntegration.project_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='projectID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='topic_name', full_name='api.GCPPubSubIntegration.topic_name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5085,
serialized_end=5258,
)
_CREATEGCPPUBSUBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateGCPPubSubIntegrationRequest',
full_name='api.CreateGCPPubSubIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateGCPPubSubIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5260,
serialized_end=5343,
)
_GETGCPPUBSUBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetGCPPubSubIntegrationRequest',
full_name='api.GetGCPPubSubIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetGCPPubSubIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5345,
serialized_end=5416,
)
_GETGCPPUBSUBINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetGCPPubSubIntegrationResponse',
full_name='api.GetGCPPubSubIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetGCPPubSubIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5418,
serialized_end=5499,
)
_UPDATEGCPPUBSUBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateGCPPubSubIntegrationRequest',
full_name='api.UpdateGCPPubSubIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateGCPPubSubIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5501,
serialized_end=5584,
)
_DELETEGCPPUBSUBINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteGCPPubSubIntegrationRequest',
full_name='api.DeleteGCPPubSubIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteGCPPubSubIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5586,
serialized_end=5660,
)
_AWSSNSINTEGRATION = _descriptor.Descriptor(
name='AWSSNSIntegration',
full_name='api.AWSSNSIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.AWSSNSIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='marshaler', full_name='api.AWSSNSIntegration.marshaler', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='region', full_name='api.AWSSNSIntegration.region', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='access_key_id', full_name='api.AWSSNSIntegration.access_key_id', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='accessKeyID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='secret_access_key', full_name='api.AWSSNSIntegration.secret_access_key', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='topic_arn', full_name='api.AWSSNSIntegration.topic_arn', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='topicARN', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5663,
serialized_end=5864,
)
_CREATEAWSSNSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateAWSSNSIntegrationRequest',
full_name='api.CreateAWSSNSIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateAWSSNSIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5866,
serialized_end=5943,
)
_GETAWSSNSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetAWSSNSIntegrationRequest',
full_name='api.GetAWSSNSIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetAWSSNSIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=5945,
serialized_end=6013,
)
_GETAWSSNSINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetAWSSNSIntegrationResponse',
full_name='api.GetAWSSNSIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetAWSSNSIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6015,
serialized_end=6090,
)
_UPDATEAWSSNSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateAWSSNSIntegrationRequest',
full_name='api.UpdateAWSSNSIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateAWSSNSIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6092,
serialized_end=6169,
)
_DELETEAWSSNSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteAWSSNSIntegrationRequest',
full_name='api.DeleteAWSSNSIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteAWSSNSIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6171,
serialized_end=6242,
)
_AZURESERVICEBUSINTEGRATION = _descriptor.Descriptor(
name='AzureServiceBusIntegration',
full_name='api.AzureServiceBusIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.AzureServiceBusIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='marshaler', full_name='api.AzureServiceBusIntegration.marshaler', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='connection_string', full_name='api.AzureServiceBusIntegration.connection_string', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='publish_name', full_name='api.AzureServiceBusIntegration.publish_name', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6245,
serialized_end=6396,
)
_CREATEAZURESERVICEBUSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreateAzureServiceBusIntegrationRequest',
full_name='api.CreateAzureServiceBusIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreateAzureServiceBusIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6398,
serialized_end=6493,
)
_GETAZURESERVICEBUSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetAzureServiceBusIntegrationRequest',
full_name='api.GetAzureServiceBusIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetAzureServiceBusIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6495,
serialized_end=6572,
)
_GETAZURESERVICEBUSINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetAzureServiceBusIntegrationResponse',
full_name='api.GetAzureServiceBusIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetAzureServiceBusIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6574,
serialized_end=6667,
)
_UPDATEAZURESERVICEBUSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdateAzureServiceBusIntegrationRequest',
full_name='api.UpdateAzureServiceBusIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdateAzureServiceBusIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6669,
serialized_end=6764,
)
_DELETEAZURESERVICEBUSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeleteAzureServiceBusIntegrationRequest',
full_name='api.DeleteAzureServiceBusIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeleteAzureServiceBusIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6766,
serialized_end=6846,
)
_PILOTTHINGSINTEGRATION = _descriptor.Descriptor(
name='PilotThingsIntegration',
full_name='api.PilotThingsIntegration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.PilotThingsIntegration.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='server', full_name='api.PilotThingsIntegration.server', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='token', full_name='api.PilotThingsIntegration.token', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6848,
serialized_end=6942,
)
_CREATEPILOTTHINGSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='CreatePilotThingsIntegrationRequest',
full_name='api.CreatePilotThingsIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.CreatePilotThingsIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=6944,
serialized_end=7031,
)
_GETPILOTTHINGSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='GetPilotThingsIntegrationRequest',
full_name='api.GetPilotThingsIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GetPilotThingsIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7033,
serialized_end=7106,
)
_GETPILOTTHINGSINTEGRATIONRESPONSE = _descriptor.Descriptor(
name='GetPilotThingsIntegrationResponse',
full_name='api.GetPilotThingsIntegrationResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.GetPilotThingsIntegrationResponse.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7108,
serialized_end=7193,
)
_UPDATEPILOTTHINGSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='UpdatePilotThingsIntegrationRequest',
full_name='api.UpdatePilotThingsIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='integration', full_name='api.UpdatePilotThingsIntegrationRequest.integration', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7195,
serialized_end=7282,
)
_DELETEPILOTTHINGSINTEGRATIONREQUEST = _descriptor.Descriptor(
name='DeletePilotThingsIntegrationRequest',
full_name='api.DeletePilotThingsIntegrationRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.DeletePilotThingsIntegrationRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7284,
serialized_end=7360,
)
_GENERATEMQTTINTEGRATIONCLIENTCERTIFICATEREQUEST = _descriptor.Descriptor(
name='GenerateMQTTIntegrationClientCertificateRequest',
full_name='api.GenerateMQTTIntegrationClientCertificateRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='application_id', full_name='api.GenerateMQTTIntegrationClientCertificateRequest.application_id', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, json_name='applicationID', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7362,
serialized_end=7450,
)
_GENERATEMQTTINTEGRATIONCLIENTCERTIFICATERESPONSE = _descriptor.Descriptor(
name='GenerateMQTTIntegrationClientCertificateResponse',
full_name='api.GenerateMQTTIntegrationClientCertificateResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tls_cert', full_name='api.GenerateMQTTIntegrationClientCertificateResponse.tls_cert', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tls_key', full_name='api.GenerateMQTTIntegrationClientCertificateResponse.tls_key', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='ca_cert', full_name='api.GenerateMQTTIntegrationClientCertificateResponse.ca_cert', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expires_at', full_name='api.GenerateMQTTIntegrationClientCertificateResponse.expires_at', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=7453,
serialized_end=7603,
)
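# The Descriptor objects above only describe the wire format of each message
# (field names, numbers, types and serialized offsets).  The assignments that
# follow link every message- and enum-typed field to its target descriptor
# before the concrete classes are generated further below.
# Hedged introspection sketch (comment only; `field` is an illustrative name):
#
#     field = _GCPPUBSUBINTEGRATION.fields_by_name['marshaler']
#     field.enum_type is _MARSHALER    # True once the link below is applied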
_CREATEAPPLICATIONREQUEST.fields_by_name['application'].message_type = _APPLICATION
_GETAPPLICATIONRESPONSE.fields_by_name['application'].message_type = _APPLICATION
_UPDATEAPPLICATIONREQUEST.fields_by_name['application'].message_type = _APPLICATION
_LISTAPPLICATIONRESPONSE.fields_by_name['result'].message_type = _APPLICATIONLISTITEM
_HTTPINTEGRATION.fields_by_name['headers'].message_type = _HTTPINTEGRATIONHEADER
_HTTPINTEGRATION.fields_by_name['marshaler'].enum_type = _MARSHALER
_CREATEHTTPINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _HTTPINTEGRATION
_GETHTTPINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _HTTPINTEGRATION
_UPDATEHTTPINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _HTTPINTEGRATION
_INTEGRATIONLISTITEM.fields_by_name['kind'].enum_type = _INTEGRATIONKIND
_LISTINTEGRATIONRESPONSE.fields_by_name['result'].message_type = _INTEGRATIONLISTITEM
_INFLUXDBINTEGRATION.fields_by_name['precision'].enum_type = _INFLUXDBPRECISION
_CREATEINFLUXDBINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _INFLUXDBINTEGRATION
_GETINFLUXDBINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _INFLUXDBINTEGRATION
_UPDATEINFLUXDBINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _INFLUXDBINTEGRATION
_CREATETHINGSBOARDINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _THINGSBOARDINTEGRATION
_GETTHINGSBOARDINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _THINGSBOARDINTEGRATION
_UPDATETHINGSBOARDINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _THINGSBOARDINTEGRATION
_CREATEMYDEVICESINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _MYDEVICESINTEGRATION
_GETMYDEVICESINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _MYDEVICESINTEGRATION
_UPDATEMYDEVICESINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _MYDEVICESINTEGRATION
_CREATELORACLOUDINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _LORACLOUDINTEGRATION
_GETLORACLOUDINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _LORACLOUDINTEGRATION
_UPDATELORACLOUDINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _LORACLOUDINTEGRATION
_GCPPUBSUBINTEGRATION.fields_by_name['marshaler'].enum_type = _MARSHALER
_CREATEGCPPUBSUBINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _GCPPUBSUBINTEGRATION
_GETGCPPUBSUBINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _GCPPUBSUBINTEGRATION
_UPDATEGCPPUBSUBINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _GCPPUBSUBINTEGRATION
_AWSSNSINTEGRATION.fields_by_name['marshaler'].enum_type = _MARSHALER
_CREATEAWSSNSINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _AWSSNSINTEGRATION
_GETAWSSNSINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _AWSSNSINTEGRATION
_UPDATEAWSSNSINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _AWSSNSINTEGRATION
_AZURESERVICEBUSINTEGRATION.fields_by_name['marshaler'].enum_type = _MARSHALER
_CREATEAZURESERVICEBUSINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _AZURESERVICEBUSINTEGRATION
_GETAZURESERVICEBUSINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _AZURESERVICEBUSINTEGRATION
_UPDATEAZURESERVICEBUSINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _AZURESERVICEBUSINTEGRATION
_CREATEPILOTTHINGSINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _PILOTTHINGSINTEGRATION
_GETPILOTTHINGSINTEGRATIONRESPONSE.fields_by_name['integration'].message_type = _PILOTTHINGSINTEGRATION
_UPDATEPILOTTHINGSINTEGRATIONREQUEST.fields_by_name['integration'].message_type = _PILOTTHINGSINTEGRATION
_GENERATEMQTTINTEGRATIONCLIENTCERTIFICATERESPONSE.fields_by_name['expires_at'].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP
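# Hedged usage sketch (comment only): the line above ties `expires_at` to the
# google.protobuf.Timestamp well-known type, so the field exposes the usual
# Timestamp helpers after import.  The variable `resp` and the literal values
# are illustrative assumptions, not part of this file.
#
#     resp = GenerateMQTTIntegrationClientCertificateResponse(
#         tls_cert="-----BEGIN CERTIFICATE-----...",
#         tls_key="-----BEGIN PRIVATE KEY-----...",
#         ca_cert="-----BEGIN CERTIFICATE-----...",
#     )
#     resp.expires_at.GetCurrentTime()        # set to the current time
#     expires = resp.expires_at.ToDatetime()  # -> datetime.datetime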
DESCRIPTOR.message_types_by_name['Application'] = _APPLICATION
DESCRIPTOR.message_types_by_name['ApplicationListItem'] = _APPLICATIONLISTITEM
DESCRIPTOR.message_types_by_name['CreateApplicationRequest'] = _CREATEAPPLICATIONREQUEST
DESCRIPTOR.message_types_by_name['CreateApplicationResponse'] = _CREATEAPPLICATIONRESPONSE
DESCRIPTOR.message_types_by_name['GetApplicationRequest'] = _GETAPPLICATIONREQUEST
DESCRIPTOR.message_types_by_name['GetApplicationResponse'] = _GETAPPLICATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateApplicationRequest'] = _UPDATEAPPLICATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteApplicationRequest'] = _DELETEAPPLICATIONREQUEST
DESCRIPTOR.message_types_by_name['ListApplicationRequest'] = _LISTAPPLICATIONREQUEST
DESCRIPTOR.message_types_by_name['ListApplicationResponse'] = _LISTAPPLICATIONRESPONSE
DESCRIPTOR.message_types_by_name['HTTPIntegrationHeader'] = _HTTPINTEGRATIONHEADER
DESCRIPTOR.message_types_by_name['HTTPIntegration'] = _HTTPINTEGRATION
DESCRIPTOR.message_types_by_name['CreateHTTPIntegrationRequest'] = _CREATEHTTPINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetHTTPIntegrationRequest'] = _GETHTTPINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetHTTPIntegrationResponse'] = _GETHTTPINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateHTTPIntegrationRequest'] = _UPDATEHTTPINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteHTTPIntegrationRequest'] = _DELETEHTTPINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['ListIntegrationRequest'] = _LISTINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['IntegrationListItem'] = _INTEGRATIONLISTITEM
DESCRIPTOR.message_types_by_name['ListIntegrationResponse'] = _LISTINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['InfluxDBIntegration'] = _INFLUXDBINTEGRATION
DESCRIPTOR.message_types_by_name['CreateInfluxDBIntegrationRequest'] = _CREATEINFLUXDBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetInfluxDBIntegrationRequest'] = _GETINFLUXDBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetInfluxDBIntegrationResponse'] = _GETINFLUXDBINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateInfluxDBIntegrationRequest'] = _UPDATEINFLUXDBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteInfluxDBIntegrationRequest'] = _DELETEINFLUXDBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['ThingsBoardIntegration'] = _THINGSBOARDINTEGRATION
DESCRIPTOR.message_types_by_name['CreateThingsBoardIntegrationRequest'] = _CREATETHINGSBOARDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetThingsBoardIntegrationRequest'] = _GETTHINGSBOARDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetThingsBoardIntegrationResponse'] = _GETTHINGSBOARDINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateThingsBoardIntegrationRequest'] = _UPDATETHINGSBOARDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteThingsBoardIntegrationRequest'] = _DELETETHINGSBOARDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['MyDevicesIntegration'] = _MYDEVICESINTEGRATION
DESCRIPTOR.message_types_by_name['CreateMyDevicesIntegrationRequest'] = _CREATEMYDEVICESINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetMyDevicesIntegrationRequest'] = _GETMYDEVICESINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetMyDevicesIntegrationResponse'] = _GETMYDEVICESINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateMyDevicesIntegrationRequest'] = _UPDATEMYDEVICESINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteMyDevicesIntegrationRequest'] = _DELETEMYDEVICESINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['LoRaCloudIntegration'] = _LORACLOUDINTEGRATION
DESCRIPTOR.message_types_by_name['CreateLoRaCloudIntegrationRequest'] = _CREATELORACLOUDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetLoRaCloudIntegrationRequest'] = _GETLORACLOUDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetLoRaCloudIntegrationResponse'] = _GETLORACLOUDINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateLoRaCloudIntegrationRequest'] = _UPDATELORACLOUDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteLoRaCloudIntegrationRequest'] = _DELETELORACLOUDINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GCPPubSubIntegration'] = _GCPPUBSUBINTEGRATION
DESCRIPTOR.message_types_by_name['CreateGCPPubSubIntegrationRequest'] = _CREATEGCPPUBSUBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetGCPPubSubIntegrationRequest'] = _GETGCPPUBSUBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetGCPPubSubIntegrationResponse'] = _GETGCPPUBSUBINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateGCPPubSubIntegrationRequest'] = _UPDATEGCPPUBSUBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteGCPPubSubIntegrationRequest'] = _DELETEGCPPUBSUBINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['AWSSNSIntegration'] = _AWSSNSINTEGRATION
DESCRIPTOR.message_types_by_name['CreateAWSSNSIntegrationRequest'] = _CREATEAWSSNSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetAWSSNSIntegrationRequest'] = _GETAWSSNSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetAWSSNSIntegrationResponse'] = _GETAWSSNSINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateAWSSNSIntegrationRequest'] = _UPDATEAWSSNSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteAWSSNSIntegrationRequest'] = _DELETEAWSSNSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['AzureServiceBusIntegration'] = _AZURESERVICEBUSINTEGRATION
DESCRIPTOR.message_types_by_name['CreateAzureServiceBusIntegrationRequest'] = _CREATEAZURESERVICEBUSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetAzureServiceBusIntegrationRequest'] = _GETAZURESERVICEBUSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetAzureServiceBusIntegrationResponse'] = _GETAZURESERVICEBUSINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdateAzureServiceBusIntegrationRequest'] = _UPDATEAZURESERVICEBUSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeleteAzureServiceBusIntegrationRequest'] = _DELETEAZURESERVICEBUSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['PilotThingsIntegration'] = _PILOTTHINGSINTEGRATION
DESCRIPTOR.message_types_by_name['CreatePilotThingsIntegrationRequest'] = _CREATEPILOTTHINGSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetPilotThingsIntegrationRequest'] = _GETPILOTTHINGSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GetPilotThingsIntegrationResponse'] = _GETPILOTTHINGSINTEGRATIONRESPONSE
DESCRIPTOR.message_types_by_name['UpdatePilotThingsIntegrationRequest'] = _UPDATEPILOTTHINGSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['DeletePilotThingsIntegrationRequest'] = _DELETEPILOTTHINGSINTEGRATIONREQUEST
DESCRIPTOR.message_types_by_name['GenerateMQTTIntegrationClientCertificateRequest'] = _GENERATEMQTTINTEGRATIONCLIENTCERTIFICATEREQUEST
DESCRIPTOR.message_types_by_name['GenerateMQTTIntegrationClientCertificateResponse'] = _GENERATEMQTTINTEGRATIONCLIENTCERTIFICATERESPONSE
DESCRIPTOR.enum_types_by_name['IntegrationKind'] = _INTEGRATIONKIND
DESCRIPTOR.enum_types_by_name['Marshaler'] = _MARSHALER
DESCRIPTOR.enum_types_by_name['InfluxDBPrecision'] = _INFLUXDBPRECISION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
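# The blocks below build the concrete Python message classes from the
# descriptors above via _reflection.GeneratedProtocolMessageType and register
# each class with the default symbol database.
# Hedged sketch of typical use once this module is imported (the variable
# names and field values are illustrative assumptions):
#
#     integration = PilotThingsIntegration(
#         application_id=1,
#         server="https://pilot-things.example.com",  # assumed placeholder URL
#         token="example-token",                      # assumed placeholder value
#     )
#     payload = integration.SerializeToString()
#     restored = PilotThingsIntegration.FromString(payload)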
Application = _reflection.GeneratedProtocolMessageType('Application', (_message.Message,), {
'DESCRIPTOR' : _APPLICATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.Application)
})
_sym_db.RegisterMessage(Application)
ApplicationListItem = _reflection.GeneratedProtocolMessageType('ApplicationListItem', (_message.Message,), {
'DESCRIPTOR' : _APPLICATIONLISTITEM,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.ApplicationListItem)
})
_sym_db.RegisterMessage(ApplicationListItem)
CreateApplicationRequest = _reflection.GeneratedProtocolMessageType('CreateApplicationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEAPPLICATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateApplicationRequest)
})
_sym_db.RegisterMessage(CreateApplicationRequest)
CreateApplicationResponse = _reflection.GeneratedProtocolMessageType('CreateApplicationResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEAPPLICATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateApplicationResponse)
})
_sym_db.RegisterMessage(CreateApplicationResponse)
GetApplicationRequest = _reflection.GeneratedProtocolMessageType('GetApplicationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETAPPLICATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetApplicationRequest)
})
_sym_db.RegisterMessage(GetApplicationRequest)
GetApplicationResponse = _reflection.GeneratedProtocolMessageType('GetApplicationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETAPPLICATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetApplicationResponse)
})
_sym_db.RegisterMessage(GetApplicationResponse)
UpdateApplicationRequest = _reflection.GeneratedProtocolMessageType('UpdateApplicationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEAPPLICATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateApplicationRequest)
})
_sym_db.RegisterMessage(UpdateApplicationRequest)
DeleteApplicationRequest = _reflection.GeneratedProtocolMessageType('DeleteApplicationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEAPPLICATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteApplicationRequest)
})
_sym_db.RegisterMessage(DeleteApplicationRequest)
ListApplicationRequest = _reflection.GeneratedProtocolMessageType('ListApplicationRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTAPPLICATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.ListApplicationRequest)
})
_sym_db.RegisterMessage(ListApplicationRequest)
ListApplicationResponse = _reflection.GeneratedProtocolMessageType('ListApplicationResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTAPPLICATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.ListApplicationResponse)
})
_sym_db.RegisterMessage(ListApplicationResponse)
HTTPIntegrationHeader = _reflection.GeneratedProtocolMessageType('HTTPIntegrationHeader', (_message.Message,), {
'DESCRIPTOR' : _HTTPINTEGRATIONHEADER,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.HTTPIntegrationHeader)
})
_sym_db.RegisterMessage(HTTPIntegrationHeader)
HTTPIntegration = _reflection.GeneratedProtocolMessageType('HTTPIntegration', (_message.Message,), {
'DESCRIPTOR' : _HTTPINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.HTTPIntegration)
})
_sym_db.RegisterMessage(HTTPIntegration)
CreateHTTPIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateHTTPIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEHTTPINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateHTTPIntegrationRequest)
})
_sym_db.RegisterMessage(CreateHTTPIntegrationRequest)
GetHTTPIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetHTTPIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETHTTPINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetHTTPIntegrationRequest)
})
_sym_db.RegisterMessage(GetHTTPIntegrationRequest)
GetHTTPIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetHTTPIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETHTTPINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetHTTPIntegrationResponse)
})
_sym_db.RegisterMessage(GetHTTPIntegrationResponse)
UpdateHTTPIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateHTTPIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEHTTPINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateHTTPIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateHTTPIntegrationRequest)
DeleteHTTPIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteHTTPIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEHTTPINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteHTTPIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteHTTPIntegrationRequest)
ListIntegrationRequest = _reflection.GeneratedProtocolMessageType('ListIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _LISTINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.ListIntegrationRequest)
})
_sym_db.RegisterMessage(ListIntegrationRequest)
IntegrationListItem = _reflection.GeneratedProtocolMessageType('IntegrationListItem', (_message.Message,), {
'DESCRIPTOR' : _INTEGRATIONLISTITEM,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.IntegrationListItem)
})
_sym_db.RegisterMessage(IntegrationListItem)
ListIntegrationResponse = _reflection.GeneratedProtocolMessageType('ListIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.ListIntegrationResponse)
})
_sym_db.RegisterMessage(ListIntegrationResponse)
InfluxDBIntegration = _reflection.GeneratedProtocolMessageType('InfluxDBIntegration', (_message.Message,), {
'DESCRIPTOR' : _INFLUXDBINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.InfluxDBIntegration)
})
_sym_db.RegisterMessage(InfluxDBIntegration)
CreateInfluxDBIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateInfluxDBIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEINFLUXDBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateInfluxDBIntegrationRequest)
})
_sym_db.RegisterMessage(CreateInfluxDBIntegrationRequest)
GetInfluxDBIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetInfluxDBIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETINFLUXDBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetInfluxDBIntegrationRequest)
})
_sym_db.RegisterMessage(GetInfluxDBIntegrationRequest)
GetInfluxDBIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetInfluxDBIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETINFLUXDBINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetInfluxDBIntegrationResponse)
})
_sym_db.RegisterMessage(GetInfluxDBIntegrationResponse)
UpdateInfluxDBIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateInfluxDBIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEINFLUXDBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateInfluxDBIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateInfluxDBIntegrationRequest)
DeleteInfluxDBIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteInfluxDBIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEINFLUXDBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteInfluxDBIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteInfluxDBIntegrationRequest)
ThingsBoardIntegration = _reflection.GeneratedProtocolMessageType('ThingsBoardIntegration', (_message.Message,), {
'DESCRIPTOR' : _THINGSBOARDINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.ThingsBoardIntegration)
})
_sym_db.RegisterMessage(ThingsBoardIntegration)
CreateThingsBoardIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateThingsBoardIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATETHINGSBOARDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateThingsBoardIntegrationRequest)
})
_sym_db.RegisterMessage(CreateThingsBoardIntegrationRequest)
GetThingsBoardIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetThingsBoardIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETTHINGSBOARDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetThingsBoardIntegrationRequest)
})
_sym_db.RegisterMessage(GetThingsBoardIntegrationRequest)
GetThingsBoardIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetThingsBoardIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETTHINGSBOARDINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetThingsBoardIntegrationResponse)
})
_sym_db.RegisterMessage(GetThingsBoardIntegrationResponse)
UpdateThingsBoardIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateThingsBoardIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATETHINGSBOARDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateThingsBoardIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateThingsBoardIntegrationRequest)
DeleteThingsBoardIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteThingsBoardIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETETHINGSBOARDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteThingsBoardIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteThingsBoardIntegrationRequest)
MyDevicesIntegration = _reflection.GeneratedProtocolMessageType('MyDevicesIntegration', (_message.Message,), {
'DESCRIPTOR' : _MYDEVICESINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.MyDevicesIntegration)
})
_sym_db.RegisterMessage(MyDevicesIntegration)
CreateMyDevicesIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateMyDevicesIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEMYDEVICESINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateMyDevicesIntegrationRequest)
})
_sym_db.RegisterMessage(CreateMyDevicesIntegrationRequest)
GetMyDevicesIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetMyDevicesIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETMYDEVICESINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetMyDevicesIntegrationRequest)
})
_sym_db.RegisterMessage(GetMyDevicesIntegrationRequest)
GetMyDevicesIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetMyDevicesIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETMYDEVICESINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetMyDevicesIntegrationResponse)
})
_sym_db.RegisterMessage(GetMyDevicesIntegrationResponse)
UpdateMyDevicesIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateMyDevicesIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMYDEVICESINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateMyDevicesIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateMyDevicesIntegrationRequest)
DeleteMyDevicesIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteMyDevicesIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEMYDEVICESINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteMyDevicesIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteMyDevicesIntegrationRequest)
LoRaCloudIntegration = _reflection.GeneratedProtocolMessageType('LoRaCloudIntegration', (_message.Message,), {
'DESCRIPTOR' : _LORACLOUDINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.LoRaCloudIntegration)
})
_sym_db.RegisterMessage(LoRaCloudIntegration)
CreateLoRaCloudIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateLoRaCloudIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATELORACLOUDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateLoRaCloudIntegrationRequest)
})
_sym_db.RegisterMessage(CreateLoRaCloudIntegrationRequest)
GetLoRaCloudIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetLoRaCloudIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETLORACLOUDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetLoRaCloudIntegrationRequest)
})
_sym_db.RegisterMessage(GetLoRaCloudIntegrationRequest)
GetLoRaCloudIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetLoRaCloudIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETLORACLOUDINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetLoRaCloudIntegrationResponse)
})
_sym_db.RegisterMessage(GetLoRaCloudIntegrationResponse)
UpdateLoRaCloudIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateLoRaCloudIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATELORACLOUDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateLoRaCloudIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateLoRaCloudIntegrationRequest)
DeleteLoRaCloudIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteLoRaCloudIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETELORACLOUDINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteLoRaCloudIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteLoRaCloudIntegrationRequest)
GCPPubSubIntegration = _reflection.GeneratedProtocolMessageType('GCPPubSubIntegration', (_message.Message,), {
'DESCRIPTOR' : _GCPPUBSUBINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GCPPubSubIntegration)
})
_sym_db.RegisterMessage(GCPPubSubIntegration)
CreateGCPPubSubIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateGCPPubSubIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEGCPPUBSUBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateGCPPubSubIntegrationRequest)
})
_sym_db.RegisterMessage(CreateGCPPubSubIntegrationRequest)
GetGCPPubSubIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetGCPPubSubIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETGCPPUBSUBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetGCPPubSubIntegrationRequest)
})
_sym_db.RegisterMessage(GetGCPPubSubIntegrationRequest)
GetGCPPubSubIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetGCPPubSubIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETGCPPUBSUBINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetGCPPubSubIntegrationResponse)
})
_sym_db.RegisterMessage(GetGCPPubSubIntegrationResponse)
UpdateGCPPubSubIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateGCPPubSubIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEGCPPUBSUBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateGCPPubSubIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateGCPPubSubIntegrationRequest)
DeleteGCPPubSubIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteGCPPubSubIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEGCPPUBSUBINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteGCPPubSubIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteGCPPubSubIntegrationRequest)
AWSSNSIntegration = _reflection.GeneratedProtocolMessageType('AWSSNSIntegration', (_message.Message,), {
'DESCRIPTOR' : _AWSSNSINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.AWSSNSIntegration)
})
_sym_db.RegisterMessage(AWSSNSIntegration)
CreateAWSSNSIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateAWSSNSIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEAWSSNSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateAWSSNSIntegrationRequest)
})
_sym_db.RegisterMessage(CreateAWSSNSIntegrationRequest)
GetAWSSNSIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetAWSSNSIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETAWSSNSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetAWSSNSIntegrationRequest)
})
_sym_db.RegisterMessage(GetAWSSNSIntegrationRequest)
GetAWSSNSIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetAWSSNSIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETAWSSNSINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetAWSSNSIntegrationResponse)
})
_sym_db.RegisterMessage(GetAWSSNSIntegrationResponse)
UpdateAWSSNSIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateAWSSNSIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEAWSSNSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateAWSSNSIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateAWSSNSIntegrationRequest)
DeleteAWSSNSIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteAWSSNSIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEAWSSNSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteAWSSNSIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteAWSSNSIntegrationRequest)
AzureServiceBusIntegration = _reflection.GeneratedProtocolMessageType('AzureServiceBusIntegration', (_message.Message,), {
'DESCRIPTOR' : _AZURESERVICEBUSINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.AzureServiceBusIntegration)
})
_sym_db.RegisterMessage(AzureServiceBusIntegration)
CreateAzureServiceBusIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreateAzureServiceBusIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEAZURESERVICEBUSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreateAzureServiceBusIntegrationRequest)
})
_sym_db.RegisterMessage(CreateAzureServiceBusIntegrationRequest)
GetAzureServiceBusIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetAzureServiceBusIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETAZURESERVICEBUSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetAzureServiceBusIntegrationRequest)
})
_sym_db.RegisterMessage(GetAzureServiceBusIntegrationRequest)
GetAzureServiceBusIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetAzureServiceBusIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETAZURESERVICEBUSINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetAzureServiceBusIntegrationResponse)
})
_sym_db.RegisterMessage(GetAzureServiceBusIntegrationResponse)
UpdateAzureServiceBusIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdateAzureServiceBusIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEAZURESERVICEBUSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdateAzureServiceBusIntegrationRequest)
})
_sym_db.RegisterMessage(UpdateAzureServiceBusIntegrationRequest)
DeleteAzureServiceBusIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeleteAzureServiceBusIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEAZURESERVICEBUSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeleteAzureServiceBusIntegrationRequest)
})
_sym_db.RegisterMessage(DeleteAzureServiceBusIntegrationRequest)
PilotThingsIntegration = _reflection.GeneratedProtocolMessageType('PilotThingsIntegration', (_message.Message,), {
'DESCRIPTOR' : _PILOTTHINGSINTEGRATION,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.PilotThingsIntegration)
})
_sym_db.RegisterMessage(PilotThingsIntegration)
CreatePilotThingsIntegrationRequest = _reflection.GeneratedProtocolMessageType('CreatePilotThingsIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEPILOTTHINGSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.CreatePilotThingsIntegrationRequest)
})
_sym_db.RegisterMessage(CreatePilotThingsIntegrationRequest)
GetPilotThingsIntegrationRequest = _reflection.GeneratedProtocolMessageType('GetPilotThingsIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _GETPILOTTHINGSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetPilotThingsIntegrationRequest)
})
_sym_db.RegisterMessage(GetPilotThingsIntegrationRequest)
GetPilotThingsIntegrationResponse = _reflection.GeneratedProtocolMessageType('GetPilotThingsIntegrationResponse', (_message.Message,), {
'DESCRIPTOR' : _GETPILOTTHINGSINTEGRATIONRESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GetPilotThingsIntegrationResponse)
})
_sym_db.RegisterMessage(GetPilotThingsIntegrationResponse)
UpdatePilotThingsIntegrationRequest = _reflection.GeneratedProtocolMessageType('UpdatePilotThingsIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEPILOTTHINGSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.UpdatePilotThingsIntegrationRequest)
})
_sym_db.RegisterMessage(UpdatePilotThingsIntegrationRequest)
DeletePilotThingsIntegrationRequest = _reflection.GeneratedProtocolMessageType('DeletePilotThingsIntegrationRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEPILOTTHINGSINTEGRATIONREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.DeletePilotThingsIntegrationRequest)
})
_sym_db.RegisterMessage(DeletePilotThingsIntegrationRequest)
GenerateMQTTIntegrationClientCertificateRequest = _reflection.GeneratedProtocolMessageType('GenerateMQTTIntegrationClientCertificateRequest', (_message.Message,), {
'DESCRIPTOR' : _GENERATEMQTTINTEGRATIONCLIENTCERTIFICATEREQUEST,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GenerateMQTTIntegrationClientCertificateRequest)
})
_sym_db.RegisterMessage(GenerateMQTTIntegrationClientCertificateRequest)
GenerateMQTTIntegrationClientCertificateResponse = _reflection.GeneratedProtocolMessageType('GenerateMQTTIntegrationClientCertificateResponse', (_message.Message,), {
'DESCRIPTOR' : _GENERATEMQTTINTEGRATIONCLIENTCERTIFICATERESPONSE,
'__module__' : 'chirpstack_api.as_pb.external.api.application_pb2'
# @@protoc_insertion_point(class_scope:api.GenerateMQTTIntegrationClientCertificateResponse)
})
_sym_db.RegisterMessage(GenerateMQTTIntegrationClientCertificateResponse)
DESCRIPTOR._options = None
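# Note: the generated ServiceDescriptor below enumerates the ApplicationService RPC
# methods; each MethodDescriptor's serialized_options carries its google.api.http
# annotation (REST path and verb) encoded as bytes.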
_APPLICATIONSERVICE = _descriptor.ServiceDescriptor(
name='ApplicationService',
full_name='api.ApplicationService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=7899,
serialized_end=14997,
methods=[
_descriptor.MethodDescriptor(
name='Create',
full_name='api.ApplicationService.Create',
index=0,
containing_service=None,
input_type=_CREATEAPPLICATIONREQUEST,
output_type=_CREATEAPPLICATIONRESPONSE,
serialized_options=b'\202\323\344\223\002\026\"\021/api/applications:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Get',
full_name='api.ApplicationService.Get',
index=1,
containing_service=None,
input_type=_GETAPPLICATIONREQUEST,
output_type=_GETAPPLICATIONRESPONSE,
serialized_options=b'\202\323\344\223\002\030\022\026/api/applications/{id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Update',
full_name='api.ApplicationService.Update',
index=2,
containing_service=None,
input_type=_UPDATEAPPLICATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002\'\032\"/api/applications/{application.id}:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Delete',
full_name='api.ApplicationService.Delete',
index=3,
containing_service=None,
input_type=_DELETEAPPLICATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002\030*\026/api/applications/{id}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='List',
full_name='api.ApplicationService.List',
index=4,
containing_service=None,
input_type=_LISTAPPLICATIONREQUEST,
output_type=_LISTAPPLICATIONRESPONSE,
serialized_options=b'\202\323\344\223\002\023\022\021/api/applications',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateHTTPIntegration',
full_name='api.ApplicationService.CreateHTTPIntegration',
index=5,
containing_service=None,
input_type=_CREATEHTTPINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002E\"@/api/applications/{integration.application_id}/integrations/http:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetHTTPIntegration',
full_name='api.ApplicationService.GetHTTPIntegration',
index=6,
containing_service=None,
input_type=_GETHTTPINTEGRATIONREQUEST,
output_type=_GETHTTPINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\0026\0224/api/applications/{application_id}/integrations/http',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateHTTPIntegration',
full_name='api.ApplicationService.UpdateHTTPIntegration',
index=7,
containing_service=None,
input_type=_UPDATEHTTPINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002E\032@/api/applications/{integration.application_id}/integrations/http:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteHTTPIntegration',
full_name='api.ApplicationService.DeleteHTTPIntegration',
index=8,
containing_service=None,
input_type=_DELETEHTTPINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\0026*4/api/applications/{application_id}/integrations/http',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateInfluxDBIntegration',
full_name='api.ApplicationService.CreateInfluxDBIntegration',
index=9,
containing_service=None,
input_type=_CREATEINFLUXDBINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002I\"D/api/applications/{integration.application_id}/integrations/influxdb:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetInfluxDBIntegration',
full_name='api.ApplicationService.GetInfluxDBIntegration',
index=10,
containing_service=None,
input_type=_GETINFLUXDBINTEGRATIONREQUEST,
output_type=_GETINFLUXDBINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\002:\0228/api/applications/{application_id}/integrations/influxdb',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateInfluxDBIntegration',
full_name='api.ApplicationService.UpdateInfluxDBIntegration',
index=11,
containing_service=None,
input_type=_UPDATEINFLUXDBINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002I\032D/api/applications/{integration.application_id}/integrations/influxdb:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteInfluxDBIntegration',
full_name='api.ApplicationService.DeleteInfluxDBIntegration',
index=12,
containing_service=None,
input_type=_DELETEINFLUXDBINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002:*8/api/applications/{application_id}/integrations/influxdb',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateThingsBoardIntegration',
full_name='api.ApplicationService.CreateThingsBoardIntegration',
index=13,
containing_service=None,
input_type=_CREATETHINGSBOARDINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002L\"G/api/applications/{integration.application_id}/integrations/thingsboard:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetThingsBoardIntegration',
full_name='api.ApplicationService.GetThingsBoardIntegration',
index=14,
containing_service=None,
input_type=_GETTHINGSBOARDINTEGRATIONREQUEST,
output_type=_GETTHINGSBOARDINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\002=\022;/api/applications/{application_id}/integrations/thingsboard',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateThingsBoardIntegration',
full_name='api.ApplicationService.UpdateThingsBoardIntegration',
index=15,
containing_service=None,
input_type=_UPDATETHINGSBOARDINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002L\032G/api/applications/{integration.application_id}/integrations/thingsboard:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteThingsBoardIntegration',
full_name='api.ApplicationService.DeleteThingsBoardIntegration',
index=16,
containing_service=None,
input_type=_DELETETHINGSBOARDINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002=*;/api/applications/{application_id}/integrations/thingsboard',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateMyDevicesIntegration',
full_name='api.ApplicationService.CreateMyDevicesIntegration',
index=17,
containing_service=None,
input_type=_CREATEMYDEVICESINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002J\"E/api/applications/{integration.application_id}/integrations/mydevices:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetMyDevicesIntegration',
full_name='api.ApplicationService.GetMyDevicesIntegration',
index=18,
containing_service=None,
input_type=_GETMYDEVICESINTEGRATIONREQUEST,
output_type=_GETMYDEVICESINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\002;\0229/api/applications/{application_id}/integrations/mydevices',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateMyDevicesIntegration',
full_name='api.ApplicationService.UpdateMyDevicesIntegration',
index=19,
containing_service=None,
input_type=_UPDATEMYDEVICESINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002J\032E/api/applications/{integration.application_id}/integrations/mydevices:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteMyDevicesIntegration',
full_name='api.ApplicationService.DeleteMyDevicesIntegration',
index=20,
containing_service=None,
input_type=_DELETEMYDEVICESINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002;*9/api/applications/{application_id}/integrations/mydevices',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateLoRaCloudIntegration',
full_name='api.ApplicationService.CreateLoRaCloudIntegration',
index=21,
containing_service=None,
input_type=_CREATELORACLOUDINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002J\"E/api/applications/{integration.application_id}/integrations/loracloud:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetLoRaCloudIntegration',
full_name='api.ApplicationService.GetLoRaCloudIntegration',
index=22,
containing_service=None,
input_type=_GETLORACLOUDINTEGRATIONREQUEST,
output_type=_GETLORACLOUDINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\002;\0229/api/applications/{application_id}/integrations/loracloud',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateLoRaCloudIntegration',
full_name='api.ApplicationService.UpdateLoRaCloudIntegration',
index=23,
containing_service=None,
input_type=_UPDATELORACLOUDINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002J\032E/api/applications/{integration.application_id}/integrations/loracloud:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteLoRaCloudIntegration',
full_name='api.ApplicationService.DeleteLoRaCloudIntegration',
index=24,
containing_service=None,
input_type=_DELETELORACLOUDINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002;*9/api/applications/{application_id}/integrations/loracloud',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateGCPPubSubIntegration',
full_name='api.ApplicationService.CreateGCPPubSubIntegration',
index=25,
containing_service=None,
input_type=_CREATEGCPPUBSUBINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002L\"G/api/applications/{integration.application_id}/integrations/gcp-pub-sub:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetGCPPubSubIntegration',
full_name='api.ApplicationService.GetGCPPubSubIntegration',
index=26,
containing_service=None,
input_type=_GETGCPPUBSUBINTEGRATIONREQUEST,
output_type=_GETGCPPUBSUBINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\002=\022;/api/applications/{application_id}/integrations/gcp-pub-sub',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateGCPPubSubIntegration',
full_name='api.ApplicationService.UpdateGCPPubSubIntegration',
index=27,
containing_service=None,
input_type=_UPDATEGCPPUBSUBINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002L\032G/api/applications/{integration.application_id}/integrations/gcp-pub-sub:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteGCPPubSubIntegration',
full_name='api.ApplicationService.DeleteGCPPubSubIntegration',
index=28,
containing_service=None,
input_type=_DELETEGCPPUBSUBINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002=*;/api/applications/{application_id}/integrations/gcp-pub-sub',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateAWSSNSIntegration',
full_name='api.ApplicationService.CreateAWSSNSIntegration',
index=29,
containing_service=None,
input_type=_CREATEAWSSNSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002H\"C/api/applications/{integration.application_id}/integrations/aws-sns:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetAWSSNSIntegration',
full_name='api.ApplicationService.GetAWSSNSIntegration',
index=30,
containing_service=None,
input_type=_GETAWSSNSINTEGRATIONREQUEST,
output_type=_GETAWSSNSINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\0029\0227/api/applications/{application_id}/integrations/aws-sns',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateAWSSNSIntegration',
full_name='api.ApplicationService.UpdateAWSSNSIntegration',
index=31,
containing_service=None,
input_type=_UPDATEAWSSNSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002H\032C/api/applications/{integration.application_id}/integrations/aws-sns:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteAWSSNSIntegration',
full_name='api.ApplicationService.DeleteAWSSNSIntegration',
index=32,
containing_service=None,
input_type=_DELETEAWSSNSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\0029*7/api/applications/{application_id}/integrations/aws-sns',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateAzureServiceBusIntegration',
full_name='api.ApplicationService.CreateAzureServiceBusIntegration',
index=33,
containing_service=None,
input_type=_CREATEAZURESERVICEBUSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002R\"M/api/applications/{integration.application_id}/integrations/azure-service-bus:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetAzureServiceBusIntegration',
full_name='api.ApplicationService.GetAzureServiceBusIntegration',
index=34,
containing_service=None,
input_type=_GETAZURESERVICEBUSINTEGRATIONREQUEST,
output_type=_GETAZURESERVICEBUSINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\002C\022A/api/applications/{application_id}/integrations/azure-service-bus',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateAzureServiceBusIntegration',
full_name='api.ApplicationService.UpdateAzureServiceBusIntegration',
index=35,
containing_service=None,
input_type=_UPDATEAZURESERVICEBUSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002R\032M/api/applications/{integration.application_id}/integrations/azure-service-bus:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteAzureServiceBusIntegration',
full_name='api.ApplicationService.DeleteAzureServiceBusIntegration',
index=36,
containing_service=None,
input_type=_DELETEAZURESERVICEBUSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002C*A/api/applications/{application_id}/integrations/azure-service-bus',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreatePilotThingsIntegration',
full_name='api.ApplicationService.CreatePilotThingsIntegration',
index=37,
containing_service=None,
input_type=_CREATEPILOTTHINGSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002M\"H/api/applications/{integration.application_id}/integrations/pilot-things:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetPilotThingsIntegration',
full_name='api.ApplicationService.GetPilotThingsIntegration',
index=38,
containing_service=None,
input_type=_GETPILOTTHINGSINTEGRATIONREQUEST,
output_type=_GETPILOTTHINGSINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\002>\022</api/applications/{application_id}/integrations/pilot-things',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdatePilotThingsIntegration',
full_name='api.ApplicationService.UpdatePilotThingsIntegration',
index=39,
containing_service=None,
input_type=_UPDATEPILOTTHINGSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002M\032H/api/applications/{integration.application_id}/integrations/pilot-things:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeletePilotThingsIntegration',
full_name='api.ApplicationService.DeletePilotThingsIntegration',
index=40,
containing_service=None,
input_type=_DELETEPILOTTHINGSINTEGRATIONREQUEST,
output_type=google_dot_protobuf_dot_empty__pb2._EMPTY,
serialized_options=b'\202\323\344\223\002>*</api/applications/{application_id}/integrations/pilot-things',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ListIntegrations',
full_name='api.ApplicationService.ListIntegrations',
index=41,
containing_service=None,
input_type=_LISTINTEGRATIONREQUEST,
output_type=_LISTINTEGRATIONRESPONSE,
serialized_options=b'\202\323\344\223\0021\022//api/applications/{application_id}/integrations',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GenerateMQTTIntegrationClientCertificate',
full_name='api.ApplicationService.GenerateMQTTIntegrationClientCertificate',
index=42,
containing_service=None,
input_type=_GENERATEMQTTINTEGRATIONCLIENTCERTIFICATEREQUEST,
output_type=_GENERATEMQTTINTEGRATIONCLIENTCERTIFICATERESPONSE,
serialized_options=b'\202\323\344\223\002B\"@/api/applications/{application_id}/integrations/mqtt/certificate',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_APPLICATIONSERVICE)
DESCRIPTOR.services_by_name['ApplicationService'] = _APPLICATIONSERVICE
# @@protoc_insertion_point(module_scope)
| 45.520703
| 21,023
| 0.787211
|
e7284e573b8cf4ac4dba6d3798d726c9029cc293
| 18,660
|
py
|
Python
|
src/streamlink/session.py
|
kronoc/streamlink
|
3fc9da85f287d344c5ebe1d3881b4b481f0c39a6
|
[
"BSD-2-Clause"
] | null | null | null |
src/streamlink/session.py
|
kronoc/streamlink
|
3fc9da85f287d344c5ebe1d3881b4b481f0c39a6
|
[
"BSD-2-Clause"
] | null | null | null |
src/streamlink/session.py
|
kronoc/streamlink
|
3fc9da85f287d344c5ebe1d3881b4b481f0c39a6
|
[
"BSD-2-Clause"
] | null | null | null |
import logging
import pkgutil
from collections import OrderedDict
from functools import lru_cache
from socket import AF_INET, AF_INET6
from typing import Dict, Optional, Type
import requests
import requests.packages.urllib3.util.connection as urllib3_connection
from requests.packages.urllib3.util.connection import allowed_gai_family
from streamlink import __version__, plugins
from streamlink.compat import is_win32
from streamlink.exceptions import NoPluginError, PluginError
from streamlink.logger import StreamlinkLogger
from streamlink.options import Options
from streamlink.plugin.api.http_session import HTTPSession
from streamlink.plugin.plugin import Matcher, NORMAL_PRIORITY, NO_PRIORITY, Plugin
from streamlink.utils.l10n import Localization
from streamlink.utils.module import load_module
from streamlink.utils.url import update_scheme
# Ensure that the Logger class returned is Streamlink's when using the API (for backwards compatibility)
logging.setLoggerClass(StreamlinkLogger)
log = logging.getLogger(__name__)
class PythonDeprecatedWarning(UserWarning):
pass
class Streamlink:
"""A Streamlink session is used to keep track of plugins,
options and log settings."""
def __init__(self, options=None):
self.http = HTTPSession()
self.options = Options({
"interface": None,
"ipv4": False,
"ipv6": False,
"hds-live-edge": 10.0,
"hls-live-edge": 3,
"hls-segment-ignore-names": [],
"hls-segment-stream-data": False,
"hls-playlist-reload-attempts": 3,
"hls-playlist-reload-time": "default",
"hls-start-offset": 0,
"hls-duration": None,
"ringbuffer-size": 1024 * 1024 * 16, # 16 MB
"rtmp-rtmpdump": is_win32 and "rtmpdump.exe" or "rtmpdump",
"rtmp-proxy": None,
"stream-segment-attempts": 3,
"stream-segment-threads": 1,
"stream-segment-timeout": 10.0,
"stream-timeout": 60.0,
"subprocess-errorlog": False,
"subprocess-errorlog-path": None,
"ffmpeg-ffmpeg": None,
"ffmpeg-fout": None,
"ffmpeg-video-transcode": None,
"ffmpeg-audio-transcode": None,
"ffmpeg-copyts": False,
"ffmpeg-start-at-zero": False,
"mux-subtitles": False,
"locale": None,
"user-input-requester": None
})
if options:
self.options.update(options)
self.plugins: Dict[str, Type[Plugin]] = OrderedDict({})
self.load_builtin_plugins()
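    # Note: the constructor only seeds the Options store with the defaults above;
    # keys that need side effects (interface, ipv4/ipv6, the http-* family) are
    # applied when set_option() below is called.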
def set_option(self, key, value):
"""Sets general options used by plugins and streams originating
from this session object.
:param key: key of the option
:param value: value to set the option to
**Available options**:
======================== =========================================
interface (str) Set the network interface,
default: ``None``
ipv4 (bool) Resolve address names to IPv4 only.
This option overrides ipv6, default: ``False``
ipv6 (bool) Resolve address names to IPv6 only.
This option overrides ipv4, default: ``False``
hds-live-edge (float) Specify the time live HDS
streams will start from the edge of
stream, default: ``10.0``
hls-live-edge (int) How many segments from the end
to start live streams on, default: ``3``
hls-segment-ignore-names (str[]) List of segment names without
file endings which should get filtered out,
default: ``[]``
hls-segment-stream-data (bool) Stream HLS segment downloads,
default: ``False``
        http-proxy               (str) Specify an HTTP proxy to use for
all HTTP requests
        https-proxy              (str) Specify an HTTPS proxy to use for
all HTTPS requests
http-cookies (dict or str) A dict or a semi-colon (;)
delimited str of cookies to add to each
HTTP request, e.g. ``foo=bar;baz=qux``
http-headers (dict or str) A dict or semi-colon (;)
delimited str of headers to add to each
HTTP request, e.g. ``foo=bar;baz=qux``
        http-query-params        (dict or str) A dict or an ampersand (&)
delimited string of query parameters to
add to each HTTP request,
e.g. ``foo=bar&baz=qux``
http-trust-env (bool) Trust HTTP settings set in the
environment, such as environment
variables (HTTP_PROXY, etc) and
~/.netrc authentication
http-ssl-verify (bool) Verify SSL certificates,
default: ``True``
http-ssl-cert (str or tuple) SSL certificate to use,
can be either a .pem file (str) or a
.crt/.key pair (tuple)
http-timeout (float) General timeout used by all HTTP
requests except the ones covered by
other options, default: ``20.0``
subprocess-errorlog (bool) Log errors from subprocesses to
a file located in the temp directory
subprocess-errorlog-path (str) Log errors from subprocesses to
a specific file
ringbuffer-size (int) The size of the internal ring
buffer used by most stream types,
default: ``16777216`` (16MB)
rtmp-proxy (str) Specify a proxy (SOCKS) that RTMP
streams will use
rtmp-rtmpdump (str) Specify the location of the
rtmpdump executable used by RTMP streams,
e.g. ``/usr/local/bin/rtmpdump``
ffmpeg-ffmpeg (str) Specify the location of the
                                 ffmpeg executable used by muxing streams,
e.g. ``/usr/local/bin/ffmpeg``
ffmpeg-verbose (bool) Log stderr from ffmpeg to the
console
ffmpeg-verbose-path (str) Specify the location of the
ffmpeg stderr log file
ffmpeg-fout (str) The output file format
when muxing with ffmpeg
e.g. ``matroska``
ffmpeg-video-transcode (str) The codec to use if transcoding
video when muxing with ffmpeg
e.g. ``h264``
ffmpeg-audio-transcode (str) The codec to use if transcoding
audio when muxing with ffmpeg
e.g. ``aac``
ffmpeg-copyts (bool) When used with ffmpeg, do not shift input timestamps.
ffmpeg-start-at-zero (bool) When used with ffmpeg and copyts,
shift input timestamps so they start at zero
default: ``False``
mux-subtitles (bool) Mux available subtitles into the
output stream.
stream-segment-attempts (int) How many attempts should be done
to download each segment, default: ``3``.
stream-segment-threads (int) The size of the thread pool used
to download segments, default: ``1``.
stream-segment-timeout (float) Segment connect and read
timeout, default: ``10.0``.
stream-timeout (float) Timeout for reading data from
stream, default: ``60.0``.
locale (str) Locale setting, in the RFC 1766 format
                                 e.g. en_US or es_ES,
default: ``system locale``.
user-input-requester (UserInputRequester) instance of UserInputRequester
to collect input from the user at runtime. Must be
set before the plugins are loaded.
                                 default: ``None``.
======================== =========================================
"""
if key == "interface":
for scheme, adapter in self.http.adapters.items():
if scheme not in ("http://", "https://"):
continue
if not value:
                    adapter.poolmanager.connection_pool_kw.pop("source_address", None)
else:
adapter.poolmanager.connection_pool_kw.update(
# https://docs.python.org/3/library/socket.html#socket.create_connection
source_address=(value, 0)
)
self.options.set(key, None if not value else value)
elif key == "ipv4" or key == "ipv6":
self.options.set(key, value)
if value:
self.options.set("ipv6" if key == "ipv4" else "ipv4", False)
urllib3_connection.allowed_gai_family = \
(lambda: AF_INET) if key == "ipv4" else (lambda: AF_INET6)
else:
urllib3_connection.allowed_gai_family = allowed_gai_family
elif key in ("http-proxy", "https-proxy"):
self.http.proxies["http"] = update_scheme("https://", value, force=False)
self.http.proxies["https"] = self.http.proxies["http"]
if key == "https-proxy":
log.info("The https-proxy option has been deprecated in favour of a single http-proxy option")
elif key == "http-cookies":
if isinstance(value, dict):
self.http.cookies.update(value)
else:
self.http.parse_cookies(value)
elif key == "http-headers":
if isinstance(value, dict):
self.http.headers.update(value)
else:
self.http.parse_headers(value)
elif key == "http-query-params":
if isinstance(value, dict):
self.http.params.update(value)
else:
self.http.parse_query_params(value)
elif key == "http-trust-env":
self.http.trust_env = value
elif key == "http-ssl-verify":
self.http.verify = value
elif key == "http-disable-dh":
if value:
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS += ':!DH'
try:
requests.packages.urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST = \
requests.packages.urllib3.util.ssl_.DEFAULT_CIPHERS.encode("ascii")
except AttributeError:
# no ssl to disable the cipher on
pass
elif key == "http-ssl-cert":
self.http.cert = value
elif key == "http-timeout":
self.http.timeout = value
# deprecated: {dash,hds,hls}-segment-attempts
elif key in ("dash-segment-attempts", "hds-segment-attempts", "hls-segment-attempts"):
self.options.set("stream-segment-attempts", int(value))
# deprecated: {dash,hds,hls}-segment-threads
elif key in ("dash-segment-threads", "hds-segment-threads", "hls-segment-threads"):
self.options.set("stream-segment-threads", int(value))
# deprecated: {dash,hds,hls}-segment-timeout
elif key in ("dash-segment-timeout", "hds-segment-timeout", "hls-segment-timeout"):
self.options.set("stream-segment-timeout", float(value))
# deprecated: {hds,hls,rtmp,dash,http-stream}-timeout
elif key in ("dash-timeout", "hds-timeout", "hls-timeout", "http-stream-timeout", "rtmp-timeout"):
self.options.set("stream-timeout", float(value))
else:
self.options.set(key, value)
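        # Illustrative example (names and values hypothetical): calling
        #   session.set_option("hls-segment-timeout", 20.0)
        # is routed through the deprecation branch above and ends up stored as the
        # generic "stream-segment-timeout" option.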
def get_option(self, key):
"""Returns current value of specified option.
:param key: key of the option
"""
if key == "http-proxy":
return self.http.proxies.get("http")
elif key == "https-proxy":
return self.http.proxies.get("https")
elif key == "http-cookies":
return self.http.cookies
elif key == "http-headers":
return self.http.headers
elif key == "http-query-params":
return self.http.params
elif key == "http-trust-env":
return self.http.trust_env
elif key == "http-ssl-verify":
return self.http.verify
elif key == "http-ssl-cert":
return self.http.cert
elif key == "http-timeout":
return self.http.timeout
else:
return self.options.get(key)
def set_plugin_option(self, plugin, key, value):
"""Sets plugin specific options used by plugins originating
from this session object.
:param plugin: name of the plugin
:param key: key of the option
:param value: value to set the option to
"""
if plugin in self.plugins:
plugin = self.plugins[plugin]
plugin.set_option(key, value)
def get_plugin_option(self, plugin, key):
"""Returns current value of plugin specific option.
:param plugin: name of the plugin
:param key: key of the option
"""
if plugin in self.plugins:
plugin = self.plugins[plugin]
return plugin.get_option(key)
@lru_cache(maxsize=128)
def resolve_url(self, url: str, follow_redirect: bool = True) -> Plugin:
"""Attempts to find a plugin that can use this URL.
        The default protocol (https) will be prefixed to the URL if
not specified.
Raises :exc:`NoPluginError` on failure.
:param url: a URL to match against loaded plugins
:param follow_redirect: follow redirects
"""
url = update_scheme("https://", url, force=False)
matcher: Matcher
candidate: Optional[Type[Plugin]] = None
priority = NO_PRIORITY
for name, plugin in self.plugins.items():
if plugin.matchers:
for matcher in plugin.matchers:
if matcher.priority > priority and matcher.pattern.match(url) is not None:
candidate = plugin
priority = matcher.priority
# TODO: remove deprecated plugin resolver
elif hasattr(plugin, "can_handle_url") and callable(plugin.can_handle_url) and plugin.can_handle_url(url):
prio = plugin.priority(url) if hasattr(plugin, "priority") and callable(plugin.priority) else NORMAL_PRIORITY
if prio > priority:
log.info(f"Resolved plugin {name} with deprecated can_handle_url API")
candidate = plugin
priority = prio
if candidate:
return candidate(url)
if follow_redirect:
# Attempt to handle a redirect URL
try:
res = self.http.head(url, allow_redirects=True, acceptable_status=[501])
# Fall back to GET request if server doesn't handle HEAD.
if res.status_code == 501:
res = self.http.get(url, stream=True)
if res.url != url:
return self.resolve_url(res.url, follow_redirect=follow_redirect)
except PluginError:
pass
raise NoPluginError
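        # Note: resolve_url() is wrapped in lru_cache, so repeated lookups of the
        # same URL (and follow_redirect flag) return the same Plugin instance
        # rather than constructing a new one each time.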
def resolve_url_no_redirect(self, url):
"""Attempts to find a plugin that can use this URL.
        The default protocol (https) will be prefixed to the URL if
not specified.
Raises :exc:`NoPluginError` on failure.
:param url: a URL to match against loaded plugins
"""
return self.resolve_url(url, follow_redirect=False)
def streams(self, url, **params):
"""Attempts to find a plugin and extract streams from the *url*.
*params* are passed to :func:`Plugin.streams`.
Raises :exc:`NoPluginError` if no plugin is found.
"""
plugin = self.resolve_url(url)
return plugin.streams(**params)
def get_plugins(self):
"""Returns the loaded plugins for the session."""
return self.plugins
def load_builtin_plugins(self):
self.load_plugins(plugins.__path__[0])
def load_plugins(self, path: str) -> bool:
"""Attempt to load plugins from the path specified.
:param path: full path to a directory where to look for plugins
:return: success
"""
success = False
user_input_requester = self.get_option("user-input-requester")
for loader, name, ispkg in pkgutil.iter_modules([path]):
# set the full plugin module name
module_name = f"streamlink.plugins.{name}"
try:
mod = load_module(module_name, path)
except ImportError:
log.exception(f"Failed to load plugin {name} from {path}\n")
continue
if not hasattr(mod, "__plugin__") or not issubclass(mod.__plugin__, Plugin):
continue
success = True
plugin = mod.__plugin__
plugin.bind(self, name, user_input_requester)
if plugin.module in self.plugins:
log.debug(f"Plugin {plugin.module} is being overridden by {mod.__file__}")
self.plugins[plugin.module] = plugin
return success
@property
def version(self):
return __version__
@property
def localization(self):
return Localization(self.get_option("locale"))
__all__ = ["Streamlink"]
| 40.215517
| 125
| 0.542551
|
9337bcfaa487cb162c25b2c3deb8c77e364b5755
| 395
|
py
|
Python
|
setup.py
|
tpatten/pointnet.pytorch
|
7952502cf349a9d0d1c9751a9ae9231c09e4f37a
|
[
"MIT"
] | null | null | null |
setup.py
|
tpatten/pointnet.pytorch
|
7952502cf349a9d0d1c9751a9ae9231c09e4f37a
|
[
"MIT"
] | null | null | null |
setup.py
|
tpatten/pointnet.pytorch
|
7952502cf349a9d0d1c9751a9ae9231c09e4f37a
|
[
"MIT"
] | null | null | null |
# install using 'pip install -e .'
from setuptools import setup
#setup(name='pointnet',
# packages=['pointnet'],
# package_dir={'pointnet': 'pointnet'},
# install_requires=['torch',
# 'tqdm',
# 'plyfile'],
# version='0.0.1')
setup(name='pointnet',
packages=['pointnet'],
package_dir={'pointnet': 'pointnet'}
)
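# Note: install from the repository root with `pip install -e .`, as the comment at
# the top suggests; the commented-out block above pinned the runtime dependencies
# (torch, tqdm, plyfile), which are presumably expected to be installed separately.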
| 23.235294
| 44
| 0.531646
|
01d9ccd4603ad5ff43240eb5b2e1e63297a8544f
| 4,385
|
py
|
Python
|
scripts/onnx_generator/OperatorTypeResolver.py
|
mdhimes/cONNXr
|
986e9658ac9fafa38fb7939e2142dd213f95acce
|
[
"MIT"
] | null | null | null |
scripts/onnx_generator/OperatorTypeResolver.py
|
mdhimes/cONNXr
|
986e9658ac9fafa38fb7939e2142dd213f95acce
|
[
"MIT"
] | null | null | null |
scripts/onnx_generator/OperatorTypeResolver.py
|
mdhimes/cONNXr
|
986e9658ac9fafa38fb7939e2142dd213f95acce
|
[
"MIT"
] | null | null | null |
from .Template import Template
class CaseSkip(Template):
_template = '''
/* skip non tensor constraint '{constraint}' ('{original}') */
'''
def __init__(self, constraint, original):
self.constraint = constraint
self.original = original
class CaseSwitch(Template):
_template = '''
case {case}: {{ {switch} break; }}
'''
def __init__(self, schema, case, permutationsMap):
self.case = case
self.switch = Switch(schema, permutationsMap)
class CaseExecuter(Template):
_template = '''
case {case}: {{ executer = (operator_executer) &{operator_name}__{typePermutationText}; break; }}
'''
def __init__(self, case, operator_name, typePermutationText):
self.case = case
self.operator_name = operator_name
self.typePermutationText = typePermutationText
class Type(Template):
_template = '''
uint32_t {constraint} = 0;
if (ctx->{inOrOutput}[{name}]) {{
{constraint} = ctx->{inOrOutput}[{name}]->data_type;
}}
'''
def __init__(self, constraint, inOrOutput, name):
self.constraint = constraint
self.inOrOutput = inOrOutput
self.name = name
class Switch(Template):
_template = '''
switch ( {constraint} ) {{
case 0: //constrained tensor is not set (maybe optional?), just take next case
{cases}
default: {{
fprintf(stderr, "no matching type for {schema.operator_name} and constraint '{constraint}' with type '%s' found!\\n",operator_info_tensorType2str({constraint}));
break;
}}
}}
'''
def __init__(self, schema, permutationMap):
self.schema = schema
self.constraint = list(permutationMap.keys())[0][-1][0]
cases = []
for k,v in permutationMap.items():
case = k[-1][1].onnxTensorDataTypes()
if not case:
cases.append(CaseSkip(k[-1][0],k[-1][1].original))
continue
if not v:
operator_name = self.schema.operator_name
typePermutationText = self.schema.constraints.typePermutationText(k)
cases.append(CaseExecuter(case[0],operator_name,typePermutationText))
else:
cases.append(CaseSwitch( schema, case[0],v))
self.cases = "\n".join(map(str,cases))
class Resolve(Template):
_template = '''
{{
{types}
{switch}
}}
'''
def __init__(self, schema):
self.schema = schema
resolveTypes = []
for constraint in filter(lambda x: x.input, self.schema.constraints.values()):
inOrOutput = None
name = None
for idx, input in enumerate(self.schema.inputs):
if constraint.name != input.constraint:
continue
inOrOutput = "inputs"
name = idx
if input.optional:
continue
break
else:
for idx, output in enumerate(self.schema.outputs):
if constraint.name != output.constraint:
continue
inOrOutput = "outputs"
name = idx
if output.optional:
continue
break
resolveTypes.append(Type(constraint.name,inOrOutput,name))
permutationsMap = schema.constraints.typePermutationsMap(filterInput=True)
self.types = "\n".join([ str(t) for t in resolveTypes ])
if permutationsMap:
self.switch = Switch(schema, permutationsMap)
else:
self.types = "/* skipping constraint check, because no constraint exist */"
self.switch = f"executer = &{schema.operator_name};"
class Source(Template):
_basepath = "{path}"
_filepath = "{schema.domain}/resolve_{schema.operator_name}.c"
_template = '''
//this file was generated by {scriptpath}
#include "operators/{schema.domain}/{schema.operator_name}.h"
#include "operators/operator_stub.h"
#include <inttypes.h>
#include <stdio.h>
operator_executer resolve_{schema.operator_name}(
node_context *ctx
){{
operator_executer executer = NULL;
{switch}
if (!executer) {{
executer = &operator_stub;
}}
return executer;
}}
'''
def __init__(self, schema, path):
self.schema = schema
self.path = path
self.switch = Resolve(self.schema)
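    # Sketch of the output: Source(schema, path) renders a resolve_<operator>.c file
    # whose nested switch inspects the data_type of the constrained input tensors and
    # selects the matching typed executer, falling back to operator_stub when no
    # constraint permutation matches.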
| 32.007299
| 169
| 0.594983
|
2aca687da047fc03f2d8cd24576d17d97b10bb50
| 794
|
py
|
Python
|
LeetCode/HouseRobberIII.py
|
SelvorWhim/competitive
|
b9daaf21920d6f7669dc0c525e903949f4e33b62
|
[
"Unlicense"
] | null | null | null |
LeetCode/HouseRobberIII.py
|
SelvorWhim/competitive
|
b9daaf21920d6f7669dc0c525e903949f4e33b62
|
[
"Unlicense"
] | null | null | null |
LeetCode/HouseRobberIII.py
|
SelvorWhim/competitive
|
b9daaf21920d6f7669dc0c525e903949f4e33b62
|
[
"Unlicense"
] | null | null | null |
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
dp = {}
def rob(self, root: TreeNode, can_pick_root: bool = True) -> int:
        if root is None:
return 0
# use memoization to prevent exponential growth
if (root, can_pick_root) in self.dp:
return self.dp[(root, can_pick_root)]
with_root = root.val + self.rob(root.left, False) + self.rob(root.right, False) if can_pick_root else 0
without_root = self.rob(root.left, True) + self.rob(root.right, True)
ret = max(with_root, without_root)
self.dp[(root, can_pick_root)] = ret # memoize
return ret
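    # Recurrence: rob(node, True)  = max(node.val + rob(l, False) + rob(r, False),
    #                                    rob(l, True) + rob(r, True))
    #             rob(node, False) = rob(l, True) + rob(r, True)
    # The class-level dp dict memoizes (node, can_pick_root) pairs across calls.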
| 39.7
| 111
| 0.609572
|
ea4b5cfb61de8aebfdf8a87097ec11c0ab5c524f
| 2,508
|
py
|
Python
|
test/functional/mempool_limit.py
|
Nomadic-Official/nomadic-nomd-coin
|
e9f758e5d43b9024c1ef876c2888934be08d4edd
|
[
"MIT"
] | null | null | null |
test/functional/mempool_limit.py
|
Nomadic-Official/nomadic-nomd-coin
|
e9f758e5d43b9024c1ef876c2888934be08d4edd
|
[
"MIT"
] | null | null | null |
test/functional/mempool_limit.py
|
Nomadic-Official/nomadic-nomd-coin
|
e9f758e5d43b9024c1ef876c2888934be08d4edd
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool limiting together/eviction with the wallet."""
from test_framework.test_framework import NomadicTestFramework
from test_framework.util import *
class MempoolLimitTest(NomadicTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [["-maxmempool=5", "-spendzeroconfchange=0"]]
def run_test(self):
txouts = gen_return_txouts()
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
        self.log.info('Check that mempoolminfee is minrelaytxfee')
assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
assert_equal(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
txids = []
utxos = create_confirmed_utxos(relayfee, self.nodes[0], 91)
self.log.info('Create a mempool tx that will be evicted')
us0 = utxos.pop()
inputs = [{ "txid" : us0["txid"], "vout" : us0["vout"]}]
outputs = {self.nodes[0].getnewaddress() : 0.0001}
tx = self.nodes[0].createrawtransaction(inputs, outputs)
self.nodes[0].settxfee(relayfee) # specifically fund this tx with low fee
txF = self.nodes[0].fundrawtransaction(tx)
self.nodes[0].settxfee(0) # return to automatic fee selection
txFS = self.nodes[0].signrawtransaction(txF['hex'])
txid = self.nodes[0].sendrawtransaction(txFS['hex'])
relayfee = self.nodes[0].getnetworkinfo()['relayfee']
base_fee = relayfee*100
for i in range (3):
txids.append([])
txids[i] = create_lots_of_big_transactions(self.nodes[0], txouts, utxos[30*i:30*i+30], 30, (i+1)*base_fee)
self.log.info('The tx should be evicted by now')
assert(txid not in self.nodes[0].getrawmempool())
txdata = self.nodes[0].gettransaction(txid)
assert(txdata['confirmations'] == 0) #confirmation should still be 0
        self.log.info('Check that mempoolminfee is larger than minrelaytxfee')
assert_equal(self.nodes[0].getmempoolinfo()['minrelaytxfee'], Decimal('0.00001000'))
assert_greater_than(self.nodes[0].getmempoolinfo()['mempoolminfee'], Decimal('0.00001000'))
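        # Flow recap: a low-fee transaction is submitted first, then 90 large
        # higher-fee transactions (3 batches of 30) push the 5 MB mempool
        # (-maxmempool=5) over its limit, evicting the low-fee tx and raising
        # mempoolminfee above the minimum relay fee checked above.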
if __name__ == '__main__':
MempoolLimitTest().main()
| 45.6
| 118
| 0.672249
|
ad22cefc68168db6de09463aab8253d08d627567
| 21,662
|
py
|
Python
|
qa/rpc-tests/test_framework/util.py
|
pw512/scribe
|
3dbe0da8d5a71a4867d0274e67c5b2a56431efd4
|
[
"MIT"
] | 4
|
2020-01-04T08:49:56.000Z
|
2021-11-01T20:59:47.000Z
|
qa/rpc-tests/test_framework/util.py
|
pw512/scribe
|
3dbe0da8d5a71a4867d0274e67c5b2a56431efd4
|
[
"MIT"
] | 1
|
2020-08-13T07:56:11.000Z
|
2020-08-13T10:35:11.000Z
|
qa/rpc-tests/test_framework/util.py
|
pw512/scribe
|
3dbe0da8d5a71a4867d0274e67c5b2a56431efd4
|
[
"MIT"
] | 5
|
2019-11-12T10:23:39.000Z
|
2021-08-08T01:27:47.000Z
|
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Copyright (c) 2014-2017 The Dash Core developers
# Copyright (c) 2018 The Scribe Core developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Helpful routines for regression testing
#
# Add python-bitcoinrpc to module search path:
import os
import sys
from binascii import hexlify, unhexlify
from base64 import b64encode
from decimal import Decimal, ROUND_DOWN
import json
import random
import shutil
import subprocess
import time
import re
import errno
from . import coverage
from .authproxy import AuthServiceProxy, JSONRPCException
COVERAGE_DIR = None
#Set Mocktime default to OFF.
#MOCKTIME is only needed for scripts that use the
#cached version of the blockchain. If the cached
#version of the blockchain is used without MOCKTIME
#then the mempools will not sync due to IBD.
MOCKTIME = 0
def enable_mocktime():
    #For backward compatibility of the python scripts
#with previous versions of the cache, set MOCKTIME
#to regtest genesis time + (201 * 156)
global MOCKTIME
MOCKTIME = 1417713337 + (201 * 156)
def disable_mocktime():
global MOCKTIME
MOCKTIME = 0
def get_mocktime():
return MOCKTIME
def enable_coverage(dirname):
"""Maintain a log of which RPC calls are made during testing."""
global COVERAGE_DIR
COVERAGE_DIR = dirname
def get_rpc_proxy(url, node_number, timeout=None):
"""
Args:
url (str): URL of the RPC server to call
        node_number (int): the node number (or id) that this proxy connects to
Kwargs:
timeout (int): HTTP timeout in seconds
Returns:
        AuthServiceProxy: a convenience object for making RPC calls.
"""
proxy_kwargs = {}
if timeout is not None:
proxy_kwargs['timeout'] = timeout
proxy = AuthServiceProxy(url, **proxy_kwargs)
proxy.url = url # store URL on proxy for info
coverage_logfile = coverage.get_filename(
COVERAGE_DIR, node_number) if COVERAGE_DIR else None
return coverage.AuthServiceProxyWrapper(proxy, coverage_logfile)
def get_mnsync_status(node):
result = node.mnsync("status")
return result['IsSynced']
def wait_to_sync(node):
synced = False
while not synced:
synced = get_mnsync_status(node)
time.sleep(0.5)
def p2p_port(n):
return 11000 + n + os.getpid()%999
def rpc_port(n):
return 12000 + n + os.getpid()%999
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def count_bytes(hex_string):
return len(bytearray.fromhex(hex_string))
def bytes_to_hex_str(byte_str):
return hexlify(byte_str).decode('ascii')
def hex_str_to_bytes(hex_str):
return unhexlify(hex_str.encode('ascii'))
def str_to_b64str(string):
return b64encode(string.encode('utf-8')).decode('ascii')
def sync_blocks(rpc_connections, wait=1):
"""
Wait until everybody has the same block count
"""
while True:
counts = [ x.getblockcount() for x in rpc_connections ]
if counts == [ counts[0] ]*len(counts):
break
time.sleep(wait)
def sync_mempools(rpc_connections, wait=1):
"""
Wait until everybody has the same transactions in their memory
pools
"""
while True:
pool = set(rpc_connections[0].getrawmempool())
num_match = 1
for i in range(1, len(rpc_connections)):
if set(rpc_connections[i].getrawmempool()) == pool:
num_match = num_match+1
if num_match == len(rpc_connections):
break
time.sleep(wait)
def sync_masternodes(rpc_connections):
for node in rpc_connections:
wait_to_sync(node)
bitcoind_processes = {}
def initialize_datadir(dirname, n):
datadir = os.path.join(dirname, "node"+str(n))
if not os.path.isdir(datadir):
os.makedirs(datadir)
with open(os.path.join(datadir, "scribe.conf"), 'w') as f:
f.write("regtest=1\n")
f.write("rpcuser=rt\n")
f.write("rpcpassword=rt\n")
f.write("port="+str(p2p_port(n))+"\n")
f.write("rpcport="+str(rpc_port(n))+"\n")
f.write("listenonion=0\n")
return datadir
def rpc_url(i, rpchost=None):
return "http://rt:rt@%s:%d" % (rpchost or '127.0.0.1', rpc_port(i))
def wait_for_bitcoind_start(process, url, i):
'''
Wait for scribed to start. This means that RPC is accessible and fully initialized.
Raise an exception if scribed exits during initialization.
'''
while True:
if process.poll() is not None:
raise Exception('scribed exited with status %i during initialization' % process.returncode)
try:
rpc = get_rpc_proxy(url, i)
blocks = rpc.getblockcount()
break # break out of loop on success
except IOError as e:
if e.errno != errno.ECONNREFUSED: # Port not yet open?
raise # unknown IO error
except JSONRPCException as e: # Initialization phase
if e.error['code'] != -28: # RPC in warmup?
                raise # unknown JSON RPC exception
time.sleep(0.25)
def initialize_chain(test_dir):
"""
Create (or copy from cache) a 200-block-long chain and
4 wallets.
"""
if (not os.path.isdir(os.path.join("cache","node0"))
or not os.path.isdir(os.path.join("cache","node1"))
or not os.path.isdir(os.path.join("cache","node2"))
or not os.path.isdir(os.path.join("cache","node3"))):
#find and delete old cache directories if any exist
for i in range(4):
if os.path.isdir(os.path.join("cache","node"+str(i))):
shutil.rmtree(os.path.join("cache","node"+str(i)))
# Create cache directories, run scribeds:
for i in range(4):
datadir=initialize_datadir("cache", i)
args = [ os.getenv("SCRIBED", "scribed"), "-server", "-keypool=1", "-datadir="+datadir, "-discover=0" ]
if i > 0:
args.append("-connect=127.0.0.1:"+str(p2p_port(0)))
bitcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print "initialize_chain: scribed started, waiting for RPC to come up"
wait_for_bitcoind_start(bitcoind_processes[i], rpc_url(i), i)
if os.getenv("PYTHON_DEBUG", ""):
print "initialize_chain: RPC succesfully started"
rpcs = []
for i in range(4):
try:
rpcs.append(get_rpc_proxy(rpc_url(i), i))
except:
sys.stderr.write("Error connecting to "+url+"\n")
sys.exit(1)
# Create a 200-block-long chain; each of the 4 nodes
# gets 25 mature blocks and 25 immature.
# blocks are created with timestamps 156 seconds apart
# starting from 31356 seconds in the past
enable_mocktime()
block_time = get_mocktime() - (201 * 156)
for i in range(2):
for peer in range(4):
for j in range(25):
set_node_times(rpcs, block_time)
rpcs[peer].generate(1)
block_time += 156
# Must sync before next peer starts generating blocks
sync_blocks(rpcs)
# Shut them down, and clean up cache directories:
stop_nodes(rpcs)
wait_bitcoinds()
disable_mocktime()
for i in range(4):
os.remove(log_filename("cache", i, "debug.log"))
os.remove(log_filename("cache", i, "db.log"))
os.remove(log_filename("cache", i, "peers.dat"))
os.remove(log_filename("cache", i, "fee_estimates.dat"))
for i in range(4):
from_dir = os.path.join("cache", "node"+str(i))
to_dir = os.path.join(test_dir, "node"+str(i))
shutil.copytree(from_dir, to_dir)
initialize_datadir(test_dir, i) # Overwrite port/rpcport in scribe.conf
def initialize_chain_clean(test_dir, num_nodes):
"""
Create an empty blockchain and num_nodes wallets.
Useful if a test case wants complete control over initialization.
"""
for i in range(num_nodes):
datadir=initialize_datadir(test_dir, i)
def _rpchost_to_args(rpchost):
'''Convert optional IP:port spec to rpcconnect/rpcport args'''
if rpchost is None:
return []
    match = re.match('(\[[0-9a-fA-F:]+\]|[^:]+)(?::([0-9]+))?$', rpchost)
if not match:
raise ValueError('Invalid RPC host spec ' + rpchost)
rpcconnect = match.group(1)
rpcport = match.group(2)
if rpcconnect.startswith('['): # remove IPv6 [...] wrapping
rpcconnect = rpcconnect[1:-1]
rv = ['-rpcconnect=' + rpcconnect]
if rpcport:
rv += ['-rpcport=' + rpcport]
return rv
def start_node(i, dirname, extra_args=None, rpchost=None, timewait=None, binary=None):
"""
Start a scribed and return RPC connection to it
"""
datadir = os.path.join(dirname, "node"+str(i))
if binary is None:
binary = os.getenv("SCRIBED", "scribed")
# RPC tests still depend on free transactions
args = [ binary, "-datadir="+datadir, "-server", "-keypool=1", "-discover=0", "-rest", "-blockprioritysize=50000", "-mocktime="+str(get_mocktime()) ]
if extra_args is not None: args.extend(extra_args)
bitcoind_processes[i] = subprocess.Popen(args)
if os.getenv("PYTHON_DEBUG", ""):
print "start_node: scribed started, waiting for RPC to come up"
url = rpc_url(i, rpchost)
wait_for_bitcoind_start(bitcoind_processes[i], url, i)
if os.getenv("PYTHON_DEBUG", ""):
print "start_node: RPC succesfully started"
proxy = get_rpc_proxy(url, i, timeout=timewait)
if COVERAGE_DIR:
coverage.write_all_rpc_commands(COVERAGE_DIR, proxy)
return proxy
def start_nodes(num_nodes, dirname, extra_args=None, rpchost=None, binary=None):
"""
Start multiple scribeds, return RPC connections to them
"""
if extra_args is None: extra_args = [ None for i in range(num_nodes) ]
if binary is None: binary = [ None for i in range(num_nodes) ]
rpcs = []
try:
for i in range(num_nodes):
rpcs.append(start_node(i, dirname, extra_args[i], rpchost, binary=binary[i]))
except: # If one node failed to start, stop the others
stop_nodes(rpcs)
raise
return rpcs
def log_filename(dirname, n_node, logname):
return os.path.join(dirname, "node"+str(n_node), "regtest", logname)
def stop_node(node, i):
node.stop()
bitcoind_processes[i].wait()
del bitcoind_processes[i]
def stop_nodes(nodes):
for node in nodes:
node.stop()
del nodes[:] # Emptying array closes connections as a side effect
def set_node_times(nodes, t):
for node in nodes:
node.setmocktime(t)
def wait_bitcoinds():
# Wait for all bitcoinds to cleanly exit
for bitcoind in bitcoind_processes.values():
bitcoind.wait()
bitcoind_processes.clear()
def connect_nodes(from_connection, node_num):
ip_port = "127.0.0.1:"+str(p2p_port(node_num))
from_connection.addnode(ip_port, "onetry")
# poll until version handshake complete to avoid race conditions
# with transaction relaying
while any(peer['version'] == 0 for peer in from_connection.getpeerinfo()):
time.sleep(0.1)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
def find_output(node, txid, amount):
"""
Return index to output of txid with value amount
Raises exception if there is none.
"""
txdata = node.getrawtransaction(txid, 1)
for i in range(len(txdata["vout"])):
if txdata["vout"][i]["value"] == amount:
return i
raise RuntimeError("find_output txid %s : %s not found"%(txid,str(amount)))
def gather_inputs(from_node, amount_needed, confirmations_required=1):
"""
Return a random set of unspent txouts that are enough to pay amount_needed
"""
assert(confirmations_required >=0)
utxo = from_node.listunspent(confirmations_required)
random.shuffle(utxo)
inputs = []
total_in = Decimal("0.00000000")
while total_in < amount_needed and len(utxo) > 0:
t = utxo.pop()
total_in += t["amount"]
inputs.append({ "txid" : t["txid"], "vout" : t["vout"], "address" : t["address"] } )
if total_in < amount_needed:
raise RuntimeError("Insufficient funds: need %d, have %d"%(amount_needed, total_in))
return (total_in, inputs)
def make_change(from_node, amount_in, amount_out, fee):
"""
Create change output(s), return them
"""
outputs = {}
amount = amount_out+fee
change = amount_in - amount
if change > amount*2:
# Create an extra change output to break up big inputs
change_address = from_node.getnewaddress()
# Split change in two, being careful of rounding:
outputs[change_address] = Decimal(change/2).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
change = amount_in - amount - outputs[change_address]
if change > 0:
outputs[from_node.getnewaddress()] = change
return outputs
def send_zeropri_transaction(from_node, to_node, amount, fee):
"""
Create&broadcast a zero-priority transaction.
Returns (txid, hex-encoded-txdata)
Ensures transaction is zero-priority by first creating a send-to-self,
then using its output
"""
# Create a send-to-self with confirmed inputs:
self_address = from_node.getnewaddress()
(total_in, inputs) = gather_inputs(from_node, amount+fee*2)
outputs = make_change(from_node, total_in, amount+fee, fee)
outputs[self_address] = float(amount+fee)
self_rawtx = from_node.createrawtransaction(inputs, outputs)
self_signresult = from_node.signrawtransaction(self_rawtx)
self_txid = from_node.sendrawtransaction(self_signresult["hex"], True)
vout = find_output(from_node, self_txid, amount+fee)
# Now immediately spend the output to create a 1-input, 1-output
# zero-priority transaction:
inputs = [ { "txid" : self_txid, "vout" : vout } ]
outputs = { to_node.getnewaddress() : float(amount) }
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"])
def random_zeropri_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random zero-priority transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(txid, txhex) = send_zeropri_transaction(from_node, to_node, amount, fee)
return (txid, txhex, fee)
def random_transaction(nodes, amount, min_fee, fee_increment, fee_variants):
"""
Create a random transaction.
Returns (txid, hex-encoded-transaction-data, fee)
"""
from_node = random.choice(nodes)
to_node = random.choice(nodes)
fee = min_fee + fee_increment*random.randint(0,fee_variants)
(total_in, inputs) = gather_inputs(from_node, amount+fee)
outputs = make_change(from_node, total_in, amount, fee)
outputs[to_node.getnewaddress()] = float(amount)
rawtx = from_node.createrawtransaction(inputs, outputs)
signresult = from_node.signrawtransaction(rawtx)
txid = from_node.sendrawtransaction(signresult["hex"], True)
return (txid, signresult["hex"], fee)
def assert_equal(thing1, thing2):
if thing1 != thing2:
raise AssertionError("%s != %s"%(str(thing1),str(thing2)))
def assert_greater_than(thing1, thing2):
if thing1 <= thing2:
raise AssertionError("%s <= %s"%(str(thing1),str(thing2)))
def assert_raises(exc, fun, *args, **kwds):
try:
fun(*args, **kwds)
except exc:
pass
except Exception as e:
raise AssertionError("Unexpected exception raised: "+type(e).__name__)
else:
raise AssertionError("No exception raised")
def assert_is_hex_string(string):
try:
int(string, 16)
except Exception as e:
raise AssertionError(
"Couldn't interpret %r as hexadecimal; raised: %s" % (string, e))
def assert_is_hash_string(string, length=64):
if not isinstance(string, basestring):
raise AssertionError("Expected a string, got type %r" % type(string))
elif length and len(string) != length:
raise AssertionError(
"String of length %d expected; got %d" % (length, len(string)))
elif not re.match('[abcdef0-9]+$', string):
raise AssertionError(
"String %r contains invalid characters for a hash." % string)
def assert_array_result(object_array, to_match, expected, should_not_find = False):
"""
Pass in array of JSON objects, a dictionary with key/value pairs
to match against, and another dictionary with expected key/value
pairs.
If the should_not_find flag is true, to_match should not be found
in object_array
"""
if should_not_find == True:
assert_equal(expected, { })
num_matched = 0
for item in object_array:
all_match = True
for key,value in to_match.items():
if item[key] != value:
all_match = False
if not all_match:
continue
elif should_not_find == True:
num_matched = num_matched+1
for key,value in expected.items():
if item[key] != value:
raise AssertionError("%s : expected %s=%s"%(str(item), str(key), str(value)))
num_matched = num_matched+1
if num_matched == 0 and should_not_find != True:
raise AssertionError("No objects matched %s"%(str(to_match)))
if num_matched > 0 and should_not_find == True:
raise AssertionError("Objects were found %s"%(str(to_match)))
def satoshi_round(amount):
return Decimal(amount).quantize(Decimal('0.00000001'), rounding=ROUND_DOWN)
# Helper to create at least "count" utxos
# Pass in a fee that is sufficient for relay and mining new transactions.
def create_confirmed_utxos(fee, node, count):
node.generate(int(0.5*count)+101)
utxos = node.listunspent()
iterations = count - len(utxos)
addr1 = node.getnewaddress()
addr2 = node.getnewaddress()
if iterations <= 0:
return utxos
for i in xrange(iterations):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr1] = satoshi_round(send_value/2)
outputs[addr2] = satoshi_round(send_value/2)
raw_tx = node.createrawtransaction(inputs, outputs)
signed_tx = node.signrawtransaction(raw_tx)["hex"]
txid = node.sendrawtransaction(signed_tx)
while (node.getmempoolinfo()['size'] > 0):
node.generate(1)
utxos = node.listunspent()
assert(len(utxos) >= count)
return utxos
# Create large OP_RETURN txouts that can be appended to a transaction
# to make it large (helper for constructing large transactions).
def gen_return_txouts():
# Some pre-processing to create a bunch of OP_RETURN txouts to insert into transactions we create
# So we have big transactions (and therefore can't fit very many into each block)
# create one script_pubkey
script_pubkey = "6a4d0200" #OP_RETURN OP_PUSH2 512 bytes
for i in xrange (512):
script_pubkey = script_pubkey + "01"
# concatenate 128 txouts of above script_pubkey which we'll insert before the txout for change
txouts = "81"
for k in xrange(128):
# add txout value
txouts = txouts + "0000000000000000"
# add length of script_pubkey
txouts = txouts + "fd0402"
# add script_pubkey
txouts = txouts + script_pubkey
return txouts
def create_tx(node, coinbase, to_address, amount):
inputs = [{ "txid" : coinbase, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
assert_equal(signresult["complete"], True)
return signresult["hex"]
# Create a spend of each passed-in utxo, splicing in "txouts" to each raw
# transaction to make it large. See gen_return_txouts() above.
def create_lots_of_big_transactions(node, txouts, utxos, fee):
addr = node.getnewaddress()
txids = []
for i in xrange(len(utxos)):
t = utxos.pop()
inputs = []
inputs.append({ "txid" : t["txid"], "vout" : t["vout"]})
outputs = {}
send_value = t['amount'] - fee
outputs[addr] = satoshi_round(send_value)
rawtx = node.createrawtransaction(inputs, outputs)
newtx = rawtx[0:92]
newtx = newtx + txouts
newtx = newtx + rawtx[94:]
signresult = node.signrawtransaction(newtx, None, None, "NONE")
txid = node.sendrawtransaction(signresult["hex"], True)
txids.append(txid)
return txids
def get_bip9_status(node, key):
info = node.getblockchaininfo()
for row in info['bip9_softforks']:
if row['id'] == key:
return row
raise IndexError ('key:"%s" not found' % key)
| 35.337684
| 153
| 0.652387
|
8c3f4b1cc0afc0039d2be621a35254996da71ae4
| 6,337
|
py
|
Python
|
stdplugins/archive.py
|
IshwaranRudhara/PepeBot
|
5c0bd4c19e076bee73cf62a4dc804681045754d3
|
[
"Apache-2.0"
] | 6
|
2020-04-21T04:52:42.000Z
|
2020-06-19T09:33:02.000Z
|
stdplugins/archive.py
|
faquario/PepeBorg
|
be9be790b4e2b5456536162786dbd1ead71be024
|
[
"Apache-2.0"
] | 1
|
2021-02-08T20:49:02.000Z
|
2021-02-08T20:49:02.000Z
|
stdplugins/archive.py
|
prono69/LazyAF-Pepe
|
b78d4b9f174a65b77b6b5f4969386aa6dd3359ce
|
[
"Apache-2.0"
] | 3
|
2020-04-21T05:45:54.000Z
|
2020-05-01T15:03:40.000Z
|
"""
usage: reply with file : .rar , .7z create archived file
unzip usage: reply with zipped file .unzipper
Coded by @furki
"""
import asyncio
import os
import subprocess
import time
from sample_config import Config
from uniborg.util import admin_cmd, progress
import patoolib
extracted = Config.TMP_DOWNLOAD_DIRECTORY + "extracted/"
thumb_image_path = Config.TMP_DOWNLOAD_DIRECTORY + "/thumb_image.jpg"
@borg.on(admin_cmd(pattern=("rar ?(.*)")))
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
mone = await event.edit("Processing ...")
if not os.path.isdir(Config.TMP_DOWNLOAD_DIRECTORY):
os.makedirs(Config.TMP_DOWNLOAD_DIRECTORY)
if event.reply_to_msg_id:
reply_message = await event.get_reply_message()
try:
c_time = time.time()
downloaded_file_name = await borg.download_media(
reply_message,
Config.TMP_DOWNLOAD_DIRECTORY,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, mone, c_time, "trying to download")
)
)
directory_name = downloaded_file_name
await event.edit("creating rar archive, please wait..")
# patoolib.create_archive(directory_name + '.7z',directory_name)
patoolib.create_archive(directory_name + ".rar",(directory_name,Config.TMP_DOWNLOAD_DIRECTORY))
# patoolib.create_archive("/content/21.yy Avrupa (1).pdf.zip",("/content/21.yy Avrupa (1).pdf","/content/"))
await borg.send_file(
event.chat_id,
directory_name + ".rar",
caption="Rarred By @By_Azade",
force_document=True,
allow_cache=False,
reply_to=event.message.id,
)
try:
os.remove(directory_name + ".rar")
os.remove(directory_name)
except:
pass
await event.edit("Task Completed")
await asyncio.sleep(3)
await event.delete()
except Exception as e: # pylint:disable=C0103,W0703
await mone.edit(str(e))
elif input_str:
directory_name = input_str
await event.edit("Local file compressed to `{}`".format(directory_name + ".rar"))
@borg.on(admin_cmd(pattern=("7z ?(.*)")))
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
mone = await event.edit("Processing ...")
if not os.path.isdir(Config.TMP_DOWNLOAD_DIRECTORY):
os.makedirs(Config.TMP_DOWNLOAD_DIRECTORY)
if event.reply_to_msg_id:
reply_message = await event.get_reply_message()
try:
c_time = time.time()
downloaded_file_name = await borg.download_media(
reply_message,
Config.TMP_DOWNLOAD_DIRECTORY,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, mone, c_time, "trying to download")
)
)
directory_name = downloaded_file_name
await event.edit("creating 7z archive, please wait..")
# patoolib.create_archive(directory_name + '.7z',directory_name)
patoolib.create_archive(directory_name + ".7z",(directory_name,Config.TMP_DOWNLOAD_DIRECTORY))
# patoolib.create_archive("/content/21.yy Avrupa (1).pdf.zip",("/content/21.yy Avrupa (1).pdf","/content/"))
await borg.send_file(
event.chat_id,
directory_name + ".7z",
caption="7z archived By @By_Azade",
force_document=True,
allow_cache=False,
reply_to=event.message.id,
)
try:
os.remove(directory_name + ".7z")
os.remove(directory_name)
except:
pass
await event.edit("Task Completed")
await asyncio.sleep(3)
await event.delete()
except Exception as e: # pylint:disable=C0103,W0703
await mone.edit(str(e))
elif input_str:
directory_name = input_str
await event.edit("Local file compressed to `{}`".format(directory_name + ".7z"))
@borg.on(admin_cmd(pattern=("unzipper ?(.*)")))
async def _(event):
if event.fwd_from:
return
input_str = event.pattern_match.group(1)
mone = await event.edit("Processing ...")
if not os.path.isdir(Config.TMP_DOWNLOAD_DIRECTORY):
os.makedirs(Config.TMP_DOWNLOAD_DIRECTORY)
if event.reply_to_msg_id:
reply_message = await event.get_reply_message()
try:
c_time = time.time()
downloaded_file_name = await borg.download_media(
reply_message,
Config.TMP_DOWNLOAD_DIRECTORY,
progress_callback=lambda d, t: asyncio.get_event_loop().create_task(
progress(d, t, mone, c_time, "trying to download")
)
)
directory_name = downloaded_file_name
await event.edit("Finish downloading to my local")
command_to_exec = [
"7z",
"e",
"-o" + extracted,
directory_name]
            sp = subprocess.Popen(command_to_exec, stderr=subprocess.STDOUT, stdout=subprocess.PIPE)
            sp.communicate()  # wait for 7z to finish extracting into the extracted/ directory
            await event.edit("Extraction finished, uploading extracted files..")
            for extracted_file in sorted(os.listdir(extracted)):
                await borg.send_file(
                    event.chat_id,
                    os.path.join(extracted, extracted_file),
                    caption="Unzipped By @By_Azade",
                    force_document=True,
                    allow_cache=False,
                    reply_to=event.message.id,
                )
            try:
                for extracted_file in os.listdir(extracted):
                    os.remove(os.path.join(extracted, extracted_file))
                os.remove(directory_name)
            except:
                pass
await event.edit("Task Completed")
await asyncio.sleep(3)
await event.delete()
except Exception as e: # pylint:disable=C0103,W0703
await mone.edit(str(e))
elif input_str:
directory_name = input_str
await event.edit("Local file compressed to `{}`".format(directory_name + ".zip"))
| 39.60625
| 120
| 0.579296
|
88e3dd48e5c771f15f0cb36d6769620033a2df28
| 4,998
|
py
|
Python
|
tests/unit/states/test_pyenv.py
|
jubrad/salt
|
7960334fb726cfde45e6409da79a65535c626685
|
[
"Apache-2.0"
] | 1
|
2021-08-14T13:48:38.000Z
|
2021-08-14T13:48:38.000Z
|
tests/unit/states/test_pyenv.py
|
jubrad/salt
|
7960334fb726cfde45e6409da79a65535c626685
|
[
"Apache-2.0"
] | 3
|
2015-03-31T14:44:05.000Z
|
2015-06-18T19:02:24.000Z
|
tests/unit/states/test_pyenv.py
|
jubrad/salt
|
7960334fb726cfde45e6409da79a65535c626685
|
[
"Apache-2.0"
] | 1
|
2020-01-02T09:03:24.000Z
|
2020-01-02T09:03:24.000Z
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Jayesh Kariya <jayeshk@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import skipIf, TestCase
from tests.support.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
# Import Salt Libs
import salt.states.pyenv as pyenv
@skipIf(NO_MOCK, NO_MOCK_REASON)
class PyenvTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.states.pyenv
'''
def setup_loader_modules(self):
return {pyenv: {}}
# 'installed' function tests: 1
def test_installed(self):
'''
Test to verify that the specified python is installed with pyenv.
'''
name = 'python-2.7.6'
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
with patch.dict(pyenv.__opts__, {'test': True}):
comt = ('python 2.7.6 is set to be installed')
ret.update({'comment': comt})
self.assertDictEqual(pyenv.installed(name), ret)
with patch.dict(pyenv.__opts__, {'test': False}):
mock_f = MagicMock(side_effect=[False, False, True])
mock_fa = MagicMock(side_effect=[False, True])
mock_str = MagicMock(return_value='2.7.6')
mock_lst = MagicMock(return_value=['2.7.6'])
with patch.dict(pyenv.__salt__, {'pyenv.is_installed': mock_f,
'pyenv.install': mock_fa,
'pyenv.default': mock_str,
'pyenv.versions': mock_lst}):
comt = ('pyenv failed to install')
ret.update({'comment': comt, 'result': False})
self.assertDictEqual(pyenv.installed(name), ret)
comt = ('Requested python exists.')
ret.update({'comment': comt, 'result': True, 'default': True})
self.assertDictEqual(pyenv.installed(name), ret)
self.assertDictEqual(pyenv.installed(name), ret)
# 'absent' function tests: 1
def test_absent(self):
'''
Test to verify that the specified python is not installed with pyenv.
'''
name = 'python-2.7.6'
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
with patch.dict(pyenv.__opts__, {'test': True}):
comt = ('python 2.7.6 is set to be uninstalled')
ret.update({'comment': comt})
self.assertDictEqual(pyenv.absent(name), ret)
with patch.dict(pyenv.__opts__, {'test': False}):
mock_f = MagicMock(side_effect=[False, True])
mock_t = MagicMock(return_value=True)
mock_str = MagicMock(return_value='2.7.6')
mock_lst = MagicMock(return_value=['2.7.6'])
with patch.dict(pyenv.__salt__, {'pyenv.is_installed': mock_f,
'pyenv.uninstall_python': mock_t,
'pyenv.default': mock_str,
'pyenv.versions': mock_lst}):
comt = ('pyenv not installed, 2.7.6 not either')
ret.update({'comment': comt, 'result': True})
self.assertDictEqual(pyenv.absent(name), ret)
comt = ('Successfully removed python')
ret.update({'comment': comt, 'result': True, 'default': True,
'changes': {'2.7.6': 'Uninstalled'}})
self.assertDictEqual(pyenv.absent(name), ret)
# 'install_pyenv' function tests: 1
def test_install_pyenv(self):
'''
Test to install pyenv if not installed.
'''
name = 'python-2.7.6'
ret = {'name': name,
'changes': {},
'result': None,
'comment': ''}
with patch.dict(pyenv.__opts__, {'test': True}):
comt = ('pyenv is set to be installed')
ret.update({'comment': comt})
self.assertDictEqual(pyenv.install_pyenv(name), ret)
with patch.dict(pyenv.__opts__, {'test': False}):
mock_t = MagicMock(return_value=True)
mock_str = MagicMock(return_value='2.7.6')
mock_lst = MagicMock(return_value=['2.7.6'])
with patch.dict(pyenv.__salt__, {'pyenv.install_python': mock_t,
'pyenv.default': mock_str,
'pyenv.versions': mock_lst}):
comt = ('Successfully installed python')
ret.update({'comment': comt, 'result': True, 'default': False,
'changes': {None: 'Installed'}})
self.assertDictEqual(pyenv.install_pyenv(name), ret)
| 37.863636
| 78
| 0.529812
|
7cbd79dbac8eb7099c255221b15fa38363cd2878
| 207
|
py
|
Python
|
src/games/domino/module/players/utils.py
|
2kodevs/cooperAItive
|
910c4c1cb356e3c404ce4b9a64c812cab3333742
|
[
"MIT"
] | 1
|
2021-09-16T12:58:24.000Z
|
2021-09-16T12:58:24.000Z
|
src/games/domino/module/players/utils.py
|
2kodevs/cooperAItive
|
910c4c1cb356e3c404ce4b9a64c812cab3333742
|
[
"MIT"
] | 3
|
2021-09-08T23:20:20.000Z
|
2022-01-30T22:45:44.000Z
|
src/games/domino/module/players/utils.py
|
2kodevs/cooperAItive
|
910c4c1cb356e3c404ce4b9a64c812cab3333742
|
[
"MIT"
] | null | null | null |
def count_min(player, piece):
cant = [0, 0]
for item in player.pieces:
cant[0] += (piece[0] in item)
cant[1] += (piece[1] in item)
val = min(cant)
return val, cant.index(val)
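# Illustrative usage sketch, not part of the original module; _FakePlayer is a
# stand-in that only exposes the `pieces` attribute read by count_min.
def _count_min_example():
    class _FakePlayer(object):
        pieces = [(0, 1), (1, 3), (2, 5)]
    # For piece (1, 2): side 1 appears in two pieces and side 2 in one,
    # so the smaller count is 1 and it sits at index 1 -> returns (1, 1).
    return count_min(_FakePlayer(), (1, 2))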
| 25.875
| 37
| 0.555556
|
82237e917a49cd5d076e702037cb3389798c25ed
| 6,953
|
py
|
Python
|
terraform-modules/lambda/code/cname-google/cname-google.py
|
Mellis3489/domain-protect
|
26a64a741541abb4d17b18cd53b94fc6de387b8f
|
[
"Apache-2.0"
] | null | null | null |
terraform-modules/lambda/code/cname-google/cname-google.py
|
Mellis3489/domain-protect
|
26a64a741541abb4d17b18cd53b94fc6de387b8f
|
[
"Apache-2.0"
] | null | null | null |
terraform-modules/lambda/code/cname-google/cname-google.py
|
Mellis3489/domain-protect
|
26a64a741541abb4d17b18cd53b94fc6de387b8f
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import os, boto3
import logging
import json
import requests
import time
from botocore.exceptions import ClientError
from datetime import datetime
def json_serial(obj):
"""JSON serializer for objects not serializable by default json code"""
if isinstance(obj, datetime):
serial = obj.isoformat()
return serial
raise TypeError("Type not serializable")
def assume_role(account, security_audit_role_name, external_id, project, region):
security_audit_role_arn = "arn:aws:iam::" + account + ":role/" + security_audit_role_name
stsclient = boto3.client('sts')
try:
if external_id == "":
assumed_role_object = stsclient.assume_role(RoleArn = security_audit_role_arn, RoleSessionName = project)
print("Assumed " + security_audit_role_name + " role in account " + account)
else:
assumed_role_object = stsclient.assume_role(RoleArn = security_audit_role_arn, RoleSessionName = project, ExternalId = external_id)
print("Assumed " + security_audit_role_name + " role in account " + account)
except Exception:
logging.exception("ERROR: Failed to assume " + security_audit_role_name + " role in AWS account " + account)
credentials = assumed_role_object['Credentials']
aws_access_key_id = credentials["AccessKeyId"]
aws_secret_access_key = credentials["SecretAccessKey"]
aws_session_token = credentials["SessionToken"]
boto3_session = boto3.session.Session(aws_access_key_id=aws_access_key_id, aws_secret_access_key=aws_secret_access_key, aws_session_token=aws_session_token, region_name=region)
return boto3_session
def vulnerable_cname_storage(domain_name):
try:
response = requests.get('http://' + domain_name)
if "NoSuchBucket" in response.text:
return "True"
else:
return "False"
except:
pass
try:
response = requests.get('https://' + domain_name)
if "NoSuchBucket" in response.text:
return "True"
else:
return "False"
except:
return "False"
def lambda_handler(event, context):
# set variables
region = os.environ['AWS_REGION']
org_primary_account = os.environ['ORG_PRIMARY_ACCOUNT']
security_audit_role_name = os.environ['SECURITY_AUDIT_ROLE_NAME']
external_id = os.environ['EXTERNAL_ID']
project = os.environ['PROJECT']
sns_topic_arn = os.environ['SNS_TOPIC_ARN']
vulnerable_domains = []
json_data = {"Findings": []}
boto3_session = assume_role(org_primary_account, security_audit_role_name, external_id, project, region)
client = boto3_session.client(service_name = "organizations")
try:
paginator_accounts = client.get_paginator('list_accounts')
pages_accounts = paginator_accounts.paginate()
for page_accounts in pages_accounts:
accounts = page_accounts['Accounts']
for account in accounts:
account_id = account['Id']
account_name = account['Name']
try:
boto3_session = assume_role(account_id, security_audit_role_name, external_id, project, region)
client = boto3_session.client('route53')
try:
paginator_zones = client.get_paginator('list_hosted_zones')
pages_zones = paginator_zones.paginate()
for page_zones in pages_zones:
hosted_zones = page_zones['HostedZones']
#print(json.dumps(hosted_zones, sort_keys=True, indent=2, default=json_serial))
for hosted_zone in hosted_zones:
if not hosted_zone['Config']['PrivateZone']:
time.sleep(.5)
print("Searching for CNAME records for Google Cloud Storage in hosted zone %s" % (hosted_zone['Name']) )
try:
paginator_records = client.get_paginator('list_resource_record_sets')
pages_records = paginator_records.paginate(HostedZoneId=hosted_zone['Id'], StartRecordName='_', StartRecordType='NS')
for page_records in pages_records:
record_sets = page_records['ResourceRecordSets']
#print(json.dumps(record_sets, sort_keys=True, indent=2, default=json_serial))
for record in record_sets:
if record['Type'] in ['CNAME'] and "c.storage.googleapis.com" in record['ResourceRecords'][0]['Value']:
print("checking if " + record['Name'] + " is vulnerable to takeover")
domain_name = record['Name']
try:
result = vulnerable_cname_storage(domain_name)
if result == "True":
print(domain_name + "in " + account_name + " is vulnerable")
vulnerable_domains.append(domain_name)
json_data["Findings"].append({"Account": account_name, "AccountID" : str(account_id), "Domain": domain_name})
except:
pass
except:
pass
except:
pass
except:
print("ERROR: unable to assume role in " + account_name + " account " + account_id)
except Exception:
logging.exception("ERROR: Unable to list AWS accounts across organization with primary account " + org_primary_account)
try:
print(json.dumps(json_data, sort_keys=True, indent=2, default=json_serial))
#print(json_data)
client = boto3.client('sns')
if len(vulnerable_domains) > 0:
response = client.publish(
TargetArn=sns_topic_arn,
Subject="CNAME for missing Google Cloud Storage bucket in Amazon Route53",
Message=json.dumps({'default': json.dumps(json_data)}),
MessageStructure='json'
)
print(response)
except:
logging.exception("ERROR: Unable to publish to SNS topic " + sns_topic_arn)
| 45.149351
| 180
| 0.554149
|
767deb90f60d22504fc89b06dd4dd15d06b71506
| 1,129
|
py
|
Python
|
bumblebee_status/modules/contrib/layout-xkbswitch.py
|
rosalogia/bumblebee-status
|
19c3975301d8700743df745ecd5ca2c05ecf5cf0
|
[
"MIT"
] | null | null | null |
bumblebee_status/modules/contrib/layout-xkbswitch.py
|
rosalogia/bumblebee-status
|
19c3975301d8700743df745ecd5ca2c05ecf5cf0
|
[
"MIT"
] | null | null | null |
bumblebee_status/modules/contrib/layout-xkbswitch.py
|
rosalogia/bumblebee-status
|
19c3975301d8700743df745ecd5ca2c05ecf5cf0
|
[
"MIT"
] | null | null | null |
"""Displays and changes the current keyboard layout
Requires the following executable:
* xkb-switch
contributed by `somospocos <https://github.com/somospocos>`_ - many thanks!
"""
import core.module
import core.widget
import core.decorators
import core.input
import util.cli
class Module(core.module.Module):
@core.decorators.every(seconds=60)
def __init__(self, config, theme):
super().__init__(config, theme, core.widget.Widget(self.current_layout))
core.input.register(self, button=core.input.LEFT_MOUSE, cmd=self.__next_keymap)
self.__current_layout = self.__get_current_layout()
def current_layout(self, _):
return self.__current_layout
def __next_keymap(self, event):
util.cli.execute("xkb-switch -n", ignore_errors=True)
def __get_current_layout(self):
try:
res = util.cli.execute("xkb-switch")
return res.split("\n")[0]
except RuntimeError:
return ["n/a"]
def update(self):
self.__current_layout = self.__get_current_layout()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| 26.255814
| 87
| 0.690877
|
dcc7806a073603e909c953b204e0a2c1c3d6ed8f
| 8,382
|
py
|
Python
|
ykman/util.py
|
nevun/yubikey-manager
|
01ba3dac6237bb05456820176ebadd6350991c2b
|
[
"BSD-2-Clause"
] | null | null | null |
ykman/util.py
|
nevun/yubikey-manager
|
01ba3dac6237bb05456820176ebadd6350991c2b
|
[
"BSD-2-Clause"
] | null | null | null |
ykman/util.py
|
nevun/yubikey-manager
|
01ba3dac6237bb05456820176ebadd6350991c2b
|
[
"BSD-2-Clause"
] | null | null | null |
# Copyright (c) 2015 Yubico AB
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import struct
import re
import logging
import random
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.backends import default_backend
from cryptography import x509
from base64 import b32decode
from OpenSSL import crypto
from .scancodes import KEYBOARD_LAYOUT
from yubikit.core import Tlv
logger = logging.getLogger(__name__)
PEM_IDENTIFIER = b"-----BEGIN"
class Cve201715361VulnerableError(Exception):
"""Thrown if on-chip RSA key generation is attempted on a YubiKey vulnerable
to CVE-2017-15361."""
def __init__(self, f_version):
self.f_version = f_version
def __str__(self):
return (
"On-chip RSA key generation on this YubiKey has been blocked.\n"
"Please see https://yubi.co/ysa201701 for details."
)
parse_tlvs = Tlv.parse_list # Deprecated, use Tlv.parse_list directly
class MissingLibrary(object):
def __init__(self, message):
self._message = message
def __getattr__(self, name):
raise AttributeError(self._message)
_MODHEX = "cbdefghijklnrtuv"
DEFAULT_PW_CHAR_BLOCKLIST = ["\t", "\n", " "]
def modhex_encode(data):
return "".join(_MODHEX[b >> 4] + _MODHEX[b & 0xF] for b in data)
def modhex_decode(string):
return bytes(
_MODHEX.index(string[i]) << 4 | _MODHEX.index(string[i + 1])
for i in range(0, len(string), 2)
)
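def _modhex_roundtrip_example():
    # Illustrative sketch, not part of the original module: modhex maps each
    # nibble onto the alphabet "cbdefghijklnrtuv", so encoding followed by
    # decoding returns the original bytes unchanged.
    data = b"\x2d\x34\x4e\x83"
    assert modhex_decode(modhex_encode(data)) == data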
def ensure_not_cve201715361_vulnerable_firmware_version(f_version):
if is_cve201715361_vulnerable_firmware_version(f_version):
raise Cve201715361VulnerableError(f_version)
def is_cve201715361_vulnerable_firmware_version(f_version):
return (4, 2, 0) <= f_version < (4, 3, 5)
def generate_static_pw(
length, keyboard_layout=KEYBOARD_LAYOUT.MODHEX, blocklist=DEFAULT_PW_CHAR_BLOCKLIST
):
chars = [k for k in keyboard_layout.value.keys() if k not in blocklist]
sr = random.SystemRandom()
return "".join([sr.choice(chars) for _ in range(length)])
def format_code(code, digits=6, steam=False):
STEAM_CHAR_TABLE = "23456789BCDFGHJKMNPQRTVWXY"
if steam:
chars = []
for i in range(5):
chars.append(STEAM_CHAR_TABLE[code % len(STEAM_CHAR_TABLE)])
code //= len(STEAM_CHAR_TABLE)
return "".join(chars)
else:
return ("%%0%dd" % digits) % (code % 10 ** digits)
def parse_totp_hash(resp):
offs = resp[-1] & 0xF
return parse_truncated(resp[offs : offs + 4])
def parse_truncated(resp):
return struct.unpack(">I", resp)[0] & 0x7FFFFFFF
def hmac_shorten_key(key, algo):
if algo.upper() == "SHA1":
h = hashes.SHA1() # nosec
block_size = 64
elif algo.upper() == "SHA256":
h = hashes.SHA256()
block_size = 64
elif algo.upper() == "SHA512":
h = hashes.SHA512()
block_size = 128
else:
raise ValueError("Unsupported algorithm!")
if len(key) > block_size:
h = hashes.Hash(h, default_backend())
h.update(key)
key = h.finalize()
return key
def time_challenge(timestamp, period=30):
return struct.pack(">q", int(timestamp // period))
def parse_key(val):
val = val.upper()
if re.match(r"^([0-9A-F]{2})+$", val): # hex
return bytes.fromhex(val)
else:
# Key should be b32 encoded
return parse_b32_key(val)
def parse_b32_key(key):
key = key.upper().replace(" ", "")
key += "=" * (-len(key) % 8) # Support unpadded
return b32decode(key)
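def _parse_key_example():
    # Illustrative sketch, not part of the original module: parse_key accepts
    # either a hex string or an (optionally unpadded) base32 secret and
    # returns the raw key bytes in both cases.
    assert parse_key("DEADBEEF") == bytes.fromhex("DEADBEEF")
    assert parse_key("GEZDGNBV") == b"12345"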
def parse_private_key(data, password):
"""
Identifies, decrypts and returns a cryptography private key object.
"""
# PEM
if is_pem(data):
if b"ENCRYPTED" in data:
if password is None:
raise TypeError("No password provided for encrypted key.")
try:
return serialization.load_pem_private_key(
data, password, backend=default_backend()
)
except ValueError:
# Cryptography raises ValueError if decryption fails.
raise
except Exception as e:
logger.debug("Failed to parse PEM private key ", exc_info=e)
# PKCS12
if is_pkcs12(data):
try:
p12 = crypto.load_pkcs12(data, password)
data = crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())
return serialization.load_pem_private_key(
data, password=None, backend=default_backend()
)
except crypto.Error as e:
raise ValueError(e)
# DER
try:
return serialization.load_der_private_key(
data, password, backend=default_backend()
)
except Exception as e:
logger.debug("Failed to parse private key as DER", exc_info=e)
# All parsing failed
raise ValueError("Could not parse private key.")
def parse_certificates(data, password):
"""
Identifies, decrypts and returns list of cryptography x509 certificates.
"""
# PEM
if is_pem(data):
certs = []
for cert in data.split(PEM_IDENTIFIER):
try:
certs.append(
x509.load_pem_x509_certificate(
PEM_IDENTIFIER + cert, default_backend()
)
)
except Exception as e:
logger.debug("Failed to parse PEM certificate", exc_info=e)
# Could be valid PEM but not certificates.
if len(certs) > 0:
return certs
# PKCS12
if is_pkcs12(data):
try:
p12 = crypto.load_pkcs12(data, password)
data = crypto.dump_certificate(crypto.FILETYPE_PEM, p12.get_certificate())
return [x509.load_pem_x509_certificate(data, default_backend())]
except crypto.Error as e:
raise ValueError(e)
# DER
try:
return [x509.load_der_x509_certificate(data, default_backend())]
except Exception as e:
logger.debug("Failed to parse certificate as DER", exc_info=e)
raise ValueError("Could not parse certificate.")
def get_leaf_certificates(certs):
"""
Extracts the leaf certificates from a list of certificates. Leaf
certificates are ones whose subject does not appear as issuer among the
others.
"""
issuers = [
cert.issuer.get_attributes_for_oid(x509.NameOID.COMMON_NAME) for cert in certs
]
leafs = [
cert
for cert in certs
if (
cert.subject.get_attributes_for_oid(x509.NameOID.COMMON_NAME) not in issuers
)
]
return leafs
def is_pem(data):
return PEM_IDENTIFIER in data if data else False
def is_pkcs12(data):
"""
Tries to identify a PKCS12 container.
The PFX PDU version is assumed to be v3.
See: https://tools.ietf.org/html/rfc7292.
"""
try:
header = Tlv.parse_list(Tlv.unwrap(0x30, data))[0]
return header.tag == 0x02 and header.value == b"\x03"
except ValueError:
return False
| 30.043011
| 88
| 0.655333
|
27b0d51f432ab9eecac66cf0ccef698bb1568b41
| 1,031
|
py
|
Python
|
integration-test/helper.py
|
montaro/really
|
46888bf138c554ca844e1d9dcf7de14efc12bb86
|
[
"Apache-2.0"
] | null | null | null |
integration-test/helper.py
|
montaro/really
|
46888bf138c554ca844e1d9dcf7de14efc12bb86
|
[
"Apache-2.0"
] | null | null | null |
integration-test/helper.py
|
montaro/really
|
46888bf138c554ca844e1d9dcf7de14efc12bb86
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import print_function
import websocket
import unittest
import jwt
import datetime
import time
import json
import requests
class Helper(object):
really_server = 'ws://127.0.0.1:9000/v0.1/socket'
really_auth = 'http://127.0.0.1:8080/anonymous/login/'
@staticmethod
def get_token_outside():
secret_key = "jKZ<bf_VAT:0u`>nUcv/7<=`[wOJTqUKOlGkREwu7vdWEwXI>Yv5cO`hFA:5w5wI"
datetimeobj = datetime.datetime.now() + datetime.timedelta(2)
date_time_milis = int(time.mktime(datetimeobj.timetuple()) * 1000 + datetimeobj.microsecond / 1000)
payload = {
'uid': '1234567890',
'authType': 'anonymous',
'expires': date_time_milis,
'data': {}
}
return jwt.encode(payload, secret_key)
@staticmethod
def get_anonymous_token():
url = Helper.really_auth
headers = {'content-type': 'application/json'}
        result = requests.post(url, headers=headers)
return json.loads(result.content)['accessToken']
| 31.242424
| 107
| 0.651794
|
245a93aaf2fac775166e5604d885409fa5cbe009
| 5,701
|
py
|
Python
|
gym_MiniCheetahEnv/envs/src/world.py
|
dhanajaya78/Mini-Cheetah---PyBullet
|
5c113c004018ff6f247f5bbddc5d1ac52601d718
|
[
"MIT"
] | 1
|
2021-12-04T15:57:52.000Z
|
2021-12-04T15:57:52.000Z
|
gym_MiniCheetahEnv/envs/src/world.py
|
dhanajaya78/Mini-Cheetah---PyBullet
|
5c113c004018ff6f247f5bbddc5d1ac52601d718
|
[
"MIT"
] | null | null | null |
gym_MiniCheetahEnv/envs/src/world.py
|
dhanajaya78/Mini-Cheetah---PyBullet
|
5c113c004018ff6f247f5bbddc5d1ac52601d718
|
[
"MIT"
] | null | null | null |
import numpy as np
import gym
from gym import spaces
import math
import cv2
import random
import time
import pybullet
import pybullet_data
from src.mini_cheetah_class import Mini_Cheetah
from src.dynamics_randomization import DynamicsRandomizer
class Terrain():
def __init__(self,render = True,on_rack = False, terrain_type = 'plane'):
self._is_render = render
self._on_rack = on_rack
if self._is_render:
pybullet.connect(pybullet.GUI)
else:
pybullet.connect(pybullet.DIRECT)
#Robot Positions
self._robot_init_pos =[0,0,0.4]
self._robot_init_ori = [0, 0, 0, 1]
#Simulation Parameters
self.dt = 0.005
self._frame_skip = 25
pybullet.resetSimulation()
pybullet.setPhysicsEngineParameter(numSolverIterations=int(300))
pybullet.setTimeStep(self.dt/self._frame_skip)
pybullet.setGravity(0, 0, -9.8)
# Load Terrain
if(terrain_type == 'plane' or terrain_type == 'stairs'):
self.plane = pybullet.loadURDF("%s/plane.urdf" % pybullet_data.getDataPath())
pybullet.changeVisualShape(self.plane,-1,rgbaColor=[1,1,1,0.9])
if(terrain_type=='stairs'):
boxHalfLength = 0.15
boxHalfWidth = 1
boxHalfHeight = 0.05
sh_colBox = pybullet.createCollisionShape(pybullet.GEOM_BOX,halfExtents=[boxHalfLength,boxHalfWidth,boxHalfHeight])
boxOrigin = 1
n_steps = 15
self.stairs = []
for i in range(n_steps):
step =pybullet.createMultiBody(baseMass=0,baseCollisionShapeIndex = sh_colBox,basePosition = [boxOrigin + i*2*boxHalfLength,0,boxHalfHeight + i*2*boxHalfHeight],baseOrientation=[0.0,0.0,0.0,1])
self.stairs.append(step)
pybullet.changeDynamics(step, -1, lateralFriction=0.8)
elif(terrain_type == 'distorted'):
numHeightfieldRows = 256
numHeightfieldColumns = 256
heightPerturbationRange = 0.06
heightfieldData = [0]*numHeightfieldRows*numHeightfieldColumns
for j in range (int(numHeightfieldColumns/2)):
for i in range (int(numHeightfieldRows/2) ):
height = random.uniform(0,heightPerturbationRange)
heightfieldData[2*i+2*j*numHeightfieldRows]=height
heightfieldData[2*i+1+2*j*numHeightfieldRows]=height
heightfieldData[2*i+(2*j+1)*numHeightfieldRows]=height
heightfieldData[2*i+1+(2*j+1)*numHeightfieldRows]=height
terrainShape = pybullet.createCollisionShape(shapeType = pybullet.GEOM_HEIGHTFIELD, meshScale=[.05,.05,1], heightfieldTextureScaling=(numHeightfieldRows-1)/2, heightfieldData=heightfieldData, numHeightfieldRows=numHeightfieldRows, numHeightfieldColumns=numHeightfieldColumns)
self.plane = pybullet.createMultiBody(0, terrainShape)
#Load Robot
self.robot = Mini_Cheetah(pybullet)
self.DynaRandom = DynamicsRandomizer(pybullet,self.robot)
#Set Camera
self._cam_dist = 1.0
self._cam_yaw = 0.0
self._cam_pitch = 0.0
pybullet.resetDebugVisualizerCamera(self._cam_dist, self._cam_yaw, self._cam_pitch, [0, 0, 0])
if self._on_rack:
self.robot._set_on_rack()
def _simulate(self,torques):
for _ in range(self._frame_skip):
self.robot._apply_motor_torques(torques)
pybullet.stepSimulation()
def _reset_world(self):
# reset the robot
self.robot._reset_base()
self.robot._reset_legs()
# reset any disturbances in the terrain also (eg. obstacles)
pass
def _get_observation(self):
FPV_image = self._get_FPV_image()
_,base_orientation = self.robot._get_base_pose()
motor_angles, motor_velocities = self.robot._get_motor_states()
# flatten the observation and return accordingly
return FPV_image
def _get_FPV_image(self):
#FPV Camera Properties
width = 128
height = 128
fov = 60
aspect = width / height
near = 0.02
far = 20
        #View camera transformations
pos,ori = self.robot._get_base_pose()
ori = -1*np.array(ori)
camera_point, _ = pybullet.multiplyTransforms(pos, ori, [0.2+near,0,0], [0,0,0,1])
target_point, _ = pybullet.multiplyTransforms(pos, ori, [0.2+far,0,0], [0,0,0,1])
up_vector, _ = pybullet.multiplyTransforms(pos, ori, [0,0,1], [0,0,0,1])
view_matrix = pybullet.computeViewMatrix(camera_point, target_point, up_vector)
projection_matrix = pybullet.computeProjectionMatrixFOV(fov, aspect, near, far)
# Get depth values using the OpenGL renderer
images = pybullet.getCameraImage(width,
height,
view_matrix,
projection_matrix,
shadow=True,
renderer=pybullet.ER_BULLET_HARDWARE_OPENGL)
#rgb and depth components
rgb_opengl = np.reshape(images[2], (height, width, 4))
depth_buffer_opengl = np.reshape(images[3], [width, height])
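        # Linearize the non-linear OpenGL depth buffer (values in [0, 1]) back
        # to metric depth between the near and far clipping planes.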
depth_opengl = far * near / (far - (far - near) * depth_buffer_opengl)
seg_opengl = np.reshape(images[4], [width, height]) * 1. / 255.
# converting to openCV colour space
rgb_image = cv2.cvtColor(rgb_opengl, cv2.COLOR_BGR2RGB)
return rgb_image
| 37.506579
| 287
| 0.619891
|
4a0adf403b61ec03a0e0ec17a87ef3b59a0d5fed
| 187
|
py
|
Python
|
contact_us/apps.py
|
ohahlev/ahlev-django-contact-us
|
a1e8e22bba16ca79ee355ac12a4627df29f76ce8
|
[
"BSD-3-Clause"
] | null | null | null |
contact_us/apps.py
|
ohahlev/ahlev-django-contact-us
|
a1e8e22bba16ca79ee355ac12a4627df29f76ce8
|
[
"BSD-3-Clause"
] | null | null | null |
contact_us/apps.py
|
ohahlev/ahlev-django-contact-us
|
a1e8e22bba16ca79ee355ac12a4627df29f76ce8
|
[
"BSD-3-Clause"
] | null | null | null |
from django.apps import AppConfig
from . import __version__ as VERSION
class ContactUsConfig(AppConfig):
name = "contact_us"
verbose_name = "Contact Us Management %s" % VERSION
| 23.375
| 55
| 0.754011
|
cd289353e446ddbb6307bfde167fcf5a3b506ea9
| 2,130
|
py
|
Python
|
eval.py
|
SeoSangwoo/gcn-over-pruned-trees
|
d82cf374457a8da84719d89d414330ae4849d5ba
|
[
"Apache-2.0"
] | 380
|
2018-09-27T19:02:49.000Z
|
2022-03-10T12:19:32.000Z
|
eval.py
|
RexYing/gcn-over-pruned-trees
|
e574f4d0336bf807791dde1e97f406c0001e3397
|
[
"Apache-2.0"
] | 24
|
2018-10-03T15:03:01.000Z
|
2021-06-10T11:55:15.000Z
|
eval.py
|
RexYing/gcn-over-pruned-trees
|
e574f4d0336bf807791dde1e97f406c0001e3397
|
[
"Apache-2.0"
] | 75
|
2018-10-10T07:06:13.000Z
|
2022-03-20T14:01:37.000Z
|
"""
Run evaluation with saved models.
"""
import random
import argparse
from tqdm import tqdm
import torch
from data.loader import DataLoader
from model.trainer import GCNTrainer
from utils import torch_utils, scorer, constant, helper
from utils.vocab import Vocab
parser = argparse.ArgumentParser()
parser.add_argument('model_dir', type=str, help='Directory of the model.')
parser.add_argument('--model', type=str, default='best_model.pt', help='Name of the model file.')
parser.add_argument('--data_dir', type=str, default='dataset/tacred')
parser.add_argument('--dataset', type=str, default='test', help="Evaluate on dev or test.")
parser.add_argument('--seed', type=int, default=1234)
parser.add_argument('--cuda', type=bool, default=torch.cuda.is_available())
parser.add_argument('--cpu', action='store_true')
args = parser.parse_args()
torch.manual_seed(args.seed)
random.seed(1234)
if args.cpu:
args.cuda = False
elif args.cuda:
torch.cuda.manual_seed(args.seed)
# load opt
model_file = args.model_dir + '/' + args.model
print("Loading model from {}".format(model_file))
opt = torch_utils.load_config(model_file)
trainer = GCNTrainer(opt)
trainer.load(model_file)
# load vocab
vocab_file = args.model_dir + '/vocab.pkl'
vocab = Vocab(vocab_file, load=True)
assert opt['vocab_size'] == vocab.size, "Vocab size must match that in the saved model."
# load data
data_file = opt['data_dir'] + '/{}.json'.format(args.dataset)
print("Loading data from {} with batch size {}...".format(data_file, opt['batch_size']))
batch = DataLoader(data_file, opt['batch_size'], opt, vocab, evaluation=True)
helper.print_config(opt)
label2id = constant.LABEL_TO_ID
id2label = dict([(v,k) for k,v in label2id.items()])
predictions = []
all_probs = []
batch_iter = tqdm(batch)
for i, b in enumerate(batch_iter):
preds, probs, _ = trainer.predict(b)
predictions += preds
all_probs += probs
predictions = [id2label[p] for p in predictions]
p, r, f1 = scorer.score(batch.gold(), predictions, verbose=True)
print("{} set evaluate result: {:.2f}\t{:.2f}\t{:.2f}".format(args.dataset,p,r,f1))
print("Evaluation ended.")
| 31.791045
| 97
| 0.730986
|
bfeeaa3d5b2123742619d80d2b6aa1ad057d1348
| 1,898
|
py
|
Python
|
scripts/spack/packages/py-polyclipper/package.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 22
|
2018-07-31T21:38:22.000Z
|
2020-06-29T08:58:33.000Z
|
scripts/spack/packages/py-polyclipper/package.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 17
|
2020-01-05T08:41:46.000Z
|
2020-09-18T00:08:32.000Z
|
scripts/spack/packages/py-polyclipper/package.py
|
jmikeowen/Spheral
|
3e1082a7aefd6b328bd3ae24ca1a477108cfc3c4
|
[
"BSD-Source-Code",
"BSD-3-Clause-LBNL",
"FSFAP"
] | 7
|
2019-12-01T07:00:06.000Z
|
2020-09-15T21:12:39.000Z
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPolyclipper(CMakePackage, PythonPackage):
"""Polyclipper"""
homepage = "https://pypi.org/project/PYB11Generator/"
url = "https://github.com/LLNL/PolyClipper/archive/refs/tags/v1.2.3.zip"
git = "https://github.com/LLNL/PolyClipper"
maintainers = ['mdavis36','jmikeowen']
version('1.2.3', sha256='366e547bc343033c760727b6cdbf34a304c27bc769a208e9bfaeec42c92dba96')
variant('mpi', default=False, description='Enable MPI Support.')
variant('openmp', default=True, description='Enable OpenMP Support.')
variant('docs', default=False, description='Enable building Docs.')
depends_on('mpi', when='+mpi')
depends_on('blt')
depends_on('py-pybind11')
depends_on('py-pyb11generator')
depends_on('py-decorator')
def cmake_args(self):
spec = self.spec
args = []
args.append(self.define('POLYCLIPPER_BLT_DIR', spec['blt'].prefix))
args.append(self.define('ENABLE_CXXONLY', True))
args.append(self.define('PYTHON_EXE', spec['python'].prefix+'/bin/python'))
args.append(self.define('PYBIND11_INCLUDE_PATH', spec['py-pybind11'].prefix+'/include'))
args.append(self.define('PYB11GEN_PATH', spec['py-pyb11generator'].prefix+'/lib/python2.7/site-packages'))
args.append(self.define('ENABLE_MPI', '+mpi' in spec))
if "+mpi" in spec:
args.append(self.define('MPI_C_COMPILER', spec['mpi'].mpicc) )
args.append(self.define('MPI_CXX_COMPILER', spec['mpi'].mpicxx) )
args.append(self.define('ENABLE_OPENMP', '+openmp' in spec))
args.append(self.define('ENABLE_DOCS', '+docs' in spec))
return args
| 38.734694
| 114
| 0.675448
|
766d8dc8631798a874530df8264e1bcc3604d8ec
| 13,956
|
py
|
Python
|
tasks/_iblrig_tasks_ephysChoiceWorld/session_params.py
|
alejandropan/iblrig
|
d8e746ccc52c2ad325404077ad2403e165e94d0c
|
[
"MIT"
] | null | null | null |
tasks/_iblrig_tasks_ephysChoiceWorld/session_params.py
|
alejandropan/iblrig
|
d8e746ccc52c2ad325404077ad2403e165e94d0c
|
[
"MIT"
] | 4
|
2019-04-03T06:11:48.000Z
|
2019-06-14T00:12:19.000Z
|
tasks/_iblrig_tasks_ephysChoiceWorld/session_params.py
|
alejandropan/iblrig
|
d8e746ccc52c2ad325404077ad2403e165e94d0c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Niccolò Bonacchi
# @Date: 2018-02-02 17:19:09
import os
import sys
from sys import platform
from pathlib import Path
import logging
from pythonosc import udp_client
from ibllib.graphic import numinput, multi_input
sys.path.append(str(Path(__file__).parent.parent)) # noqa
sys.path.append(str(Path(__file__).parent.parent.parent.parent)) # noqa
import adaptive
import ambient_sensor
import bonsai
import iotasks
import misc
import sound
from path_helper import SessionPathCreator
from rotary_encoder import MyRotaryEncoder
log = logging.getLogger('iblrig')
class SessionParamHandler(object):
"""Session object imports user_settings and task_settings
will and calculates other secondary session parameters,
runs Bonsai and saves all params in a settings file.json"""
def __init__(self, task_settings, user_settings, debug=False, fmake=True):
self.DEBUG = debug
make = True
self.IBLRIG_FOLDER = 'C:\\iblrig'
self.IBLRIG_DATA_FOLDER = None # ..\\iblrig_data if None
# =====================================================================
# IMPORT task_settings, user_settings, and SessionPathCreator params
# =====================================================================
ts = {i: task_settings.__dict__[i]
for i in [x for x in dir(task_settings) if '__' not in x]}
self.__dict__.update(ts)
us = {i: user_settings.__dict__[i]
for i in [x for x in dir(user_settings) if '__' not in x]}
self.__dict__.update(us)
self = iotasks.deserialize_pybpod_user_settings(self)
spc = SessionPathCreator(self.IBLRIG_FOLDER, self.IBLRIG_DATA_FOLDER,
self.PYBPOD_SUBJECTS[0],
protocol=self.PYBPOD_PROTOCOL,
board=self.PYBPOD_BOARD, make=make)
self.__dict__.update(spc.__dict__)
# =====================================================================
# SETTINGS
# =====================================================================
self.RECORD_SOUND = True
self.RECORD_AMBIENT_SENSOR_DATA = True
self.RECORD_VIDEO = True
self.OPEN_CAMERA_VIEW = True # Always True if RECORD_VIDEO is True
self.NTRIALS = 2000 # Number of trials for the current session
        self.USE_AUTOMATIC_STOPPING_CRITERIONS = True  # Whether to check the automatic stopping criteria or not  # noqa
self.REPEAT_ON_ERROR = False # not used
self.INTERACTIVE_DELAY = 0.0
self.RESPONSE_WINDOW = 60
self.ITI_CORRECT = 1
self.ITI_ERROR = 2
self.CONTRAST_SET = [1., 0.25, 0.125, 0.0625, 0.] # Full contrast set
self.CONTRAST_SET_PROBABILITY_TYPE = 'biased'
self.STIM_FREQ = 0.10 # Probably constant - NOT IN USE
self.STIM_ANGLE = 0. # Vertical orientation of Gabor patch
self.STIM_SIGMA = 7. # (azimuth_degree) Size of Gabor patch
self.STIM_GAIN = 4. # (azimuth_degree/mm) Gain of the RE
# =====================================================================
# SUBJECT
# =====================================================================
self.SUBJECT_WEIGHT = self.get_subject_weight()
self.POOP_COUNT = True
# =====================================================================
# OSC CLIENT
# =====================================================================
self.OSC_CLIENT_PORT = 7110
self.OSC_CLIENT_IP = '127.0.0.1'
self.OSC_CLIENT = udp_client.SimpleUDPClient(self.OSC_CLIENT_IP,
self.OSC_CLIENT_PORT)
# =====================================================================
# PREVIOUS DATA FILES
# =====================================================================
self.LAST_TRIAL_DATA = iotasks.load_data(self.PREVIOUS_SESSION_PATH)
self.LAST_SETTINGS_DATA = iotasks.load_settings(
self.PREVIOUS_SESSION_PATH)
self.SESSION_ORDER = []
self.SESSION_IDX = None
self = iotasks.load_session_order_and_idx(self)
# Load from file
self.POSITIONS = None
self.CONTRASTS = None
self.QUIESCENT_PERIOD = None
self.STIM_PHASE = None
self.LEN_BLOCKS = None
self = iotasks.load_session_pcqs(self)
# =====================================================================
# ADAPTIVE STUFF
# =====================================================================
self.AUTOMATIC_CALIBRATION = True
self.CALIBRATION_VALUE = 0.067
self.REWARD_AMOUNT = 3.
self.REWARD_TYPE = 'Water 10% Sucrose'
self.CALIB_FUNC = adaptive.init_calib_func(self)
self.CALIB_FUNC_RANGE = adaptive.init_calib_func_range(self)
self.REWARD_VALVE_TIME = adaptive.init_reward_valve_time(self)
# =====================================================================
# ROTARY ENCODER
# =====================================================================
self.STIM_POSITIONS = [-35, 35] # All possible positions (deg)
self.QUIESCENCE_THRESHOLDS = [-2, 2] # degree
self.ALL_THRESHOLDS = (self.STIM_POSITIONS +
self.QUIESCENCE_THRESHOLDS)
self.ROTARY_ENCODER = MyRotaryEncoder(self.ALL_THRESHOLDS,
self.STIM_GAIN,
self.COM['ROTARY_ENCODER'])
# =====================================================================
# SOUNDS
# =====================================================================
self.SOFT_SOUND = None
self.SOUND_SAMPLE_FREQ = sound.sound_sample_freq(self.SOFT_SOUND)
self.SOUND_BOARD_BPOD_PORT = 'Serial3'
self.WHITE_NOISE_DURATION = float(0.5)
self.WHITE_NOISE_AMPLITUDE = float(0.05)
self.GO_TONE_DURATION = float(0.1)
self.GO_TONE_FREQUENCY = int(5000)
self.GO_TONE_AMPLITUDE = float(0.1)
self.SD = sound.configure_sounddevice(
output=self.SOFT_SOUND, samplerate=self.SOUND_SAMPLE_FREQ)
# Create sounds and output actions of state machine
self.GO_TONE = sound.make_sound(
rate=self.SOUND_SAMPLE_FREQ, frequency=self.GO_TONE_FREQUENCY,
duration=self.GO_TONE_DURATION, amplitude=self.GO_TONE_AMPLITUDE,
fade=0.01, chans='stereo')
self.WHITE_NOISE = sound.make_sound(
rate=self.SOUND_SAMPLE_FREQ, frequency=-1,
duration=self.WHITE_NOISE_DURATION,
amplitude=self.WHITE_NOISE_AMPLITUDE, fade=0.01, chans='stereo')
self.GO_TONE_IDX = 2
self.WHITE_NOISE_IDX = 3
sound.configure_sound_card(
sounds=[self.GO_TONE, self.WHITE_NOISE],
indexes=[self.GO_TONE_IDX, self.WHITE_NOISE_IDX],
sample_rate=self.SOUND_SAMPLE_FREQ)
# =====================================================================
# VISUAL STIM
# =====================================================================
self.SYNC_SQUARE_X = 0.95
self.SYNC_SQUARE_Y = 0.17
self.USE_VISUAL_STIMULUS = True # Run the visual stim in bonsai
self.BONSAI_EDITOR = False # Open the Bonsai editor of visual stim
bonsai.start_visual_stim(self)
self.get_recording_site_data()
# =====================================================================
# SAVE SETTINGS FILE AND TASK CODE
# =====================================================================
if not self.DEBUG:
iotasks.save_session_settings(self)
iotasks.copy_task_code(self)
iotasks.save_task_code(self)
self.bpod_lights(0)
self.display_logs()
# =========================================================================
# METHODS
# =========================================================================
def get_recording_site_data(self):
title = 'Recording site'
        fields = ['X (float):', 'Y (float):', 'Z (float):', 'D (float):',
'Angle (10 or 20):', 'Origin (bregma or lambda):']
defaults = [None, None, None, None, '10', 'bregma']
types = [float, float, float, float, int, str]
userdata = multi_input(
title=title, add_fields=fields, defaults=defaults)
try:
out = [t(x) for x, t in zip(userdata, types)]
            self.REC_SITE = {'xyzd': out[:4], 'angle': out[4], 'origin': out[5]}
return out
except Exception:
log.warning(
f"One or more inputs are of the wrong type. Expected {types}")
return self.get_recording_site_data()
def save_ambient_sensor_reading(self, bpod_instance):
return ambient_sensor.get_reading(bpod_instance,
save_to=self.SESSION_RAW_DATA_FOLDER)
def get_subject_weight(self):
return numinput(
"Subject weighing (gr)", f"{self.PYBPOD_SUBJECTS[0]} weight (gr):",
nullable=False)
def bpod_lights(self, command: int):
fpath = Path(self.IBLRIG_PARAMS_FOLDER) / 'bpod_lights.py'
os.system(f"python {fpath} {command}")
def get_port_events(self, events, name=''):
return misc.get_port_events(events, name=name)
# =========================================================================
# SOUND INTERFACE FOR STATE MACHINE
# =========================================================================
def play_tone(self):
self.SD.play(self.GO_TONE, self.SOUND_SAMPLE_FREQ)
def play_noise(self):
self.SD.play(self.WHITE_NOISE, self.SOUND_SAMPLE_FREQ)
def stop_sound(self):
self.SD.stop()
# =========================================================================
# JSON ENCODER PATCHES
# =========================================================================
def reprJSON(self):
def remove_from_dict(sx):
if "weighings" in sx.keys():
sx["weighings"] = None
if "water_administration" in sx.keys():
sx["water_administration"] = None
return sx
d = self.__dict__.copy()
d['GO_TONE'] = 'go_tone(freq={}, dur={}, amp={})'.format(
self.GO_TONE_FREQUENCY, self.GO_TONE_DURATION,
self.GO_TONE_AMPLITUDE)
d['WHITE_NOISE'] = 'white_noise(freq=-1, dur={}, amp={})'.format(
self.WHITE_NOISE_DURATION, self.WHITE_NOISE_AMPLITUDE)
d['SD'] = str(d['SD'])
d['OSC_CLIENT'] = str(d['OSC_CLIENT'])
d['SESSION_DATETIME'] = self.SESSION_DATETIME.isoformat()
d['CALIB_FUNC'] = str(d['CALIB_FUNC'])
d['CALIB_FUNC_RANGE'] = str(d['CALIB_FUNC_RANGE'])
if isinstance(d['PYBPOD_SUBJECT_EXTRA'], list):
sub = []
for sx in d['PYBPOD_SUBJECT_EXTRA']:
sub.append(remove_from_dict(sx))
d['PYBPOD_SUBJECT_EXTRA'] = sub
elif isinstance(d['PYBPOD_SUBJECT_EXTRA'], dict):
d['PYBPOD_SUBJECT_EXTRA'] = remove_from_dict(
d['PYBPOD_SUBJECT_EXTRA'])
d['LAST_TRIAL_DATA'] = None
d['LAST_SETTINGS_DATA'] = None
d['POSITIONS'] = None
d['CONTRASTS'] = None
d['QUIESCENT_PERIOD'] = None
d['STIM_PHASE'] = None
d['LEN_BLOCKS'] = None
return d
def display_logs(self):
if self.PREVIOUS_DATA_FILE:
msg = f"""
##########################################
PREVIOUS SESSION FOUND
LOADING PARAMETERS FROM: {self.PREVIOUS_DATA_FILE}
PREVIOUS SESSION NUMBER: {self.LAST_SETTINGS_DATA['SESSION_IDX'] + 1}
PREVIOUS NTRIALS: {self.LAST_TRIAL_DATA["trial_num"]}
PREVIOUS WATER DRANK: {self.LAST_TRIAL_DATA['water_delivered']}
PREVIOUS WEIGHT: {self.LAST_SETTINGS_DATA['SUBJECT_WEIGHT']}
##########################################"""
log.info(msg)
if __name__ == '__main__':
"""
SessionParamHandler fmake flag=False disables:
making folders/files;
SessionParamHandler debug flag disables:
running auto calib;
calling bonsai
turning off lights of bpod board
"""
import task_settings as _task_settings
import scratch._user_settings as _user_settings
import datetime
dt = datetime.datetime.now()
dt = [str(dt.year), str(dt.month), str(dt.day),
str(dt.hour), str(dt.minute), str(dt.second)]
dt = [x if int(x) >= 10 else '0' + x for x in dt]
dt.insert(3, '-')
_user_settings.PYBPOD_SESSION = ''.join(dt)
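    # Note: the zero-padded stamp built above is equivalent to this strftime
    # one-liner (illustrative alternative only, not used by the task):
    # _user_settings.PYBPOD_SESSION = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')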
_user_settings.PYBPOD_SETUP = 'biasedChoiceWorld'
_user_settings.PYBPOD_PROTOCOL = '_iblrig_tasks_biasedChoiceWorld'
if platform == 'linux':
r = "/home/nico/Projects/IBL/github/iblrig"
_task_settings.IBLRIG_FOLDER = r
d = ("/home/nico/Projects/IBL/github/iblrig/scratch/" +
"test_iblrig_data")
_task_settings.IBLRIG_DATA_FOLDER = d
_task_settings.AUTOMATIC_CALIBRATION = False
_task_settings.USE_VISUAL_STIMULUS = False
sph = SessionParamHandler(_task_settings, _user_settings,
debug=False, fmake=True)
for k in sph.__dict__:
if sph.__dict__[k] is None:
print(f"{k}: {sph.__dict__[k]}")
self = sph
print("Done!")
| 44.874598
| 127
| 0.517054
|
81b186512ed8f925bdbbee126bb3df7e8aad0161
| 3,793
|
py
|
Python
|
IMLearn/learners/classifiers/gaussian_naive_bayes.py
|
TomShimshi/IML.HUJI
|
68dc17396519f0de4b7f6da8f9fd37f49d8ee3ff
|
[
"MIT"
] | null | null | null |
IMLearn/learners/classifiers/gaussian_naive_bayes.py
|
TomShimshi/IML.HUJI
|
68dc17396519f0de4b7f6da8f9fd37f49d8ee3ff
|
[
"MIT"
] | null | null | null |
IMLearn/learners/classifiers/gaussian_naive_bayes.py
|
TomShimshi/IML.HUJI
|
68dc17396519f0de4b7f6da8f9fd37f49d8ee3ff
|
[
"MIT"
] | null | null | null |
from typing import NoReturn
from ...base import BaseEstimator
import numpy as np
class GaussianNaiveBayes(BaseEstimator):
"""
Gaussian Naive-Bayes classifier
"""
def __init__(self):
"""
Instantiate a Gaussian Naive Bayes classifier
Attributes
----------
self.classes_ : np.ndarray of shape (n_classes,)
The different labels classes. To be set in `GaussianNaiveBayes.fit`
self.mu_ : np.ndarray of shape (n_classes,n_features)
The estimated features means for each class. To be set in `GaussianNaiveBayes.fit`
self.vars_ : np.ndarray of shape (n_classes, n_features)
The estimated features variances for each class. To be set in `GaussianNaiveBayes.fit`
self.pi_: np.ndarray of shape (n_classes)
The estimated class probabilities. To be set in `GaussianNaiveBayes.fit`
"""
super().__init__()
self.classes_, self.mu_, self.vars_, self.pi_ = None, None, None, None
def _fit(self, X: np.ndarray, y: np.ndarray) -> NoReturn:
"""
fits a gaussian naive bayes model
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input data to fit an estimator for
y : ndarray of shape (n_samples, )
Responses of input data to fit to
"""
self.classes_, classes_count = np.unique(y, return_counts=True)
self.pi_ = classes_count / X.shape[0]
self.mu_ = [np.mean(X[y == k], axis=0) for k in self.classes_]
self.vars_ = [np.mean(np.power(X[y == j] - self.mu_[i], 2), axis=0)
for i, j in enumerate(self.classes_)]
def _predict(self, X: np.ndarray) -> np.ndarray:
"""
Predict responses for given samples using fitted estimator
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Input data to predict responses for
Returns
-------
responses : ndarray of shape (n_samples, )
Predicted responses of given samples
"""
return self.classes_[np.argmax(self.likelihood(X), 1)]
def likelihood(self, X: np.ndarray) -> np.ndarray:
"""
Calculate the likelihood of a given data over the estimated model
Parameters
----------
X : np.ndarray of shape (n_samples, n_features)
Input data to calculate its likelihood over the different classes.
Returns
-------
likelihoods : np.ndarray of shape (n_samples, n_classes)
The likelihood for each sample under each of the classes
"""
if not self.fitted_:
raise ValueError("Estimator must first be fitted before calling `likelihood` function")
samples, features = X.shape
probs = np.zeros((samples, len(self.classes_)))
for c in range(len(self.classes_)):
probs[:, c] = -features / 2 * np.log(2 * np.pi) - 0.5 * np.sum(np.log(self.vars_[c])) \
- (0.5 * np.sum(np.power(X - self.mu_[c], 2) / (self.vars_[c]), 1)) + \
np.log(self.pi_[c])
return probs
def _loss(self, X: np.ndarray, y: np.ndarray) -> float:
"""
Evaluate performance under misclassification loss function
Parameters
----------
X : ndarray of shape (n_samples, n_features)
Test samples
y : ndarray of shape (n_samples, )
True labels of test samples
Returns
-------
loss : float
Performance under missclassification loss function
"""
from ...metrics import misclassification_error
return misclassification_error(y, self._predict(X))
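# Illustrative usage (sketch; assumes the package's BaseEstimator exposes the
# public fit / predict / loss wrappers around _fit / _predict / _loss, and that
# X is shaped (n_samples, n_features) with integer labels y):
#
#     gnb = GaussianNaiveBayes()
#     gnb.fit(X_train, y_train)          # estimates classes_, pi_, mu_, vars_
#     y_hat = gnb.predict(X_test)        # argmax of per-class log-likelihoods
#     error = gnb.loss(X_test, y_test)   # misclassification error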
| 33.866071
| 99
| 0.584498
|
d50ac7cf560aba1c525868d897567c11fc74d49e
| 76
|
py
|
Python
|
bulq/plugins/bulq_decoder_gzip/__init__.py
|
koji-m/bulq
|
78f97d2e57d6bcb0ec3fa2b0c7539db3ebaa104a
|
[
"Apache-2.0"
] | null | null | null |
bulq/plugins/bulq_decoder_gzip/__init__.py
|
koji-m/bulq
|
78f97d2e57d6bcb0ec3fa2b0c7539db3ebaa104a
|
[
"Apache-2.0"
] | null | null | null |
bulq/plugins/bulq_decoder_gzip/__init__.py
|
koji-m/bulq
|
78f97d2e57d6bcb0ec3fa2b0c7539db3ebaa104a
|
[
"Apache-2.0"
] | null | null | null |
from . import bulq_decoder_gzip
plugin = bulq_decoder_gzip.BulqDecoderGzip
| 19
| 42
| 0.855263
|
f51846240604d696334916648d7e1bcad1a1dd99
| 5,904
|
py
|
Python
|
debiai/debiai_selection.py
|
DebiAI/py-debiai
|
fd784fd1ca7a59c38714275b6fad53ba9f09eaa7
|
[
"Apache-2.0"
] | 1
|
2022-03-01T13:17:16.000Z
|
2022-03-01T13:17:16.000Z
|
debiai/debiai_selection.py
|
debiai/py-debiai
|
fd784fd1ca7a59c38714275b6fad53ba9f09eaa7
|
[
"Apache-2.0"
] | null | null | null |
debiai/debiai_selection.py
|
debiai/py-debiai
|
fd784fd1ca7a59c38714275b6fad53ba9f09eaa7
|
[
"Apache-2.0"
] | null | null | null |
from typing import List
import numpy as np
import pandas as pd
import hashlib
import utils as utils
import debiai_utils as debiai_utils
DEBIAI_TYPES = ["contexts", "inputs", "groundTruth", "others"]
class Debiai_selection:
"""
    A Debiai data selection: a list of sample ids.
    It can belong to a request.
"""
    def __init__(self, project, name: str, id: str, creationDate: int, nbSamples: int, requestId: str):
self.project = project
self.name = name
self.id = id
self.creationDate = creationDate
self.nbSamples = nbSamples
self.requestId = requestId
def __repr__(self):
return (
"DEBIAI selection : '" + str(self.name) + "'\n"
"creation date : '" +
utils.timestamp_to_date(self.creationDate) + "'\n"
"number of samples : '" + str(self.nbSamples) + "'\n"
)
def get_numpy(self) -> np.array:
# Pulls all the selection data
sampleTree = utils.get_samples_from_selection(
self.project.backend_url, self.project.id, self.id)
block_structure = self.project.project_infos()['blockLevelInfo']
"""
tree structure :
[
{
'id',
'creationDate',
'groundTruth',
'inputs',
'level',
'metaDataList',
'name',
'parentPath',
'path',
'updateDate',
'version',
'childrenInfoList' : {
}
}
]
"""
columns = np.array([])
# Create the first row with the column names
for block in block_structure:
columns = np.append(columns, block['name'])
for debiai_type in DEBIAI_TYPES:
if debiai_type in block:
for column in block[debiai_type]:
columns = np.append(columns, column['name'])
data = debiai_utils.tree_to_array(block_structure, sampleTree)
return np.vstack([columns, data])
def get_dataframe(self) -> pd.DataFrame:
# Pull the selected samples from the backend
# returns a pd.DataFrame
numpy = self.get_numpy()
col = numpy[0]
df = pd.DataFrame(data=numpy[1:], columns=col)
# Convert object columns to number columns
cols = df.columns[df.dtypes.eq('object')]
df[cols] = df[cols].apply(pd.to_numeric, errors='ignore')
return df
# Tensorflow dataset generator
def get_tf_dataset(self) -> 'tf.data.Dataset':
import tensorflow as tf
block_structure = self.project.project_infos()['blockLevelInfo']
excepted_inputs = []
excepted_gdt = []
for level in block_structure:
if "inputs" in level:
excepted_inputs += level['inputs']
if "groundTruth" in level:
excepted_gdt += level['groundTruth']
return tf.data.Dataset.from_generator(
self.__load_samples, (tf.float32, tf.int32),
((len(excepted_inputs), ), (len(excepted_gdt), ))
)
def __load_samples(self):
        PACH_SIZE = 1000  # Pull samples in batches of PACH_SIZE
for i in range(0, self.nbSamples, PACH_SIZE):
# Pull a sample tree
sampleTree = utils.get_training_samples_from_selection(
self.project.backend_url, self.project.id, self.id, i, PACH_SIZE)
# Extract inputs & gdt
inputs, gdt = debiai_utils.get_inputs_and_gdt_patch(
self.project.block_structure, sampleTree)
            # Pull indirect inputs (images, ...) from external sources
# TODO : try with Faurecia
# Yield each one of the samples to the dataset
for j in range(len(inputs)):
yield inputs[j], gdt[j]
# TODO : create a clean progress bar
print(str(i) + "/" + str(self.nbSamples))
def get_tf_dataset_with_provided_inputs(self,
input_function: 'function',
output_types: tuple,
output_shapes: tuple,
classes: list
) -> 'tf.data.Dataset':
import tensorflow as tf
self.dataset_generator_input_function = input_function
self.dataset_generator_classes = classes
block_structure = self.project.project_infos()['blockLevelInfo']
return tf.data.Dataset.from_generator(self.__load_samples_with_provided_inputs,
output_types=output_types,
output_shapes=output_shapes)
def __load_samples_with_provided_inputs(self):
        PACH_SIZE = 1000  # Pull samples in batches of PACH_SIZE
        # Only deals with a single ground truth. TODO: handle more than one gdt
for i in range(0, self.nbSamples, PACH_SIZE):
# Pull a sample tree
sampleTree = utils.get_training_samples_from_selection(
self.project.backend_url, self.project.id, self.id, i, PACH_SIZE)
# Extract samples & gdt
samples, gdt = debiai_utils.get_samples_and_gdt_patch(
self.project.block_structure, sampleTree)
# Yield each one of the samples to the dataset
for j in range(len(samples)):
inputs = self.dataset_generator_input_function(samples[j])
gdt_number = self.dataset_generator_classes.index(gdt[j][0])
yield inputs, [gdt_number]
# TODO : create a clean progress bar
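# Minimal, self-contained sketch of the tf.data.Dataset.from_generator pattern
# used by the two methods above (assumption: TensorFlow is installed; shapes,
# dtypes and the toy generator are illustrative only):
#
#     import tensorflow as tf
#
#     def _toy_generator():
#         for i in range(4):
#             yield [float(i)], [i % 2]        # (inputs, ground truth)
#
#     toy_ds = tf.data.Dataset.from_generator(
#         _toy_generator, (tf.float32, tf.int32), ((1,), (1,)))
#     for x, y in toy_ds:
#         print(x.numpy(), y.numpy())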
| 35.353293
| 103
| 0.553523
|
10ef60d0db61e4169244f36332b8b8a0bf4535f3
| 9,248
|
py
|
Python
|
tensorflow/contrib/solvers/python/ops/lanczos.py
|
PedroLelis/tensorflow
|
8852b0032ad49acbc59009776665c60f86c06f91
|
[
"Apache-2.0"
] | 1
|
2019-02-16T10:41:53.000Z
|
2019-02-16T10:41:53.000Z
|
tensorflow/contrib/solvers/python/ops/lanczos.py
|
PedroLelis/tensorflow
|
8852b0032ad49acbc59009776665c60f86c06f91
|
[
"Apache-2.0"
] | null | null | null |
tensorflow/contrib/solvers/python/ops/lanczos.py
|
PedroLelis/tensorflow
|
8852b0032ad49acbc59009776665c60f86c06f91
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Lanczos algorithms."""
# TODO(rmlarsen): Add implementation of symmetric Lanczos algorithm.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import tensorflow as tf
from tensorflow.contrib.solvers.python.ops import util
def lanczos_bidiag(operator,
k,
orthogonalize=True,
starting_vector=None,
name="lanczos_bidiag"):
"""Computes a Lanczos bidiagonalization for a linear operator.
Computes matrices `U` of shape `[m, k+1]`, `V` of shape `[n, k]` and lower
bidiagonal matrix `B` of shape `[k+1, k]`, that satisfy the equations
`A * V = U * B` and `A' * U[:, :-1] = V * B[:-1, :]'`.
The columns of `U` are orthonormal and form a basis for the Krylov subspace
`K(A*A', U[:,0])`.
The columns of `V` are orthonormal and form a basis for the Krylov subspace
`K(A'*A, A' U[:,0])`.
Args:
operator: An object representing a linear operator with attributes:
- shape: Either a list of integers or a 1-D `Tensor` of type `int32` of
length 2. `shape[0]` is the dimension on the domain of the operator,
`shape[1]` is the dimension of the co-domain of the operator. On other
words, if operator represents an M x N matrix A, `shape` must contain
`[M, N]`.
- dtype: The datatype of input to and output from `apply` and
`apply_adjoint`.
- apply: Callable object taking a vector `x` as input and returning a
vector with the result of applying the operator to `x`, i.e. if
`operator` represents matrix `A`, `apply` should return `A * x`.
- apply_adjoint: Callable object taking a vector `x` as input and
returning a vector with the result of applying the adjoint operator
to `x`, i.e. if `operator` represents matrix `A`, `apply_adjoint` should
return `conj(transpose(A)) * x`.
k: An integer or a scalar Tensor of type `int32`. Determines the maximum
number of steps to run. If an invariant subspace is found, the algorithm
may terminate before `k` steps have been run.
orthogonalize: If `True`, perform full orthogonalization. If `False` no
orthogonalization is performed.
starting_vector: If not null, must be a `Tensor` of shape `[n]`.
name: A name scope for the operation.
Returns:
output: A namedtuple representing a Lanczos bidiagonalization of
`operator` with attributes:
u: A rank-2 `Tensor` of type `operator.dtype` and shape
`[operator.shape[0], k_actual+1]`, where `k_actual` is the number of
steps run.
v: A rank-2 `Tensor` of type `operator.dtype` and shape
`[operator.shape[1], k_actual]`, where `k_actual` is the number of steps
run.
alpha: A rank-1 `Tensor` of type `operator.dtype` and shape `[k]`.
beta: A rank-1 `Tensor` of type `operator.dtype` and shape `[k]`.
"""
def tarray(size, dtype, name):
return tf.TensorArray(
dtype=dtype,
size=size,
tensor_array_name=name,
clear_after_read=False)
# Reads a row-vector at location i in tarray and returns it as a
# column-vector.
def read_colvec(tarray, i):
return tf.expand_dims(tarray.read(i), -1)
  # Writes a column-vector as a row-vector at location i in tarray.
def write_colvec(tarray, colvec, i):
return tarray.write(i, tf.squeeze(colvec))
# Ephemeral class holding Lanczos bidiagonalization state:
# u = left Lanczos vectors
# v = right Lanczos vectors
# alpha = diagonal of B_k.
# beta = subdiagonal of B_k.
# Notice that we store the left and right Lanczos vectors as the _rows_
# of u and v. This is done because tensors are stored row-major and
# TensorArray only supports packing along dimension 0.
lanzcos_bidiag_state = collections.namedtuple("LanczosBidiagState",
["u", "v", "alpha", "beta"])
def update_state(old, i, u, v, alpha, beta):
return lanzcos_bidiag_state(
write_colvec(old.u, u, i + 1),
write_colvec(old.v, v, i),
old.alpha.write(i, alpha),
old.beta.write(i, beta))
def gram_schmidt_step(j, basis, v):
"""Makes v orthogonal to the j'th vector in basis."""
v_shape = v.get_shape()
basis_vec = read_colvec(basis, j)
v -= tf.batch_matmul(basis_vec, v, adj_x=True) * basis_vec
v.set_shape(v_shape)
return j + 1, basis, v
def orthogonalize_once(i, basis, v):
j = tf.constant(0, dtype=tf.int32)
_, _, v = tf.while_loop(lambda j, basis, v: j < i, gram_schmidt_step,
[j, basis, v])
return util.l2normalize(v)
# Iterated modified Gram-Schmidt orthogonalization adapted from PROPACK.
# TODO(rmlarsen): This is possibly the slowest implementation of
# iterated Gram-Schmidt orthogonalization since the abacus. Move to C++.
def orthogonalize_(i, basis, v):
v_norm = util.l2norm(v)
v_new, v_new_norm = orthogonalize_once(i, basis, v)
# If the norm decreases more than 1/sqrt(2), run a second
# round of MGS. See proof in:
# B. N. Parlett, ``The Symmetric Eigenvalue Problem'',
# Prentice-Hall, Englewood Cliffs, NJ, 1980. pp. 105-109
return tf.cond(v_new_norm < 0.7071 * v_norm,
lambda: orthogonalize_once(i, basis, v),
lambda: (v_new, v_new_norm))
def stopping_criterion(i, _):
# TODO(rmlarsen): Stop if an invariant subspace is detected.
return i < k
def lanczos_bidiag_step(i, ls):
"""Extends the Lanczos bidiagonalization ls by one step."""
u = read_colvec(ls.u, i)
r = operator.apply_adjoint(u)
# The shape inference doesn't work across cond, save and reapply the shape.
r_shape = r.get_shape()
r = tf.cond(
i > 0,
lambda: r - ls.beta.read(i - 1) * read_colvec(ls.v, i - 1),
lambda: r)
r.set_shape(r_shape)
if orthogonalize:
v, alpha = orthogonalize_(i - 1, ls.v, r)
else:
v, alpha = util.l2normalize(r)
p = operator.apply(v) - alpha * u
if orthogonalize:
u, beta = orthogonalize_(i, ls.u, p)
else:
u, beta = util.l2normalize(p)
return i + 1, update_state(ls, i, u, v, alpha, beta)
with tf.name_scope(name):
dtype = operator.dtype
if starting_vector is None:
starting_vector = tf.random_uniform(
operator.shape[:1], -1, 1, dtype=dtype)
u0, _ = util.l2normalize(starting_vector)
ls = lanzcos_bidiag_state(
u=write_colvec(tarray(k + 1, dtype, "u"), u0, 0),
v=tarray(k, dtype, "v"),
alpha=tarray(k, dtype, "alpha"),
beta=tarray(k, dtype, "beta"))
i = tf.constant(0, dtype=tf.int32)
_, ls = tf.while_loop(stopping_criterion, lanczos_bidiag_step, [i, ls])
return lanzcos_bidiag_state(
tf.matrix_transpose(ls.u.pack()),
tf.matrix_transpose(ls.v.pack()), ls.alpha.pack(), ls.beta.pack())
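# Sketch of the duck-typed `operator` argument expected by lanczos_bidiag
# (illustrative only; wraps a dense real-valued matrix using the TF API of the
# time, and is not part of this module):
#
#     class DenseOperator(object):
#         def __init__(self, a):                # a: [m, n] Tensor
#             self._a = a
#             self.shape = a.get_shape().as_list()
#             self.dtype = a.dtype
#
#         def apply(self, x):                   # A * x
#             return tf.matmul(self._a, x)
#
#         def apply_adjoint(self, x):           # A' * x (real-valued A)
#             return tf.matmul(self._a, x, transpose_a=True)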
# TODO(rmlarsen): Implement C++ ops for handling bidiagonal matrices
# efficiently. Such a module should provide
# - multiplication,
# - linear system solution by back-substitution,
# - QR factorization,
# - SVD.
def bidiag_matmul(matrix, alpha, beta, adjoint_b=False, name="bidiag_matmul"):
"""Multiplies a matrix by a bidiagonal matrix.
alpha and beta are length k vectors representing the diagonal and first lower
subdiagonal of (K+1) x K matrix B.
If adjoint_b is False, computes A * B as follows:
A * B = A[:, :-1] * diag(alpha) + A[:, 1:] * diag(beta)
If adjoint_b is True, computes A * B[:-1, :]' as follows
A * B[:-1, :]' =
A * diag(alpha) + [zeros(m,1), A[:, :-1] * diag(beta[:-1])]
Args:
matrix: A rank-2 `Tensor` representing matrix A.
alpha: A rank-1 `Tensor` representing the diagonal of B.
beta: A rank-1 `Tensor` representing the lower subdiagonal diagonal of B.
adjoint_b: `bool` determining what to compute.
name: A name scope for the operation.
Returns:
    If `adjoint_b` is False, `A * B` is returned.
    If `adjoint_b` is True, `A * B[:-1, :]'` is returned.
"""
with tf.name_scope(name):
alpha = tf.expand_dims(alpha, 0)
if adjoint_b is False:
beta = tf.expand_dims(beta, 0)
return matrix[:, :-1] * alpha + matrix[:, 1:] * beta
else:
beta = tf.expand_dims(beta[:-1], 0)
shape = tf.shape(matrix)
zero_column = tf.expand_dims(tf.zeros(shape[:1], dtype=matrix.dtype), 1)
return matrix * alpha + tf.concat(1, [zero_column, matrix[:, :-1] * beta])
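# Worked check of the bidiag_matmul identity above (illustrative sketch using
# plain NumPy, independent of the TensorFlow op):
#
#     import numpy as np
#     k = 3
#     alpha, beta = np.array([1., 2., 3.]), np.array([4., 5., 6.])
#     B = np.zeros((k + 1, k))          # (K+1) x K lower bidiagonal matrix
#     B[:k, :] = np.diag(alpha)
#     B[1:, :] += np.diag(beta)
#     A = np.random.randn(2, k + 1)
#     lhs = A @ B
#     rhs = A[:, :-1] * alpha + A[:, 1:] * beta
#     assert np.allclose(lhs, rhs)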
| 39.862069
| 80
| 0.645978
|
39668ef2c9bf86f7ca263d20545e4676ec8ebb84
| 4,843
|
py
|
Python
|
arrays_strings/one_edit_away.py
|
KeirSimmons/cracking-the-coding-interview
|
66248cec9eab71184d691f1f5652d97f5d964c9e
|
[
"MIT"
] | null | null | null |
arrays_strings/one_edit_away.py
|
KeirSimmons/cracking-the-coding-interview
|
66248cec9eab71184d691f1f5652d97f5d964c9e
|
[
"MIT"
] | null | null | null |
arrays_strings/one_edit_away.py
|
KeirSimmons/cracking-the-coding-interview
|
66248cec9eab71184d691f1f5652d97f5d964c9e
|
[
"MIT"
] | null | null | null |
class OneEditAway:
"""Checks whether or not two strings are at most one edit away (default). This maximum number
of allowed edits can actually be changed in the init function (to 0, or 2, for example).
Keyword arguments:
strA -- [str] the first string to check
strB -- [str] the second string to check
Special:
The class instantiation itself can be printed to give the result (a string representation of
True or False).
"""
def __init__(self, strA, strB):
self.max_edits = 1 # Maximum number of edits that can be made
self.passed = self.check(strA, strB) # Call the checking function
def check(self, strA, strB, edits=0):
"""
Returns whether or not strA and strB are at most self.max_edits away
This is a recursive function (using Dynamic Programming principles) which makes the
necessary edits (removing either the first or last character of one of the strings) whilst
increasing the edits count until either the remaining strings are the same (True) or we run
out of edits (False). For this to work, every time the first character of both strings are
the same, we remove these characters and continue.
Keyword arguments:
strA -- [str] the first string to check
strB -- [str] the second string to check
edits -- [int] (optional) (default:0) the number of edits already made
Return values:
[bool] -- whether or not strA is at most self.max_edits edits away from strB
"""
if edits > self.max_edits:
"""We've already made too many edits (False)
"""
return False
elif strA == strB:
"""The strings are the same (True)
"""
return True
elif abs(len(strA) - len(strB)) > self.max_edits:
"""The difference in length of the strings is more than the possible allowed number of
edits, so the minimal number of edits needed to make for both strings to be equal will
be at least higher than what is allowed (False)
"""
return False
elif len(strA) == 0 or len(strB) == 0:
"""One of the remaining strings is out of characters, so we can't remove any more from
it. The remaining number of characters in the other string (max_len) is how many more
edits will be needed to make both strings equal. This plus the current number of edits
            must not exceed the maximum number allowed (i.e. edits + max_len <= max_edits
            for True, otherwise False)."""
            max_len = max(len(strA), len(strB))
            return edits + max_len <= self.max_edits
elif strA[0] == strB[0]:
"""This is the heart of the recursion. If the starting character of both strings are
the same, we can return the result from running this checking function on the
substrings removing these first characters. I.e.
`check("abc", "abc") = check("bc", "bc")`
"""
return self.check(strA[1:], strB[1:], edits=edits)
elif len(strA) > len(strB):
"""The starting characters are different, and string A is greater than string B. So
let's look at the result of checking string B against string A with the first character
removed, and the result of checking string B against string A with the last character
removed. If either of these return True, we return True. This is another recursion
call. We increase the number of edits here by 1 to keep count.
"""
return self.check(strA[1:], strB, edits=edits + 1) or self.check(
strA[:-1], strB, edits=edits + 1
)
elif len(strB) > len(strA):
"""Same as the previous check, but swapping string A and B around.
"""
return self.check(strA, strB[1:], edits=edits + 1) or self.check(
strA, strB[:-1], edits=edits + 1
)
else:
"""If we reach this stage, then we still have at least one edit left, the strings do
not match, the first characters do not match, and the strings are of the same length.
In that case, return the result from removing the first character from both strings,
i.e. making one edit (and therefore also increasing the edit count by 1).
"""
return self.check(strA[1:], strB[1:], edits=edits + 1)
def __str__(self):
"""For printing purposes.
"""
return str(self.passed)
if __name__ == "__main__":
strA = input("String A? ")
strB = input("String B? ")
print(OneEditAway(strA, strB))
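    # Illustrative expected results under the default limit of one edit
    # (sketch; independent of the interactive inputs above):
    assert OneEditAway("pale", "ple").passed        # one deletion
    assert OneEditAway("pales", "pale").passed      # one insertion
    assert OneEditAway("pale", "bale").passed       # one replacement
    assert not OneEditAway("pale", "bake").passed   # two replacements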
| 48.43
| 99
| 0.616353
|
f0883b39b8c1a0abc03a3f390d27e586d5bc980c
| 6,124
|
py
|
Python
|
king_phisher/server/graphql/types/misc.py
|
chachabooboo/king-phisher
|
8a91b9043de0f12b5cad9a5f1d64ebd0179a6c4d
|
[
"BSD-3-Clause"
] | 1,143
|
2015-01-12T15:05:16.000Z
|
2020-04-12T16:10:19.000Z
|
king_phisher/server/graphql/types/misc.py
|
chachabooboo/king-phisher
|
8a91b9043de0f12b5cad9a5f1d64ebd0179a6c4d
|
[
"BSD-3-Clause"
] | 399
|
2015-01-22T15:20:03.000Z
|
2020-04-08T23:01:46.000Z
|
king_phisher/server/graphql/types/misc.py
|
chachabooboo/king-phisher
|
8a91b9043de0f12b5cad9a5f1d64ebd0179a6c4d
|
[
"BSD-3-Clause"
] | 351
|
2015-02-02T21:39:38.000Z
|
2020-03-21T11:45:20.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# king_phisher/server/graphql/types/misc.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import absolute_import
import datetime
import functools
import king_phisher.geoip as geoip
import king_phisher.ipaddress as ipaddress
import geoip2.errors
import graphene.relay
import graphene.types.utils
import graphql.language.ast
import graphql_relay.connection.arrayconnection
__all__ = ('ConnectionField', 'GeoLocation', 'Plugin', 'PluginConnection', 'RelayNode')
# custom enum types
class FilterOperatorEnum(graphene.Enum):
EQ = 'eq'
GE = 'ge'
GT = 'gt'
LE = 'le'
LT = 'lt'
NE = 'ne'
class SortDirectionEnum(graphene.Enum):
AESC = 'aesc'
DESC = 'desc'
# misc definitions
class RelayNode(graphene.relay.Node):
@classmethod
def from_global_id(cls, global_id):
return global_id
@classmethod
def to_global_id(cls, _, local_id):
return local_id
class ConnectionField(graphene.relay.ConnectionField):
@classmethod
def connection_resolver(cls, resolver, connection, root, info, **kwargs):
iterable = resolver(root, info, **kwargs)
_len = len(iterable)
connection = graphql_relay.connection.arrayconnection.connection_from_list_slice(
iterable,
kwargs,
slice_start=0,
list_length=_len,
list_slice_length=_len,
connection_type=functools.partial(connection, total=_len),
pageinfo_type=graphene.relay.connection.PageInfo,
edge_type=connection.Edge
)
connection.iterable = iterable
connection.length = _len
return connection
# custom scalar types
class AnyScalar(graphene.types.Scalar):
@staticmethod
def serialize(dt):
raise NotImplementedError()
@staticmethod
def parse_literal(node):
if isinstance(node, graphql.language.ast.FloatValue):
return float(node.value)
if isinstance(node, graphql.language.ast.IntValue):
return int(node.value)
return node.value
@staticmethod
def parse_value(value):
return value
class DateTimeScalar(graphene.types.Scalar):
@staticmethod
def serialize(dt):
return dt
@staticmethod
def parse_literal(node):
if isinstance(node, graphql.language.ast.StringValue):
return datetime.datetime.strptime(node.value, '%Y-%m-%dT%H:%M:%S.%f')
@staticmethod
def parse_value(value):
return datetime.datetime.strptime(value, '%Y-%m-%dT%H:%M:%S.%f')
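# The scalar above expects ISO-like timestamps with microseconds, e.g.
# (illustrative round-trip using only the stdlib):
#
#     datetime.datetime.strptime('2019-06-01T12:30:45.000000',
#                                '%Y-%m-%dT%H:%M:%S.%f')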
# custom compound types
class GeoLocation(graphene.ObjectType):
city = graphene.Field(graphene.String)
continent = graphene.Field(graphene.String)
coordinates = graphene.List(graphene.Float)
country = graphene.Field(graphene.String)
postal_code = graphene.Field(graphene.String)
time_zone = graphene.Field(graphene.String)
@classmethod
def from_ip_address(cls, ip_address):
ip_address = ipaddress.ip_address(ip_address)
if ip_address.is_private:
return
try:
result = geoip.lookup(ip_address)
except geoip2.errors.AddressNotFoundError:
result = None
if result is None:
return
return cls(**result)
class Plugin(graphene.ObjectType):
class Meta:
interfaces = (RelayNode,)
authors = graphene.List(graphene.String)
classifiers = graphene.List(graphene.String)
description = graphene.Field(graphene.String)
homepage = graphene.Field(graphene.String)
name = graphene.Field(graphene.String)
reference_urls = graphene.List(graphene.String)
title = graphene.Field(graphene.String)
version = graphene.Field(graphene.String)
@classmethod
def from_plugin(cls, plugin):
return cls(
authors=plugin.authors,
classifiers=plugin.classifiers,
description=plugin.description,
homepage=plugin.homepage,
name=plugin.name,
reference_urls=plugin.reference_urls,
title=plugin.title,
version=plugin.version
)
@classmethod
def resolve(cls, info, **kwargs):
plugin_manager = info.context.get('plugin_manager', {})
for _, plugin in plugin_manager:
if plugin.name != kwargs.get('name'):
continue
return cls.from_plugin(plugin)
class PluginConnection(graphene.relay.Connection):
class Meta:
node = Plugin
total = graphene.Int()
@classmethod
def resolve(cls, info, **kwargs):
plugin_manager = info.context.get('plugin_manager', {})
return [Plugin.from_plugin(plugin) for _, plugin in sorted(plugin_manager, key=lambda i: i[0])]
# custom compound input types
class FilterInput(graphene.InputObjectType):
and_ = graphene.List('king_phisher.server.graphql.types.misc.FilterInput', name='and')
or_ = graphene.List('king_phisher.server.graphql.types.misc.FilterInput', name='or')
field = graphene.String()
value = AnyScalar()
operator = FilterOperatorEnum()
class SortInput(graphene.InputObjectType):
field = graphene.String(required=True)
direction = SortDirectionEnum()
| 31.244898
| 97
| 0.762084
|
79689257fd4e0ec51095bba4fa3533cc3e310f6e
| 1,452
|
py
|
Python
|
python-sdk/tutorials/automl-with-azureml/continuous-retraining/check_data.py
|
0mza987/azureml-examples
|
2abb872f1278d4b4e65587e033f38a058512b2e3
|
[
"MIT"
] | 331
|
2020-08-26T15:54:23.000Z
|
2022-03-31T17:10:58.000Z
|
python-sdk/tutorials/automl-with-azureml/continuous-retraining/check_data.py
|
0mza987/azureml-examples
|
2abb872f1278d4b4e65587e033f38a058512b2e3
|
[
"MIT"
] | 262
|
2020-08-25T23:17:17.000Z
|
2022-03-31T00:25:32.000Z
|
python-sdk/tutorials/automl-with-azureml/continuous-retraining/check_data.py
|
0mza987/azureml-examples
|
2abb872f1278d4b4e65587e033f38a058512b2e3
|
[
"MIT"
] | 307
|
2020-09-04T01:02:11.000Z
|
2022-03-31T16:46:48.000Z
|
import argparse
import os
import azureml.core
from datetime import datetime
import pandas as pd
import pytz
from azureml.core import Dataset, Model
from azureml.core.run import Run, _OfflineRun
from azureml.core import Workspace
run = Run.get_context()
ws = None
if type(run) == _OfflineRun:
ws = Workspace.from_config()
else:
ws = run.experiment.workspace
print("Check for new data.")
parser = argparse.ArgumentParser("split")
parser.add_argument("--ds_name", help="input dataset name")
parser.add_argument("--model_name", help="name of the deployed model")
args = parser.parse_args()
print("Argument 1(ds_name): %s" % args.ds_name)
print("Argument 2(model_name): %s" % args.model_name)
# Get the latest registered model
try:
model = Model(ws, args.model_name)
last_train_time = model.created_time
print("Model was last trained on {0}.".format(last_train_time))
except Exception as e:
print("Could not get last model train time.")
last_train_time = datetime.min.replace(tzinfo=pytz.UTC)
train_ds = Dataset.get_by_name(ws, args.ds_name)
dataset_changed_time = train_ds.data_changed_time
if not dataset_changed_time > last_train_time:
print("Cancelling run since there is no new data.")
run.parent.cancel()
else:
# New data is available since the model was last trained
print("Dataset was last updated on {0}. Retraining...".format(dataset_changed_time))
| 30.893617
| 89
| 0.726584
|
59afac174df91a7d526f39179a6992cf9e231411
| 57,163
|
py
|
Python
|
parsedmarc/__init__.py
|
Wouter0100/parsedmarc
|
0ae15ed90cb83b810325ff60c9c4ff595a4ec57d
|
[
"Apache-2.0"
] | null | null | null |
parsedmarc/__init__.py
|
Wouter0100/parsedmarc
|
0ae15ed90cb83b810325ff60c9c4ff595a4ec57d
|
[
"Apache-2.0"
] | null | null | null |
parsedmarc/__init__.py
|
Wouter0100/parsedmarc
|
0ae15ed90cb83b810325ff60c9c4ff595a4ec57d
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""A Python package for parsing DMARC reports"""
import logging
import os
import shutil
import xml.parsers.expat as expat
import json
from datetime import datetime
from collections import OrderedDict
from io import BytesIO, StringIO
from gzip import GzipFile
import zipfile
from csv import DictWriter
import re
from base64 import b64decode
import binascii
import email
import tempfile
import email.utils
import mailbox
import mailparser
from expiringdict import ExpiringDict
import xmltodict
from lxml import etree
from mailsuite.imap import IMAPClient
from mailsuite.smtp import send_email
from parsedmarc.utils import get_base_domain, get_ip_address_info
from parsedmarc.utils import is_outlook_msg, convert_outlook_msg
from parsedmarc.utils import timestamp_to_human, human_timestamp_to_datetime
from parsedmarc.utils import parse_email
__version__ = "6.8.2"
logging.basicConfig(
format='%(levelname)8s:%(filename)s:%(lineno)d:'
'%(message)s',
datefmt='%Y-%m-%d:%H:%M:%S')
logger = logging.getLogger("parsedmarc")
logger.debug("parsedmarc v{0}".format(__version__))
feedback_report_regex = re.compile(r"^([\w\-]+): (.+)$", re.MULTILINE)
xml_header_regex = re.compile(r"^<\?xml .*?>", re.MULTILINE)
xml_schema_regex = re.compile(r"</??xs:schema.*>", re.MULTILINE)
MAGIC_ZIP = b"\x50\x4B\x03\x04"
MAGIC_GZIP = b"\x1F\x8B"
MAGIC_XML = b"\x3c\x3f\x78\x6d\x6c\x20"
IP_ADDRESS_CACHE = ExpiringDict(max_len=10000, max_age_seconds=1800)
class ParserError(RuntimeError):
"""Raised whenever the parser fails for some reason"""
class InvalidDMARCReport(ParserError):
"""Raised when an invalid DMARC report is encountered"""
class InvalidAggregateReport(InvalidDMARCReport):
"""Raised when an invalid DMARC aggregate report is encountered"""
class InvalidForensicReport(InvalidDMARCReport):
"""Raised when an invalid DMARC forensic report is encountered"""
def _parse_report_record(record, offline=False, nameservers=None,
dns_timeout=2.0, parallel=False):
"""
Converts a record from a DMARC aggregate report into a more consistent
format
Args:
record (OrderedDict): The record to convert
offline (bool): Do not query online for geolocation or DNS
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
        dns_timeout (float): Sets the DNS timeout in seconds
        parallel (bool): Parallel processing
Returns:
OrderedDict: The converted record
"""
if nameservers is None:
nameservers = ["1.1.1.1", "1.0.0.1",
"2606:4700:4700::1111", "2606:4700:4700::1001",
]
record = record.copy()
new_record = OrderedDict()
new_record_source = get_ip_address_info(record["row"]["source_ip"],
cache=IP_ADDRESS_CACHE,
offline=offline,
nameservers=nameservers,
timeout=dns_timeout,
parallel=parallel)
new_record["source"] = new_record_source
new_record["count"] = int(record["row"]["count"])
policy_evaluated = record["row"]["policy_evaluated"].copy()
new_policy_evaluated = OrderedDict([("disposition", "none"),
("dkim", "fail"),
("spf", "fail"),
("policy_override_reasons", [])
])
if "disposition" in policy_evaluated:
new_policy_evaluated["disposition"] = policy_evaluated["disposition"]
if new_policy_evaluated["disposition"].strip().lower() == "pass":
new_policy_evaluated["disposition"] = "none"
if "dkim" in policy_evaluated:
new_policy_evaluated["dkim"] = policy_evaluated["dkim"]
if "spf" in policy_evaluated:
new_policy_evaluated["spf"] = policy_evaluated["spf"]
reasons = []
spf_aligned = policy_evaluated["spf"] is not None and policy_evaluated[
"spf"].lower() == "pass"
dkim_aligned = policy_evaluated["dkim"] is not None and policy_evaluated[
"dkim"].lower() == "pass"
dmarc_aligned = spf_aligned or dkim_aligned
new_record["alignment"] = dict()
new_record["alignment"]["spf"] = spf_aligned
new_record["alignment"]["dkim"] = dkim_aligned
new_record["alignment"]["dmarc"] = dmarc_aligned
if "reason" in policy_evaluated:
if type(policy_evaluated["reason"]) == list:
reasons = policy_evaluated["reason"]
else:
reasons = [policy_evaluated["reason"]]
for reason in reasons:
if "comment" not in reason:
reason["comment"] = None
new_policy_evaluated["policy_override_reasons"] = reasons
new_record["policy_evaluated"] = new_policy_evaluated
new_record["identifiers"] = record["identifiers"].copy()
new_record["auth_results"] = OrderedDict([("dkim", []), ("spf", [])])
if type(new_record["identifiers"]["header_from"]) == str:
lowered_from = new_record["identifiers"]["header_from"].lower()
else:
lowered_from = ''
new_record["identifiers"]["header_from"] = lowered_from
if record["auth_results"] is not None:
auth_results = record["auth_results"].copy()
if "spf" not in auth_results:
auth_results["spf"] = []
if "dkim" not in auth_results:
auth_results["dkim"] = []
else:
auth_results = new_record["auth_results"].copy()
if type(auth_results["dkim"]) != list:
auth_results["dkim"] = [auth_results["dkim"]]
for result in auth_results["dkim"]:
if "domain" in result and result["domain"] is not None:
new_result = OrderedDict([("domain", result["domain"])])
if "selector" in result and result["selector"] is not None:
new_result["selector"] = result["selector"]
else:
new_result["selector"] = "none"
if "result" in result and result["result"] is not None:
new_result["result"] = result["result"]
else:
new_result["result"] = "none"
new_record["auth_results"]["dkim"].append(new_result)
if type(auth_results["spf"]) != list:
auth_results["spf"] = [auth_results["spf"]]
for result in auth_results["spf"]:
new_result = OrderedDict([("domain", result["domain"])])
if "scope" in result and result["scope"] is not None:
new_result["scope"] = result["scope"]
else:
new_result["scope"] = "mfrom"
if "result" in result and result["result"] is not None:
new_result["result"] = result["result"]
else:
new_result["result"] = "none"
new_record["auth_results"]["spf"].append(new_result)
if "envelope_from" not in new_record["identifiers"]:
envelope_from = None
if len(auth_results["spf"]) > 0:
envelope_from = new_record["auth_results"]["spf"][-1]["domain"]
if envelope_from is not None:
envelope_from = str(envelope_from).lower()
new_record["identifiers"]["envelope_from"] = envelope_from
elif new_record["identifiers"]["envelope_from"] is None:
if len(auth_results["spf"]) > 0:
envelope_from = new_record["auth_results"]["spf"][-1]["domain"]
if envelope_from is not None:
envelope_from = str(envelope_from).lower()
new_record["identifiers"]["envelope_from"] = envelope_from
envelope_to = None
if "envelope_to" in new_record["identifiers"]:
envelope_to = new_record["identifiers"]["envelope_to"]
del new_record["identifiers"]["envelope_to"]
new_record["identifiers"]["envelope_to"] = envelope_to
return new_record
def parse_aggregate_report_xml(xml, offline=False, nameservers=None,
timeout=2.0, parallel=False):
"""Parses a DMARC XML report string and returns a consistent OrderedDict
Args:
xml (str): A string of DMARC aggregate report XML
offline (bool): Do not query online for geolocation or DNS
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
timeout (float): Sets the DNS timeout in seconds
parallel (bool): Parallel processing
Returns:
OrderedDict: The parsed aggregate DMARC report
"""
errors = []
# Parse XML and recover from errors
try:
xmltodict.parse(xml)["feedback"]
except Exception as e:
errors.append("Invalid XML: {0}".format(e.__str__()))
tree = etree.parse(BytesIO(xml.encode('utf-8')),
etree.XMLParser(recover=True))
xml = etree.tostring(tree).decode('utf-8')
try:
# Replace XML header (sometimes they are invalid)
xml = xml_header_regex.sub("<?xml version=\"1.0\"?>", xml)
# Remove invalid schema tags
xml = xml_schema_regex.sub('', xml)
report = xmltodict.parse(xml)["feedback"]
report_metadata = report["report_metadata"]
schema = "draft"
if "version" in report:
schema = report["version"]
new_report = OrderedDict([("xml_schema", schema)])
new_report_metadata = OrderedDict()
if report_metadata["org_name"] is None:
if report_metadata["email"] is not None:
report_metadata["org_name"] = report_metadata[
"email"].split("@")[-1]
org_name = report_metadata["org_name"]
if org_name is not None and " " not in org_name:
org_name = get_base_domain(org_name)
new_report_metadata["org_name"] = org_name
new_report_metadata["org_email"] = report_metadata["email"]
extra = None
if "extra_contact_info" in report_metadata:
extra = report_metadata["extra_contact_info"]
new_report_metadata["org_extra_contact_info"] = extra
new_report_metadata["report_id"] = report_metadata["report_id"]
report_id = new_report_metadata["report_id"]
report_id = report_id.replace("<",
"").replace(">", "").split("@")[0]
new_report_metadata["report_id"] = report_id
date_range = report["report_metadata"]["date_range"]
date_range["begin"] = timestamp_to_human(date_range["begin"])
date_range["end"] = timestamp_to_human(date_range["end"])
new_report_metadata["begin_date"] = date_range["begin"]
new_report_metadata["end_date"] = date_range["end"]
if "error" in report["report_metadata"]:
if type(report["report_metadata"]["error"]) != list:
errors = [report["report_metadata"]["error"]]
else:
errors = report["report_metadata"]["error"]
new_report_metadata["errors"] = errors
new_report["report_metadata"] = new_report_metadata
records = []
policy_published = report["policy_published"]
new_policy_published = OrderedDict()
new_policy_published["domain"] = policy_published["domain"]
adkim = "r"
if "adkim" in policy_published:
if policy_published["adkim"] is not None:
adkim = policy_published["adkim"]
new_policy_published["adkim"] = adkim
aspf = "r"
if "aspf" in policy_published:
if policy_published["aspf"] is not None:
aspf = policy_published["aspf"]
new_policy_published["aspf"] = aspf
new_policy_published["p"] = policy_published["p"]
sp = new_policy_published["p"]
if "sp" in policy_published:
if policy_published["sp"] is not None:
sp = report["policy_published"]["sp"]
new_policy_published["sp"] = sp
pct = "100"
if "pct" in policy_published:
if policy_published["pct"] is not None:
pct = report["policy_published"]["pct"]
new_policy_published["pct"] = pct
fo = "0"
if "fo" in policy_published:
if policy_published["fo"] is not None:
fo = report["policy_published"]["fo"]
new_policy_published["fo"] = fo
new_report["policy_published"] = new_policy_published
if type(report["record"]) == list:
for record in report["record"]:
report_record = _parse_report_record(record,
offline=offline,
nameservers=nameservers,
dns_timeout=timeout,
parallel=parallel)
records.append(report_record)
else:
report_record = _parse_report_record(report["record"],
offline=offline,
nameservers=nameservers,
dns_timeout=timeout,
parallel=parallel)
records.append(report_record)
new_report["records"] = records
return new_report
except expat.ExpatError as error:
raise InvalidAggregateReport(
"Invalid XML: {0}".format(error.__str__()))
except KeyError as error:
raise InvalidAggregateReport(
"Missing field: {0}".format(error.__str__()))
except AttributeError:
raise InvalidAggregateReport("Report missing required section")
except Exception as error:
raise InvalidAggregateReport(
"Unexpected error: {0}".format(error.__str__()))
def extract_xml(input_):
"""
Extracts xml from a zip or gzip file at the given path, file-like object,
or bytes.
Args:
input_: A path to a file, a file like object, or bytes
Returns:
str: The extracted XML
"""
if type(input_) == str:
file_object = open(input_, "rb")
elif type(input_) == bytes:
file_object = BytesIO(input_)
else:
file_object = input_
try:
header = file_object.read(6)
file_object.seek(0)
if header.startswith(MAGIC_ZIP):
_zip = zipfile.ZipFile(file_object)
xml = _zip.open(_zip.namelist()[0]).read().decode(errors='ignore')
elif header.startswith(MAGIC_GZIP):
xml = GzipFile(fileobj=file_object).read().decode(errors='ignore')
elif header.startswith(MAGIC_XML):
xml = file_object.read().decode(errors='ignore')
else:
file_object.close()
raise InvalidAggregateReport("Not a valid zip, gzip, or xml file")
file_object.close()
except UnicodeDecodeError:
raise InvalidAggregateReport("File objects must be opened in binary "
"(rb) mode")
except Exception as error:
raise InvalidAggregateReport(
"Invalid archive file: {0}".format(error.__str__()))
return xml
def parse_aggregate_report_file(_input, offline=False, nameservers=None,
dns_timeout=2.0,
parallel=False):
"""Parses a file at the given path, a file-like object. or bytes as a
aggregate DMARC report
Args:
_input: A path to a file, a file like object, or bytes
offline (bool): Do not query online for geolocation or DNS
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
dns_timeout (float): Sets the DNS timeout in seconds
parallel (bool): Parallel processing
Returns:
OrderedDict: The parsed DMARC aggregate report
"""
xml = extract_xml(_input)
return parse_aggregate_report_xml(xml,
offline=offline,
nameservers=nameservers,
timeout=dns_timeout,
parallel=parallel)
def parsed_aggregate_reports_to_csv_rows(reports):
"""
    Converts one or more parsed aggregate reports to a list of dicts in flat CSV
format
Args:
reports: A parsed aggregate report or list of parsed aggregate reports
Returns:
list: Parsed aggregate report data as a list of dicts in flat CSV
format
"""
def to_str(obj):
return str(obj).lower()
if type(reports) == OrderedDict:
reports = [reports]
rows = []
for report in reports:
xml_schema = report["xml_schema"]
org_name = report["report_metadata"]["org_name"]
org_email = report["report_metadata"]["org_email"]
org_extra_contact = report["report_metadata"]["org_extra_contact_info"]
report_id = report["report_metadata"]["report_id"]
begin_date = report["report_metadata"]["begin_date"]
end_date = report["report_metadata"]["end_date"]
errors = "|".join(report["report_metadata"]["errors"])
domain = report["policy_published"]["domain"]
adkim = report["policy_published"]["adkim"]
aspf = report["policy_published"]["aspf"]
p = report["policy_published"]["p"]
sp = report["policy_published"]["sp"]
pct = report["policy_published"]["pct"]
fo = report["policy_published"]["fo"]
report_dict = dict(xml_schema=xml_schema, org_name=org_name,
org_email=org_email,
org_extra_contact_info=org_extra_contact,
report_id=report_id, begin_date=begin_date,
end_date=end_date, errors=errors, domain=domain,
adkim=adkim, aspf=aspf, p=p, sp=sp, pct=pct, fo=fo)
for record in report["records"]:
row = report_dict.copy()
row["source_ip_address"] = record["source"]["ip_address"]
row["source_country"] = record["source"]["country"]
row["source_reverse_dns"] = record["source"]["reverse_dns"]
row["source_base_domain"] = record["source"]["base_domain"]
row["count"] = record["count"]
row["spf_aligned"] = record["alignment"]["spf"]
row["dkim_aligned"] = record["alignment"]["dkim"]
row["dmarc_aligned"] = record["alignment"]["dmarc"]
row["disposition"] = record["policy_evaluated"]["disposition"]
policy_override_reasons = list(map(
lambda r: r["type"],
record["policy_evaluated"]
["policy_override_reasons"]))
policy_override_comments = list(map(
lambda r: r["comment"] or "none",
record["policy_evaluated"]
["policy_override_reasons"]))
row["policy_override_reasons"] = ",".join(
policy_override_reasons)
row["policy_override_comments"] = "|".join(
policy_override_comments)
row["envelope_from"] = record["identifiers"]["envelope_from"]
row["header_from"] = record["identifiers"]["header_from"]
envelope_to = record["identifiers"]["envelope_to"]
row["envelope_to"] = envelope_to
dkim_domains = []
dkim_selectors = []
dkim_results = []
for dkim_result in record["auth_results"]["dkim"]:
dkim_domains.append(dkim_result["domain"])
if "selector" in dkim_result:
dkim_selectors.append(dkim_result["selector"])
dkim_results.append(dkim_result["result"])
row["dkim_domains"] = ",".join(map(to_str, dkim_domains))
row["dkim_selectors"] = ",".join(map(to_str, dkim_selectors))
row["dkim_results"] = ",".join(map(to_str, dkim_results))
spf_domains = []
spf_scopes = []
spf_results = []
for spf_result in record["auth_results"]["spf"]:
spf_domains.append(spf_result["domain"])
spf_scopes.append(spf_result["scope"])
spf_results.append(spf_result["result"])
row["spf_domains"] = ",".join(map(to_str, spf_domains))
row["spf_scopes"] = ",".join(map(to_str, spf_scopes))
row["spf_results"] = ",".join(map(to_str, spf_results))
rows.append(row)
for r in rows:
for k, v in r.items():
if type(v) not in [str, int, bool]:
r[k] = ''
return rows
def parsed_aggregate_reports_to_csv(reports):
"""
Converts one or more parsed aggregate reports to flat CSV format, including
headers
Args:
reports: A parsed aggregate report or list of parsed aggregate reports
Returns:
str: Parsed aggregate report data in flat CSV format, including headers
"""
fields = ["xml_schema", "org_name", "org_email",
"org_extra_contact_info", "report_id", "begin_date", "end_date",
"errors", "domain", "adkim", "aspf", "p", "sp", "pct", "fo",
"source_ip_address", "source_country", "source_reverse_dns",
"source_base_domain", "count", "spf_aligned",
"dkim_aligned", "dmarc_aligned", "disposition",
"policy_override_reasons", "policy_override_comments",
"envelope_from", "header_from",
"envelope_to", "dkim_domains", "dkim_selectors", "dkim_results",
"spf_domains", "spf_scopes", "spf_results"]
csv_file_object = StringIO(newline="\n")
writer = DictWriter(csv_file_object, fields)
writer.writeheader()
rows = parsed_aggregate_reports_to_csv_rows(reports)
for row in rows:
writer.writerow(row)
csv_file_object.flush()
return csv_file_object.getvalue()
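# Typical use of the aggregate-report helpers above (illustrative sketch; the
# file name is a placeholder):
#
#     report = parse_aggregate_report_file("aggregate_report.xml.gz")
#     print(parsed_aggregate_reports_to_csv(report))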
def parse_forensic_report(feedback_report, sample, msg_date,
offline=False, nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False,
parallel=False):
"""
    Converts a DMARC forensic report and sample to an ``OrderedDict``
Args:
feedback_report (str): A message's feedback report as a string
offline (bool): Do not query online for geolocation or DNS
sample (str): The RFC 822 headers or RFC 822 message sample
msg_date (str): The message's date header
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
parallel (bool): Parallel processing
Returns:
OrderedDict: A parsed report and sample
"""
delivery_results = ["delivered", "spam", "policy", "reject", "other"]
try:
parsed_report = OrderedDict()
report_values = feedback_report_regex.findall(feedback_report)
for report_value in report_values:
key = report_value[0].lower().replace("-", "_")
parsed_report[key] = report_value[1]
if "arrival_date" not in parsed_report:
if msg_date is None:
raise InvalidForensicReport(
"Forensic sample is not a valid email")
parsed_report["arrival_date"] = msg_date.isoformat()
if "version" not in parsed_report:
parsed_report["version"] = 1
if "user_agent" not in parsed_report:
parsed_report["user_agent"] = None
if "delivery_result" not in parsed_report:
parsed_report["delivery_result"] = None
else:
for delivery_result in delivery_results:
if delivery_result in parsed_report["delivery_result"].lower():
parsed_report["delivery_result"] = delivery_result
break
if parsed_report["delivery_result"] not in delivery_results:
parsed_report["delivery_result"] = "other"
arrival_utc = human_timestamp_to_datetime(
parsed_report["arrival_date"], to_utc=True)
arrival_utc = arrival_utc.strftime("%Y-%m-%d %H:%M:%S")
parsed_report["arrival_date_utc"] = arrival_utc
ip_address = parsed_report["source_ip"]
parsed_report_source = get_ip_address_info(ip_address,
offline=offline,
nameservers=nameservers,
timeout=dns_timeout,
parallel=parallel)
parsed_report["source"] = parsed_report_source
del parsed_report["source_ip"]
if "identity_alignment" not in parsed_report:
parsed_report["authentication_mechanisms"] = []
elif parsed_report["identity_alignment"] == "none":
parsed_report["authentication_mechanisms"] = []
del parsed_report["identity_alignment"]
else:
auth_mechanisms = parsed_report["identity_alignment"]
auth_mechanisms = auth_mechanisms.split(",")
parsed_report["authentication_mechanisms"] = auth_mechanisms
del parsed_report["identity_alignment"]
if "auth_failure" not in parsed_report:
parsed_report["auth_failure"] = "dmarc"
auth_failure = parsed_report["auth_failure"].split(",")
parsed_report["auth_failure"] = auth_failure
optional_fields = ["original_envelope_id", "dkim_domain",
"original_mail_from", "original_rcpt_to"]
for optional_field in optional_fields:
if optional_field not in parsed_report:
parsed_report[optional_field] = None
parsed_sample = parse_email(
sample,
strip_attachment_payloads=strip_attachment_payloads)
if "reported_domain" not in parsed_report:
parsed_report["reported_domain"] = parsed_sample["from"]["domain"]
sample_headers_only = False
number_of_attachments = len(parsed_sample["attachments"])
if number_of_attachments < 1 and parsed_sample["body"] is None:
sample_headers_only = True
if sample_headers_only and parsed_sample["has_defects"]:
del parsed_sample["defects"]
del parsed_sample["defects_categories"]
del parsed_sample["has_defects"]
parsed_report["sample_headers_only"] = sample_headers_only
parsed_report["sample"] = sample
parsed_report["parsed_sample"] = parsed_sample
return parsed_report
except KeyError as error:
raise InvalidForensicReport("Missing value: {0}".format(
error.__str__()))
except Exception as error:
raise InvalidForensicReport(
"Unexpected error: {0}".format(error.__str__()))
def parsed_forensic_reports_to_csv_rows(reports):
"""
Converts one or more parsed forensic reports to a list of dicts in flat CSV
format
Args:
reports: A parsed forensic report or list of parsed forensic reports
Returns:
list: Parsed forensic report data as a list of dicts in flat CSV format
"""
if type(reports) == OrderedDict:
reports = [reports]
rows = []
for report in reports:
row = report.copy()
row["source_ip_address"] = report["source"]["ip_address"]
row["source_reverse_dns"] = report["source"]["reverse_dns"]
row["source_base_domain"] = report["source"]["base_domain"]
row["source_country"] = report["source"]["country"]
del row["source"]
row["subject"] = report["parsed_sample"]["subject"]
row["auth_failure"] = ",".join(report["auth_failure"])
authentication_mechanisms = report["authentication_mechanisms"]
row["authentication_mechanisms"] = ",".join(
authentication_mechanisms)
del row["sample"]
del row["parsed_sample"]
rows.append(row)
return rows
def parsed_forensic_reports_to_csv(reports):
"""
Converts one or more parsed forensic reports to flat CSV format, including
headers
Args:
reports: A parsed forensic report or list of parsed forensic reports
Returns:
str: Parsed forensic report data in flat CSV format, including headers
"""
fields = ["feedback_type", "user_agent", "version", "original_envelope_id",
"original_mail_from", "original_rcpt_to", "arrival_date",
"arrival_date_utc", "subject", "message_id",
"authentication_results", "dkim_domain", "source_ip_address",
"source_country", "source_reverse_dns", "source_base_domain",
"delivery_result", "auth_failure", "reported_domain",
"authentication_mechanisms", "sample_headers_only"]
csv_file = StringIO()
csv_writer = DictWriter(csv_file, fieldnames=fields)
csv_writer.writeheader()
rows = parsed_forensic_reports_to_csv_rows(reports)
for row in rows:
csv_writer.writerow(row)
return csv_file.getvalue()
def parse_report_email(input_, offline=False, nameservers=None,
dns_timeout=2.0, strip_attachment_payloads=False,
parallel=False):
"""
Parses a DMARC report from an email
Args:
input_: An emailed DMARC report in RFC 822 format, as bytes or a string
        offline (bool): Do not query online for geolocation or DNS
nameservers (list): A list of one or more nameservers to use
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
parallel (bool): Parallel processing
Returns:
OrderedDict:
* ``report_type``: ``aggregate`` or ``forensic``
* ``report``: The parsed report
"""
result = None
try:
if is_outlook_msg(input_):
input_ = convert_outlook_msg(input_)
if type(input_) == bytes:
input_ = input_.decode(encoding="utf8", errors="replace")
msg = mailparser.parse_from_string(input_)
msg_headers = json.loads(msg.headers_json)
date = email.utils.format_datetime(datetime.utcnow())
if "Date" in msg_headers:
date = human_timestamp_to_datetime(
msg_headers["Date"])
msg = email.message_from_string(input_)
except Exception as e:
raise InvalidDMARCReport(e.__str__())
subject = None
feedback_report = None
sample = None
if "Subject" in msg_headers:
subject = msg_headers["Subject"]
for part in msg.walk():
content_type = part.get_content_type()
payload = part.get_payload()
if type(payload) != list:
payload = [payload]
payload = payload[0].__str__()
if content_type == "message/feedback-report":
try:
if "Feedback-Type" in payload:
feedback_report = payload
else:
feedback_report = b64decode(payload).__str__()
feedback_report = feedback_report.lstrip(
"b'").rstrip("'")
feedback_report = feedback_report.replace("\\r", "")
feedback_report = feedback_report.replace("\\n", "\n")
except (ValueError, TypeError, binascii.Error):
feedback_report = payload
elif content_type == "text/rfc822-headers":
sample = payload
elif content_type == "message/rfc822":
sample = payload
else:
try:
payload = b64decode(payload)
if payload.startswith(MAGIC_ZIP) or \
payload.startswith(MAGIC_GZIP) or \
payload.startswith(MAGIC_XML):
ns = nameservers
aggregate_report = parse_aggregate_report_file(
payload,
offline=offline,
nameservers=ns,
dns_timeout=dns_timeout,
parallel=parallel)
result = OrderedDict([("report_type", "aggregate"),
("report", aggregate_report)])
return result
except (TypeError, ValueError, binascii.Error):
pass
except InvalidAggregateReport as e:
error = 'Message with subject "{0}" ' \
'is not a valid ' \
'aggregate DMARC report: {1}'.format(subject, e)
raise InvalidAggregateReport(error)
except FileNotFoundError as e:
error = 'Unable to parse message with ' \
'subject "{0}": {1}'.format(subject, e)
raise InvalidDMARCReport(error)
if feedback_report and sample:
try:
forensic_report = parse_forensic_report(
feedback_report,
sample,
date,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=strip_attachment_payloads,
parallel=parallel)
except InvalidForensicReport as e:
error = 'Message with subject "{0}" ' \
'is not a valid ' \
'forensic DMARC report: {1}'.format(subject, e)
raise InvalidForensicReport(error)
except Exception as e:
raise InvalidForensicReport(e.__str__())
result = OrderedDict([("report_type", "forensic"),
("report", forensic_report)])
return result
if result is None:
error = 'Message with subject "{0}" is ' \
'not a valid DMARC report'.format(subject)
raise InvalidDMARCReport(error)
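# Hedged usage sketch (not part of the original module): feeding a raw RFC 822
# message to parse_report_email and branching on the report type. The path
# "report.eml" is hypothetical.
def _example_parse_report_email(path="report.eml"):
    with open(path, "rb") as message_file:
        result = parse_report_email(message_file.read(), offline=True)
    if result["report_type"] == "aggregate":
        return parsed_aggregate_reports_to_csv(result["report"])
    return parsed_forensic_reports_to_csv(result["report"])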
def parse_report_file(input_, nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False,
offline=False, parallel=False):
"""Parses a DMARC aggregate or forensic file at the given path, a
file-like object. or bytes
Args:
input_: A path to a file, a file like object, or bytes
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
offline (bool): Do not make online queries for geolocation or DNS
parallel (bool): Parallel processing
Returns:
OrderedDict: The parsed DMARC report
"""
if type(input_) == str:
logger.debug("Parsing {0}".format(input_))
file_object = open(input_, "rb")
elif type(input_) == bytes:
file_object = BytesIO(input_)
else:
file_object = input_
content = file_object.read()
file_object.close()
try:
report = parse_aggregate_report_file(content,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
parallel=parallel)
results = OrderedDict([("report_type", "aggregate"),
("report", report)])
except InvalidAggregateReport:
try:
sa = strip_attachment_payloads
results = parse_report_email(content,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa,
parallel=parallel)
except InvalidDMARCReport:
raise InvalidDMARCReport("Not a valid aggregate or forensic "
"report")
return results
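# Hedged usage sketch (not part of the original module): parse_report_file accepts
# a path, bytes, or a file-like object, so a quick JSON dump of a report on disk
# might look like this. "dmarc_report.xml.gz" is a hypothetical filename.
def _example_report_file_to_json(path="dmarc_report.xml.gz"):
    result = parse_report_file(path, offline=True)
    return json.dumps(result["report"], ensure_ascii=False, indent=2)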
def get_dmarc_reports_from_mbox(input_, nameservers=None, dns_timeout=2.0,
strip_attachment_payloads=False,
offline=False, parallel=False):
"""Parses a mailbox in mbox format containing e-mails with attached
DMARC reports
Args:
input_: A path to a mbox file
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
dns_timeout (float): Sets the DNS timeout in seconds
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
offline (bool): Do not make online queries for geolocation or DNS
parallel (bool): Parallel processing
Returns:
OrderedDict: Lists of ``aggregate_reports`` and ``forensic_reports``
"""
aggregate_reports = []
forensic_reports = []
try:
mbox = mailbox.mbox(input_)
message_keys = mbox.keys()
total_messages = len(message_keys)
logger.debug("Found {0} messages in {1}".format(total_messages,
input_))
for i in range(len(message_keys)):
message_key = message_keys[i]
logger.debug("Processing message {0} of {1}".format(
i+1, total_messages
))
msg_content = mbox.get_string(message_key)
try:
sa = strip_attachment_payloads
parsed_email = parse_report_email(msg_content,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa,
parallel=parallel)
if parsed_email["report_type"] == "aggregate":
aggregate_reports.append(parsed_email["report"])
elif parsed_email["report_type"] == "forensic":
forensic_reports.append(parsed_email["report"])
except InvalidDMARCReport as error:
logger.warning(error.__str__())
except mailbox.NoSuchMailboxError:
raise InvalidDMARCReport("Mailbox {0} does not exist".format(input_))
return OrderedDict([("aggregate_reports", aggregate_reports),
("forensic_reports", forensic_reports)])
def get_imap_capabilities(server):
"""
Returns a list of an IMAP server's capabilities
Args:
server (imapclient.IMAPClient): An instance of imapclient.IMAPClient
Returns (list): A list of capabilities
"""
capabilities = list(map(str, list(server.capabilities())))
for i in range(len(capabilities)):
capabilities[i] = str(capabilities[i]).replace("b'",
"").replace("'",
"")
logger.debug("IMAP server supports: {0}".format(capabilities))
return capabilities
def get_dmarc_reports_from_inbox(connection=None,
host=None,
user=None,
password=None,
port=None,
ssl=True,
verify=True,
timeout=30,
max_retries=4,
reports_folder="INBOX",
archive_folder="Archive",
delete=False,
test=False,
offline=False,
nameservers=None,
dns_timeout=6.0,
strip_attachment_payloads=False,
results=None):
"""
Fetches and parses DMARC reports from an inbox
Args:
connection: An IMAPClient connection to reuse
host: The mail server hostname or IP address
user: The mail server user
password: The mail server password
port: The mail server port
ssl (bool): Use SSL/TLS
verify (bool): Verify SSL/TLS certificate
timeout (float): IMAP timeout in seconds
max_retries (int): The maximum number of retries after a timeout
reports_folder: The IMAP folder where reports can be found
archive_folder: The folder to move processed mail to
delete (bool): Delete messages after processing them
test (bool): Do not move or delete messages after processing them
        offline (bool): Do not query online for geolocation or DNS
nameservers (list): A list of DNS nameservers to query
dns_timeout (float): Set the DNS query timeout
strip_attachment_payloads (bool): Remove attachment payloads from
forensic report results
results (dict): Results from the previous run
Returns:
OrderedDict: Lists of ``aggregate_reports`` and ``forensic_reports``
"""
if delete and test:
raise ValueError("delete and test options are mutually exclusive")
if connection is None and (user is None or password is None):
raise ValueError("Must supply a connection, or a username and "
"password")
aggregate_reports = []
forensic_reports = []
aggregate_report_msg_uids = []
forensic_report_msg_uids = []
aggregate_reports_folder = "{0}/Aggregate".format(archive_folder)
forensic_reports_folder = "{0}/Forensic".format(archive_folder)
invalid_reports_folder = "{0}/Invalid".format(archive_folder)
if results:
aggregate_reports = results["aggregate_reports"].copy()
forensic_reports = results["forensic_reports"].copy()
if connection:
server = connection
else:
server = IMAPClient(host, user, password, port=port,
ssl=ssl, verify=verify,
timeout=timeout,
max_retries=max_retries,
initial_folder=reports_folder)
server.create_folder(archive_folder)
server.create_folder(aggregate_reports_folder)
server.create_folder(forensic_reports_folder)
server.create_folder(invalid_reports_folder)
messages = server.search()
total_messages = len(messages)
logger.debug("Found {0} messages in {1}".format(len(messages),
reports_folder))
for i in range(len(messages)):
msg_uid = messages[i]
logger.debug("Processing message {0} of {1}: UID {2}".format(
i+1, total_messages, msg_uid
))
msg_content = server.fetch_message(msg_uid, parse=False)
sa = strip_attachment_payloads
try:
parsed_email = parse_report_email(msg_content,
nameservers=nameservers,
dns_timeout=dns_timeout,
offline=offline,
strip_attachment_payloads=sa)
if parsed_email["report_type"] == "aggregate":
aggregate_reports.append(parsed_email["report"])
aggregate_report_msg_uids.append(msg_uid)
elif parsed_email["report_type"] == "forensic":
forensic_reports.append(parsed_email["report"])
forensic_report_msg_uids.append(msg_uid)
except InvalidDMARCReport as error:
logger.warning(error.__str__())
if not test:
if delete:
logger.debug(
"Deleting message UID {0}".format(msg_uid))
server.delete_messages([msg_uid])
else:
logger.debug(
"Moving message UID {0} to {1}".format(
msg_uid, invalid_reports_folder))
server.move_messages([msg_uid], invalid_reports_folder)
if not test:
if delete:
processed_messages = aggregate_report_msg_uids + \
forensic_report_msg_uids
number_of_processed_msgs = len(processed_messages)
for i in range(number_of_processed_msgs):
msg_uid = processed_messages[i]
logger.debug(
"Deleting message {0} of {1}: UID {2}".format(
i + 1, number_of_processed_msgs, msg_uid))
try:
server.delete_messages([msg_uid])
except Exception as e:
message = "Error deleting message UID"
e = "{0} {1}: " "{2}".format(message, msg_uid, e)
logger.error("IMAP error: {0}".format(e))
else:
if len(aggregate_report_msg_uids) > 0:
log_message = "Moving aggregate report messages from"
logger.debug(
"{0} {1} to {2}".format(
log_message, reports_folder,
aggregate_reports_folder))
number_of_agg_report_msgs = len(aggregate_report_msg_uids)
for i in range(number_of_agg_report_msgs):
msg_uid = aggregate_report_msg_uids[i]
logger.debug(
"Moving message {0} of {1}: UID {2}".format(
i+1, number_of_agg_report_msgs, msg_uid))
try:
server.move_messages([msg_uid],
aggregate_reports_folder)
except Exception as e:
message = "Error moving message UID"
e = "{0} {1}: {2}".format(message, msg_uid, e)
logger.error("IMAP error: {0}".format(e))
if len(forensic_report_msg_uids) > 0:
message = "Moving forensic report messages from"
logger.debug(
"{0} {1} to {2}".format(message,
reports_folder,
forensic_reports_folder))
number_of_forensic_msgs = len(forensic_report_msg_uids)
for i in range(number_of_forensic_msgs):
msg_uid = forensic_report_msg_uids[i]
message = "Moving message"
logger.debug("{0} {1} of {2}: UID {2}".format(
message,
i + 1, number_of_forensic_msgs, msg_uid))
try:
server.move_messages([msg_uid],
forensic_reports_folder)
except Exception as e:
e = "Error moving message UID {0}: {1}".format(
msg_uid, e)
logger.error("IMAP error: {0}".format(e))
results = OrderedDict([("aggregate_reports", aggregate_reports),
("forensic_reports", forensic_reports)])
total_messages = len(server.search())
if not test and total_messages > 0:
# Process emails that came in during the last run
results = get_dmarc_reports_from_inbox(
connection=server,
reports_folder=reports_folder,
archive_folder=archive_folder,
delete=delete,
test=test,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=strip_attachment_payloads,
results=results
)
return results
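# Hedged usage sketch (not part of the original module): fetching reports over IMAP
# and saving the parsed output. The host and credentials are placeholders.
def _example_fetch_inbox(host="imap.example.com", user="dmarc@example.com",
                         password="changeme"):
    results = get_dmarc_reports_from_inbox(host=host, user=user,
                                           password=password, test=True)
    save_output(results, output_directory="output")
    return results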
def watch_inbox(host, username, password, callback, port=None, ssl=True,
verify=True, reports_folder="INBOX",
archive_folder="Archive", delete=False, test=False,
idle_timeout=30, offline=False, nameservers=None,
dns_timeout=6.0, strip_attachment_payloads=False):
"""
Use an IDLE IMAP connection to parse incoming emails, and pass the results
to a callback function
Args:
host: The mail server hostname or IP address
username: The mail server username
password: The mail server password
callback: The callback function to receive the parsing results
port: The mail server port
ssl (bool): Use SSL/TLS
verify (bool): Verify the TLS/SSL certificate
reports_folder: The IMAP folder where reports can be found
archive_folder: The folder to move processed mail to
delete (bool): Delete messages after processing them
test (bool): Do not move or delete messages after processing them
        idle_timeout (int): Number of seconds to wait for an IMAP IDLE response
offline (bool): Do not query online for geolocation or DNS
nameservers (list): A list of one or more nameservers to use
(Cloudflare's public DNS resolvers by default)
dns_timeout (float): Set the DNS query timeout
strip_attachment_payloads (bool): Replace attachment payloads in
forensic report samples with None
"""
sa = strip_attachment_payloads
def idle_callback(connection):
res = get_dmarc_reports_from_inbox(connection=connection,
reports_folder=reports_folder,
archive_folder=archive_folder,
delete=delete,
test=test,
offline=offline,
nameservers=nameservers,
dns_timeout=dns_timeout,
strip_attachment_payloads=sa)
callback(res)
IMAPClient(host=host, username=username, password=password,
port=port, ssl=ssl, verify=verify,
initial_folder=reports_folder,
idle_callback=idle_callback,
idle_timeout=idle_timeout)
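# Hedged usage sketch (not part of the original module): a minimal callback for
# watch_inbox that just logs how many reports arrived. Credentials are placeholders.
def _example_watch_callback(results):
    logger.info("Received {0} aggregate and {1} forensic reports".format(
        len(results["aggregate_reports"]), len(results["forensic_reports"])))
# watch_inbox("imap.example.com", "dmarc@example.com", "changeme",
#             _example_watch_callback) would then block and invoke the callback
# whenever new mail is processed.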
def save_output(results, output_directory="output"):
"""
Save report data in the given directory
Args:
results (OrderedDict): Parsing results
        output_directory: The path to the directory to save in
"""
aggregate_reports = results["aggregate_reports"]
forensic_reports = results["forensic_reports"]
if os.path.exists(output_directory):
if not os.path.isdir(output_directory):
raise ValueError("{0} is not a directory".format(output_directory))
else:
os.makedirs(output_directory)
with open("{0}".format(os.path.join(output_directory, "aggregate.json")),
"w", newline="\n", encoding="utf-8") as agg_json:
agg_json.write(json.dumps(aggregate_reports, ensure_ascii=False,
indent=2))
with open("{0}".format(os.path.join(output_directory, "aggregate.csv")),
"w", newline="\n", encoding="utf-8") as agg_csv:
csv = parsed_aggregate_reports_to_csv(aggregate_reports)
agg_csv.write(csv)
with open("{0}".format(os.path.join(output_directory, "forensic.json")),
"w", newline="\n", encoding="utf-8") as for_json:
for_json.write(json.dumps(forensic_reports, ensure_ascii=False,
indent=2))
with open("{0}".format(os.path.join(output_directory, "forensic.csv")),
"w", newline="\n", encoding="utf-8") as for_csv:
csv = parsed_forensic_reports_to_csv(forensic_reports)
for_csv.write(csv)
samples_directory = os.path.join(output_directory, "samples")
if not os.path.exists(samples_directory):
os.makedirs(samples_directory)
sample_filenames = []
for forensic_report in forensic_reports:
sample = forensic_report["sample"]
message_count = 0
parsed_sample = forensic_report["parsed_sample"]
subject = parsed_sample["filename_safe_subject"]
filename = subject
while filename in sample_filenames:
message_count += 1
filename = "{0} ({1})".format(subject, message_count)
sample_filenames.append(filename)
filename = "{0}.eml".format(filename)
path = os.path.join(samples_directory, filename)
with open(path, "w", newline="\n", encoding="utf-8") as sample_file:
sample_file.write(sample)
def get_report_zip(results):
"""
Creates a zip file of parsed report output
Args:
results (OrderedDict): The parsed results
Returns:
bytes: zip file bytes
"""
def add_subdir(root_path, subdir):
subdir_path = os.path.join(root_path, subdir)
for subdir_root, subdir_dirs, subdir_files in os.walk(subdir_path):
for subdir_file in subdir_files:
subdir_file_path = os.path.join(root_path, subdir, subdir_file)
if os.path.isfile(subdir_file_path):
rel_path = os.path.relpath(subdir_root, subdir_file_path)
subdir_arc_name = os.path.join(rel_path, subdir_file)
zip_file.write(subdir_file_path, subdir_arc_name)
for subdir in subdir_dirs:
add_subdir(subdir_path, subdir)
storage = BytesIO()
tmp_dir = tempfile.mkdtemp()
try:
save_output(results, tmp_dir)
with zipfile.ZipFile(storage, 'w', zipfile.ZIP_DEFLATED) as zip_file:
for root, dirs, files in os.walk(tmp_dir):
for file in files:
file_path = os.path.join(root, file)
if os.path.isfile(file_path):
arcname = os.path.join(os.path.relpath(root, tmp_dir),
file)
zip_file.write(file_path, arcname)
for directory in dirs:
dir_path = os.path.join(root, directory)
if os.path.isdir(dir_path):
zip_file.write(dir_path, directory)
add_subdir(root, directory)
finally:
shutil.rmtree(tmp_dir)
return storage.getvalue()
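# Hedged usage sketch (not part of the original module): persisting the zip bytes
# returned above. The output filename is hypothetical.
def _example_save_report_zip(results, path="dmarc_reports.zip"):
    with open(path, "wb") as zip_output:
        zip_output.write(get_report_zip(results))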
def email_results(results, host, mail_from, mail_to,
mail_cc=None, mail_bcc=None, port=0,
require_encryption=False, verify=True,
username=None, password=None, subject=None,
attachment_filename=None, message=None):
"""
Emails parsing results as a zip file
Args:
results (OrderedDict): Parsing results
host: Mail server hostname or IP address
mail_from: The value of the message from header
mail_to (list): A list of addresses to mail to
mail_cc (list): A list of addresses to CC
        mail_bcc (list): A list of addresses to BCC
port (int): Port to use
require_encryption (bool): Require a secure connection from the start
verify (bool): verify the SSL/TLS certificate
username (str): An optional username
password (str): An optional password
subject (str): Overrides the default message subject
attachment_filename (str): Override the default attachment filename
        message (str): Override the default plain text body
"""
    logger.debug("Emailing report to: {0}".format(",".join(mail_to)))
date_string = datetime.now().strftime("%Y-%m-%d")
if attachment_filename:
if not attachment_filename.lower().endswith(".zip"):
attachment_filename += ".zip"
filename = attachment_filename
else:
filename = "DMARC-{0}.zip".format(date_string)
assert isinstance(mail_to, list)
if subject is None:
subject = "DMARC results for {0}".format(date_string)
if message is None:
message = "DMARC results for {0}".format(date_string)
zip_bytes = get_report_zip(results)
attachments = [(filename, zip_bytes)]
send_email(host, mail_from, mail_to, message_cc=mail_cc,
message_bcc=mail_bcc, port=port,
require_encryption=require_encryption, verify=verify,
username=username, password=password, subject=subject,
attachments=attachments, plain_message=message)
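# Hedged usage sketch (not part of the original module): mailing the zipped results
# through a relay. The host and addresses are placeholders.
def _example_email_results(results):
    email_results(results, host="smtp.example.com",
                  mail_from="dmarc@example.com",
                  mail_to=["postmaster@example.com"])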
| 41.21341
| 79
| 0.58293
|
1ba378633179d561b40c43b80fc2da841795de5c
| 2,486
|
py
|
Python
|
Cycle GAN/create_cyclegan_dataset.py
|
he71dulu/SEMAAMIP
|
501b912b3bace5475eed103d1c223b6cf574ae32
|
[
"MIT"
] | null | null | null |
Cycle GAN/create_cyclegan_dataset.py
|
he71dulu/SEMAAMIP
|
501b912b3bace5475eed103d1c223b6cf574ae32
|
[
"MIT"
] | null | null | null |
Cycle GAN/create_cyclegan_dataset.py
|
he71dulu/SEMAAMIP
|
501b912b3bace5475eed103d1c223b6cf574ae32
|
[
"MIT"
] | null | null | null |
"""Create datasets for training and testing."""
import csv
import os
import random
import click
from . import cyclegan_datasets
def create_list(foldername, fulldir=True, suffix=".jpg"):
"""
:param foldername: The full path of the folder.
:param fulldir: Whether to return the full path or not.
:param suffix: Filter by suffix.
:return: The list of filenames in the folder with given suffix.
"""
file_list_tmp = os.listdir(foldername)
file_list = []
if fulldir:
for item in file_list_tmp:
if item.endswith(suffix):
file_list.append(os.path.join(foldername, item))
else:
for item in file_list_tmp:
if item.endswith(suffix):
file_list.append(item)
return file_list
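# Hedged usage sketch (not part of the original script): listing the PNG files of a
# hypothetical folder, once with full paths and once with bare filenames.
def _example_create_list(folder="input/horse2zebra/trainA"):
    full_paths = create_list(folder, fulldir=True, suffix=".png")
    names_only = create_list(folder, fulldir=False, suffix=".png")
    return full_paths, names_only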
@click.command()
@click.option('--image_path_a',
type=click.STRING,
default='CycleGAN_TensorFlow/input/horse2zebra/trainA',
help='The path to the images from domain_a.')
@click.option('--image_path_b',
type=click.STRING,
default='CycleGAN_TensorFlow/input/horse2zebra/trainB',
help='The path to the images from domain_b.')
@click.option('--dataset_name',
type=click.STRING,
default='horse2zebra_train',
help='The name of the dataset in cyclegan_dataset.')
@click.option('--do_shuffle',
type=click.BOOL,
default=False,
help='Whether to shuffle images when creating the dataset.')
def create_dataset(image_path_a, image_path_b,
dataset_name, do_shuffle):
list_a = create_list(image_path_a, True,
cyclegan_datasets.DATASET_TO_IMAGETYPE[dataset_name])
list_b = create_list(image_path_b, True,
cyclegan_datasets.DATASET_TO_IMAGETYPE[dataset_name])
output_path = cyclegan_datasets.PATH_TO_CSV[dataset_name]
num_rows = cyclegan_datasets.DATASET_TO_SIZES[dataset_name]
all_data_tuples = []
for i in range(num_rows):
all_data_tuples.append((
list_a[i % len(list_a)],
list_b[i % len(list_b)]
))
if do_shuffle is True:
random.shuffle(all_data_tuples)
with open(output_path, 'w') as csv_file:
csv_writer = csv.writer(csv_file)
        for data_tuple in all_data_tuples:
            csv_writer.writerow(list(data_tuple))
if __name__ == '__main__':
create_dataset()
| 32.285714
| 78
| 0.637973
|
6f9f28f426a8b0df1ba11fded7ba6b0b9e0f2265
| 7,373
|
py
|
Python
|
applications/hw2/models/menu.py
|
chipsandtea/CMPS183
|
56073fb13035886b68dae125175497af213d24c8
|
[
"BSD-3-Clause"
] | null | null | null |
applications/hw2/models/menu.py
|
chipsandtea/CMPS183
|
56073fb13035886b68dae125175497af213d24c8
|
[
"BSD-3-Clause"
] | null | null | null |
applications/hw2/models/menu.py
|
chipsandtea/CMPS183
|
56073fb13035886b68dae125175497af213d24c8
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# this file is released under public domain and you can use without limitations
# ----------------------------------------------------------------------------------------------------------------------
# Customize your APP title, subtitle and menus here
# ----------------------------------------------------------------------------------------------------------------------
response.logo = A(B('web', SPAN(2), 'py'), XML('™ '),
_class="navbar-brand", _href="http://www.web2py.com/",
_id="web2py-logo")
response.title = request.application.replace('_', ' ').title()
response.subtitle = ''
# ----------------------------------------------------------------------------------------------------------------------
# read more at http://dev.w3.org/html5/markup/meta.name.html
# ----------------------------------------------------------------------------------------------------------------------
## commenting out to avoid error because myconf undefined
# response.meta.author = myconf.get('app.author')
# response.meta.description = myconf.get('app.description')
# response.meta.keywords = myconf.get('app.keywords')
# response.meta.generator = myconf.get('app.generator')
# ----------------------------------------------------------------------------------------------------------------------
# your http://google.com/analytics id
# ----------------------------------------------------------------------------------------------------------------------
response.google_analytics_id = None
# ----------------------------------------------------------------------------------------------------------------------
# this is the main application menu add/remove items as required
# ----------------------------------------------------------------------------------------------------------------------
response.menu = [
(T('Home'), False, URL('default', 'index'), [])
]
DEVELOPMENT_MENU = True
# ----------------------------------------------------------------------------------------------------------------------
# provide shortcuts for development. remove in production
# ----------------------------------------------------------------------------------------------------------------------
def _():
# ------------------------------------------------------------------------------------------------------------------
# shortcuts
# ------------------------------------------------------------------------------------------------------------------
app = request.application
ctr = request.controller
# ------------------------------------------------------------------------------------------------------------------
# useful links to internal and external resources
# ------------------------------------------------------------------------------------------------------------------
response.menu += [
(T('My Sites'), False, URL('admin', 'default', 'site')),
(T('This App'), False, '#', [
(T('Design'), False, URL('admin', 'default', 'design/%s' % app)),
LI(_class="divider"),
(T('Controller'), False,
URL(
'admin', 'default', 'edit/%s/controllers/%s.py' % (app, ctr))),
(T('View'), False,
URL(
'admin', 'default', 'edit/%s/views/%s' % (app, response.view))),
(T('DB Model'), False,
URL(
'admin', 'default', 'edit/%s/models/db.py' % app)),
(T('Menu Model'), False,
URL(
'admin', 'default', 'edit/%s/models/menu.py' % app)),
(T('Config.ini'), False,
URL(
'admin', 'default', 'edit/%s/private/appconfig.ini' % app)),
(T('Layout'), False,
URL(
'admin', 'default', 'edit/%s/views/layout.html' % app)),
(T('Stylesheet'), False,
URL(
'admin', 'default', 'edit/%s/static/css/web2py-bootstrap3.css' % app)),
(T('Database'), False, URL(app, 'appadmin', 'index')),
(T('Errors'), False, URL(
'admin', 'default', 'errors/' + app)),
(T('About'), False, URL(
'admin', 'default', 'about/' + app)),
]),
('web2py.com', False, '#', [
(T('Download'), False,
'http://www.web2py.com/examples/default/download'),
(T('Support'), False,
'http://www.web2py.com/examples/default/support'),
(T('Demo'), False, 'http://web2py.com/demo_admin'),
(T('Quick Examples'), False,
'http://web2py.com/examples/default/examples'),
(T('FAQ'), False, 'http://web2py.com/AlterEgo'),
(T('Videos'), False,
'http://www.web2py.com/examples/default/videos/'),
(T('Free Applications'),
False, 'http://web2py.com/appliances'),
(T('Plugins'), False, 'http://web2py.com/plugins'),
(T('Recipes'), False, 'http://web2pyslices.com/'),
]),
(T('Documentation'), False, '#', [
(T('Online book'), False, 'http://www.web2py.com/book'),
LI(_class="divider"),
(T('Preface'), False,
'http://www.web2py.com/book/default/chapter/00'),
(T('Introduction'), False,
'http://www.web2py.com/book/default/chapter/01'),
(T('Python'), False,
'http://www.web2py.com/book/default/chapter/02'),
(T('Overview'), False,
'http://www.web2py.com/book/default/chapter/03'),
(T('The Core'), False,
'http://www.web2py.com/book/default/chapter/04'),
(T('The Views'), False,
'http://www.web2py.com/book/default/chapter/05'),
(T('Database'), False,
'http://www.web2py.com/book/default/chapter/06'),
(T('Forms and Validators'), False,
'http://www.web2py.com/book/default/chapter/07'),
(T('Email and SMS'), False,
'http://www.web2py.com/book/default/chapter/08'),
(T('Access Control'), False,
'http://www.web2py.com/book/default/chapter/09'),
(T('Services'), False,
'http://www.web2py.com/book/default/chapter/10'),
(T('Ajax Recipes'), False,
'http://www.web2py.com/book/default/chapter/11'),
(T('Components and Plugins'), False,
'http://www.web2py.com/book/default/chapter/12'),
(T('Deployment Recipes'), False,
'http://www.web2py.com/book/default/chapter/13'),
(T('Other Recipes'), False,
'http://www.web2py.com/book/default/chapter/14'),
(T('Helping web2py'), False,
'http://www.web2py.com/book/default/chapter/15'),
(T("Buy web2py's book"), False,
'http://stores.lulu.com/web2py'),
]),
(T('Community'), False, None, [
(T('Groups'), False,
'http://www.web2py.com/examples/default/usergroups'),
(T('Twitter'), False, 'http://twitter.com/web2py'),
(T('Live Chat'), False,
'http://webchat.freenode.net/?channels=web2py'),
]),
]
if DEVELOPMENT_MENU:
_()
if "auth" in locals():
auth.wikimenu()
| 48.189542
| 120
| 0.403906
|
a10abac17391de8fda18dd7fe5aefe9fce3a54e0
| 9,859
|
py
|
Python
|
modules/random_search.py
|
J0nasW/Bachelorarbeit
|
37052be075713f2016d42782397ff875e3e982f8
|
[
"MIT"
] | null | null | null |
modules/random_search.py
|
J0nasW/Bachelorarbeit
|
37052be075713f2016d42782397ff875e3e982f8
|
[
"MIT"
] | null | null | null |
modules/random_search.py
|
J0nasW/Bachelorarbeit
|
37052be075713f2016d42782397ff875e3e982f8
|
[
"MIT"
] | null | null | null |
"""
RL MODULE WITH RANDOM SEARCH
CALL BY: <random_search.py>
RETURN: Parameter Matrices for the inverted Pendulum Problem
Stores Data of best Parameters in '<date>_rs_reward_<reward>.p'
INFO: -
"""
# Some dependencies
import numpy as np # Maths and stuff
import gym.spaces # Simulating the Environments
import pickle
import datetime # For Datestamp on stored files
from .lif import I_syn_calc, I_gap_calc, U_neuron_calc
from .parameters import *
# Initialization----------------------------------------------------------------------------
def initialize(Default_U_leak):
global totalreward, done, info
# Initializing Neurons and Sensors------------------------------------------------------
for i in range(0,4):
x[i] = Default_U_leak
for i in range(0,4):
u[i] = Default_U_leak
#OpenAI Gym Parameters------------------------------------------------------------------------------
totalreward = 0
done = 0
info = 0
#-------------------------------------------------------------------------------------------
# Random Function---------------------------------------------------------------------------
def random_parameters():
# Initialize random parameters for our Neurons and Synapses
# For Synapses
w_in_mat_rnd = np.random.uniform(low = 0, high = 3, size = (4,4))
w_sin_mat_rnd = np.random.uniform(low = 0, high = 3, size = (4,4))
sig_in_mat_rnd = np.random.uniform(low = 0.05, high = 0.5, size = (4,4))
sig_sin_mat_rnd = np.random.uniform(low = 0.05, high = 0.5, size = (4,4))
# For Gap-Junctions
#w_gap_in_mat_rnd = np.random.uniform(low = 0, high = 3, size = (4,4))
w_gap_sin_mat_rnd = np.random.uniform(low = 0, high = 3, size = (4,4))
# For Neurons
C_m_mat_rnd = np.random.uniform(low = 0.001, high = 1, size = (1,4))
G_leak_mat_rnd = np.random.uniform(low = 0.05, high = 5, size = (1,4))
U_leak_mat_rnd = np.random.uniform(low = -80, high = -60, size = (1,4))
return w_in_mat_rnd, w_sin_mat_rnd, sig_in_mat_rnd, sig_sin_mat_rnd, w_gap_sin_mat_rnd, C_m_mat_rnd, G_leak_mat_rnd, U_leak_mat_rnd
#-------------------------------------------------------------------------------------------
# Compute Function--------------------------------------------------------------------------
def compute(x, u, w_in_mat, w_sin_mat, w_gap_sin_mat, sig_in_mat, sig_sin_mat, C_m_mat, G_leak_mat, U_leak_mat):
# Compute all Synapse Currents in this network------------------------------------------
for i in range(0,4):
for j in range (0,4):
# Synapse Currents between Interneurons
if A[i, j] == 1:
# Excitatory Synapse
I_s_inter[i, j] = I_syn_calc(x[i], x[j], E_ex, w_in_mat[i, j], sig_in_mat[i, j], mu)
elif A[i, j] == -1:
# Inhibitory Synapse
I_s_inter[i, j] = I_syn_calc(x[i], x[j], E_in, w_in_mat[i, j], sig_in_mat[i, j], mu)
else:
I_s_inter[i, j] = 0
"""
# There are no Gap Junctions between inter-neurons (for now)
# Gap-Junction Currents between Interneurons
if A_gap[i, j] == 1:
# There is a Gap-Junctions
I_g_inter[i, j] = I_gap_calc(x[i], x[j], w_gap_in_mat[i, j])
else:
I_g_inter[i, j] = 0
"""
for i in range(0,4):
for j in range(0,4):
# Synapse Currents between Sensory and Interneurons
if B[i, j] == 1:
# Inhibitory Synapse (can't be Excitatory)
I_s_sensor[i, j] = I_syn_calc(u[i], u[j], E_in, w_sin_mat[i, j], sig_sin_mat[i, j], mu)
else:
I_s_sensor[i, j] = 0
# Gap-Junction Currents between Sensory and Interneurons
if B_gap[i, j] == 1:
# There is a Gap-Junctions
I_g_sensor[i, j] = I_gap_calc(x[i], x[j], w_gap_sin_mat[i, j])
else:
I_g_sensor[i, j] = 0
#---------------------------------------------------------------------------------------
# Now compute inter Neurons Voltages----------------------------------------------------
for i in range(0,4):
        I_syn_inter = I_s_inter.sum(axis = 0) # Sums over columns, giving a length-4 array
I_gap_inter = I_g_inter.sum(axis = 0)
I_syn_stimuli = I_s_sensor.sum(axis = 0)
I_gap_stimuli = I_g_sensor.sum(axis = 0)
x[i], fire[i] = U_neuron_calc(x[i], I_syn_inter[i], I_gap_inter[i], I_syn_stimuli[i], I_gap_stimuli[i], C_m_mat[0,i], G_leak_mat[0,i], U_leak_mat[0,i], v, delta_t)
#---------------------------------------------------------------------------------------
I_syn = np.add(I_syn_inter, I_syn_stimuli)
I_gap = np.add(I_gap_inter, I_gap_stimuli)
return x, u, fire, I_syn, I_gap
#-------------------------------------------------------------------------------------------
# OpenAI Gym--------------------------------------------------------------------------------
def run_episode(env, w_in_mat, w_sin_mat, sig_in_mat, sig_sin_mat, w_gap_sin_mat, C_m_mat, G_leak_mat, U_leak_mat):
global x, u, fire, I_syn, I_gap, action
observation = env.reset()
totalreward = 0
    for t in np.arange(t0,T,delta_t): # RUNNING THE EPISODE - Trying to get 200 steps in this episode
# Compute the next Interneuron Voltages along with a possible "fire" Event - Now new with random parameter matrices
x, u, fire, I_syn, I_gap = compute(x, u, w_in_mat, w_sin_mat, sig_in_mat, sig_sin_mat, w_gap_sin_mat, C_m_mat, G_leak_mat, U_leak_mat)
# Decide for an action and making a Step
if fire[0] == 1: # Sensory Neuron AVA is firing - resulting in a REVERSE Action (0)
action = 0
observation, reward, done, info = env.step(action)
totalreward += reward
#print 'RIGHT'
elif fire[3] == 1: # Sensory Neuron AVB is firing - resulting in a FORWARD Action (1)
action = 1
observation, reward, done, info = env.step(action)
totalreward += reward
#print 'LEFT'
else:
#print 'Im not sure :( Going ',action
#action = np.random.randint(0,1) # Tried a random approach - didn't seem to work
observation, reward, done, info = env.step(action) # Have to use the action from the past time step - OpenAI Gym does not provide a "Do nothing"-Action
totalreward += reward
observe(observation)
if done:
break
return totalreward
def observe(observation):
global u
cart_pos = observation[0] # [-2.4 2.4]
#cart_vel = observation[1]
angle = (observation[2] * 360) / (2 * np.pi) # in degrees [-12deg 12deg] (for Simulations)
#angle_velocity = observation[3]
# Adapt, learn, overcome-----------------------------------------------------------------------------------------
# Setting the Angle of the Pole to Sensory Neurons PLM (Phi+) and AVM (Phi-)
if angle > 0:
u[1] = -70 + (50/12) * angle # PLM
u[2] = -70
elif angle == 0:
u[1] = u[2] = -70
else:
u[2] = -70 + (50/12) * angle # AVM
u[1] = -70
# Setting the Cart Position to Sensory Neurons ALM (pos. movement) and PVD (neg. movement)
if cart_pos > 0:
u[3] = -70 + (50/2.4) * cart_pos # ALM
u[0] = -70
elif cart_pos == 0:
u[0] = u[3] = -70
else:
u[0] = -70 + (50/2.4) * cart_pos # PVD
u[3] = -70
'''
# Setting the Anglespeed of the Pole to Sensory Neurons ALM (Phi.+) and PVD (Phi.-)
if angle_velocity >= 0:
u[3] = -70 + (50/5) * angle_velocity # ALM
u[0] = -70
elif cart_pos == 0:
u[0] = u[3] = -70
else:
u[0] = -70 + (50/5) * angle_velocity # PVD
u[3] = -70
'''
#------------------------------------------------------------------------------------
#-------------------------------------------------------------------------------------------
# Main Function-----------------------------------------------------------------------------
#-------------------------------------------------------------------------------------------
def main(simulations):
global x, u, env, action
env_vis = []
action = 0
episodes = 0
best_reward = 0
env = gym.make('CartPole-v0')
for _ in range(simulations):
initialize(Default_U_leak) # Initializing all Sensory- and Interneurons with the desired leakage voltage [-70mV]
episodes += 1 # Episode Counter
w_in_mat, w_sin_mat, sig_in_mat, sig_sin_mat, w_gap_sin_mat, C_m_mat, G_leak_mat, U_leak_mat = random_parameters() # Make some new random parameter Matrices
reward = run_episode(env, w_in_mat, w_sin_mat, sig_in_mat, sig_sin_mat, w_gap_sin_mat, C_m_mat, G_leak_mat, U_leak_mat)
if reward > best_reward:
# Set current reward as new reward
best_reward = reward
# Save Results of the Run with the best reward
Result = [w_in_mat, w_sin_mat, sig_in_mat, sig_sin_mat, w_gap_sin_mat, C_m_mat, G_leak_mat, U_leak_mat]
# Solved the Simulation
if reward == 200:
break
#print 'Episode',episodes,'mit Reward',reward,'.'
print ("The best Reward was:",best_reward)
if best_reward == 200:
print ('I SOLVED IT!')
date = datetime.datetime.now().strftime("%Y%m%d_%H-%M-%S")
best_reward_s = str(int(best_reward))
pickle.dump(Result, open(("parameter_dumps/" + date + "_rs_" + best_reward_s + ".p"), "wb"))
return date, best_reward_s
#-------------------------------------------------------------------------------------------
if __name__=="__main__":
main()
| 39.278884
| 171
| 0.507151
|
c847f47e77f1929c00eec039f448e4652aa6f19a
| 4,292
|
py
|
Python
|
theforce/deprecated/util/arrays.py
|
changwmyung/AutoForce
|
c8a429b05685841e1f17e8655b981753e1d086be
|
[
"MIT"
] | 19
|
2019-10-21T06:56:22.000Z
|
2022-03-14T06:43:42.000Z
|
theforce/deprecated/util/arrays.py
|
17DongGeonKim/AutoForce
|
c8a429b05685841e1f17e8655b981753e1d086be
|
[
"MIT"
] | 8
|
2019-10-23T07:48:49.000Z
|
2022-03-23T03:47:44.000Z
|
theforce/deprecated/util/arrays.py
|
17DongGeonKim/AutoForce
|
c8a429b05685841e1f17e8655b981753e1d086be
|
[
"MIT"
] | 4
|
2021-09-25T11:28:44.000Z
|
2022-02-25T06:50:48.000Z
|
import numpy as np
import warnings
def cat(arrays, axis=0):
lengths = [array.shape[axis] for array in arrays]
cat = np.concatenate(arrays, axis=axis)
spec = lengths + [axis]
return cat, spec
def split(array, spec):
return np.split(array, np.cumsum(spec[:-2]), spec[-1])
class SparseArray:
def __init__(self, shape=(0,)):
# configure the sparse axis (saxis)
try:
self.saxis = shape.index(0)
except ValueError:
raise RuntimeError(
"No sparse axis is defined by setting it to 0 in the input shape!")
if shape.count(0) > 1:
warnings.warn("Multiple 0's in the input shape")
self.shape = shape
# data holders
self.i, self.j, self.a = [], [], []
def add(self, i, j, v):
# many inputs at once
if type(i) == type(j) == type(v) == list:
for a, b, c in zip(*[i, j, v]):
self.add(a, b, c)
return
# make arrays
_i, _j = np.broadcast_arrays(i, j)
_v = np.asarray(v)
# check if input is correct
assert _i.ndim == 1 and _i.shape[0] == v.shape[self.saxis]
assert all([a == b for a, b in zip(v.shape, self.shape) if b != 0])
        # check status and convert if needed
if type(self.i) == np.ndarray:
self._split()
self.i += [_i]
self.j += [_j]
self.a += [_v]
def _cat(self):
if type(self.i) == list:
self.i, self._ispec = cat(self.i)
self.j, self._jspec = cat(self.j)
self.a, self._aspec = cat(self.a, self.saxis)
self.i_max = self.i.max()
self.j_max = self.j.max()
def _split(self):
if type(self.i) == np.ndarray:
self.i = split(self.i, self._ispec)
self.j = split(self.j, self._jspec)
self.a = split(self.a, self._aspec)
del self._ispec, self._jspec, self._aspec
def _sort(self, key=1):
""" key: 0->i, 1->j """
if type(self.i) == list:
self._cat()
# get argsort
ij = [a.tolist() for a in [self.j, self.i]]
if key == 0:
ij = ij[1::-1]
_, argsort = zip(*sorted([([a, b], c) for a, b, c in
zip(*[*ij, range(self.i.shape[0])])],
key=lambda x: x[0]))
argsort = np.array(argsort)
# sort tensors
self.i = self.i[argsort]
self.j = self.j[argsort]
self.a = np.take(self.a, argsort, self.saxis)
# get counts, and create split-sizes for future
idx = [self.i, self.j][key]
unique, count = np.unique(idx, return_counts=True)
count = count.tolist()
self._ispec = count + [0]
self._jspec = count + [0]
self._aspec = count + [self.saxis]
# testing -------------------------------------------------------------
def test_cat_split():
# cat and split
a = np.random.uniform(size=(10, 10, 3))
b = np.random.uniform(size=(10, 8, 3))
c = np.random.uniform(size=(10, 9, 3))
t, spec = cat([a, b, c], 1)
# print(t.shape)
#print([a.shape for a in split(t, spec)])
def test_sparse():
a = np.random.uniform(size=(3))
b = np.random.uniform(size=(4))
c = np.random.uniform(size=(5))
S = SparseArray()
S.add(1, list(range(3)), a)
S.add(2, list(range(4)), b)
S.add(3, list(range(5)), c)
S._cat()
#print(S.i.shape, S.j.shape, S.a.shape)
a = np.random.uniform(size=(7, 3, 6))
b = np.random.uniform(size=(7, 4, 6))
c = np.random.uniform(size=(7, 5, 6))
S = SparseArray(shape=(7, 0, 6))
S.add(1, list(range(3)), a)
S.add(2, list(range(4)), b)
S.add(3, list(range(5)), c)
S._cat()
S.add(3, list(range(5)), c)
S._cat()
# print(S.i.shape, S.j.shape, S.a.shape)
# test full sorting
s = SparseArray(shape=(0,))
s.add([4, 3, 2], 1, np.array([4, 3, 2]))
s.add([3, 1], 2, np.array([3, 1]))
s.add([2, 1, 4], 3, np.array([2, 1, 4]))
s.add([1, 3], 4, np.array([1, 3]))
s.add([2], 4, np.array([2]))
s._sort(key=1)
s._split()
print(s.i)
print(s.j)
print(s.a)
if __name__ == '__main__':
test_cat_split()
test_sparse()
| 28.236842
| 83
| 0.50466
|
7e49ab3ba4eecdbd23b0b124a9cdfe2b64e9ac54
| 13,957
|
py
|
Python
|
solvers.py
|
cmpute/EECS558-Project
|
d964059901c62773b475c5d4b40f018ee28a0c73
|
[
"Unlicense"
] | null | null | null |
solvers.py
|
cmpute/EECS558-Project
|
d964059901c62773b475c5d4b40f018ee28a0c73
|
[
"Unlicense"
] | null | null | null |
solvers.py
|
cmpute/EECS558-Project
|
d964059901c62773b475c5d4b40f018ee28a0c73
|
[
"Unlicense"
] | null | null | null |
import time
import numpy as np
import scipy.spatial as sps
from scipy.sparse import csc_matrix
from shapely.geometry import Point, MultiPoint, LineString, mapping
from shapely.ops import unary_union, nearest_points
from tqdm import trange
from easydict import EasyDict as edict
from matplotlib import patches as patches
class BaseSolver:
'''
Solver base, see below for what you need to implement
'''
def __init__(self):
pass
def solve(self, env, max_steps=50):
'''
This function solves the problem given certain environment
'''
raise NotImplementedError("Derived class should implement this function")
def report_solution(self):
'''
This function report the solution in form of a chain of positions
'''
raise NotImplementedError("Derived class should implement this function")
def action(self, state, step):
'''
After solving a environment, generate action for given state based on solution strategy.
The action return the movement to next target point.
'''
raise NotImplementedError("Derived class should implement this function")
def render(self, ax):
'''
Render debug elements onto a figure, this is for debugging
'''
pass
default_settings = dict(
goal_reward=10000,
goal_dist_thres=0.3,
collision_cost=-1000,
safety_weight=1,
safety_scale=5,
time_weight=1,
)
class GridSolver(BaseSolver):
def __init__(self, grid_size=10):
self._grid_size = grid_size
self._grid = None
self._grid_ticks_x = None
self._grid_ticks_y = None
self._grid_length_x = 0
self._grid_length_y = 0
self._start_xy = None
self._end_xy = None
def solve(self, env, max_steps=200, **settings):
configs = edict(default_settings)
configs.update(settings)
print("Preparing mesh...")
self._grid = np.full((self._grid_size, self._grid_size), False)
self._grid_ticks_x = np.linspace(env._area[0], env._area[2], self._grid_size + 1)
self._grid_ticks_y = np.linspace(env._area[1], env._area[3], self._grid_size + 1)
self._grid_length_x = (env._area[2] - env._area[0]) / float(self._grid_size)
self._grid_length_y = (env._area[3] - env._area[1]) / float(self._grid_size)
self._start_xy = [np.searchsorted(self._grid_ticks_x, env._start.x)-1, np.searchsorted(self._grid_ticks_y, env._start.y)-1]
self._end_xy = [np.searchsorted(self._grid_ticks_x, env._end.x)-1, np.searchsorted(self._grid_ticks_y, env._end.y)-1]
print("Preparing matrices")
for ob in env.obstacles:
minx, miny, maxx, maxy = ob.bounds
iminx = np.clip(np.searchsorted(self._grid_ticks_x, minx), 1, self._grid_size) - 1
iminy = np.clip(np.searchsorted(self._grid_ticks_y, miny), 1, self._grid_size) - 1
imaxx = np.clip(np.searchsorted(self._grid_ticks_x, maxx), 1, self._grid_size)
imaxy = np.clip(np.searchsorted(self._grid_ticks_y, maxy), 1, self._grid_size)
self._grid[iminx:imaxx, iminy:imaxy] = True
goal_array = np.zeros((self._grid_size, self._grid_size))
goal_array[self._end_xy[0], self._end_xy[1]] = configs.goal_reward
safety_cost = np.array([[1/(env.obstacles.distance(Point(
self._grid_ticks_x[j] + self._grid_length_x/2,
self._grid_ticks_y[i] + self._grid_length_y/2)) + 1e-8)
for i in range(self._grid_size)]
for j in range(self._grid_size)]
) * configs.safety_weight
# backward induction
tstart = time.time()
values = np.full((self._grid_size + 2, self._grid_size + 2), -np.inf) # two more rows and columns as sentinels
values[1:-1, 1:-1] = goal_array
best_actions = []
for _ in trange(max_steps, desc="Backward induction..."):
# actions are up(0), right(1), down(2), left(3)
values[1:-1, 2:][self._grid] = -np.inf # you cannot go to blocked area
best_n2 = np.argmax([ # center block
values[1:-1, 2:], # up
values[2:, 1:-1], # right
values[1:-1, :-2], # down
values[:-2, 1:-1], # left
], axis=0)
best_actions.append(best_n2)
new_values = np.full((self._grid_size + 2, self._grid_size + 2), -np.inf)
new_values[1:-1, 1:-1] = -(safety_cost + configs.time_weight)
new_values[1:-1, 1:-1][best_n2 == 0] += values[1:-1, 2:][best_n2 == 0]
new_values[1:-1, 1:-1][best_n2 == 1] += values[2:, 1:-1][best_n2 == 1]
new_values[1:-1, 1:-1][best_n2 == 2] += values[1:-1, :-2][best_n2 == 2]
new_values[1:-1, 1:-1][best_n2 == 3] += values[:-2, 1:-1][best_n2 == 3]
values = new_values
tend = time.time()
self._solution = np.array(list(reversed(best_actions)))
if not np.all(values[1:-1, 1:-1][self._grid] >= 0):
print("!!! No feasible solution found in given steps !!!")
return tend - tstart
def report_solution(self, start_xy=None):
loc = start_xy or self._start_xy
loc_list = []
for sol_t in self._solution:
action = sol_t[loc[0], loc[1]]
if action == 0:
loc[1] += 1
elif action == 1:
loc[0] += 1
elif action == 2:
loc[1] -= 1
elif action == 3:
loc[0] -= 1
loc_list.append(list(loc))
if loc == self._end_xy:
break
return loc_list
def action(self, state, step):
        # TODO!!: there are problems on the edges of the grid
ix = np.clip(np.searchsorted(self._grid_ticks_x, state[0]), 1, self._grid_size) - 1
iy = np.clip(np.searchsorted(self._grid_ticks_y, state[1]), 1, self._grid_size) - 1
step = step % len(self._solution) # XXX: what to do if need more steps
action = self._solution[0][ix, iy]
if action == 0:
return 0, self._grid_length_y
elif action == 1:
return self._grid_length_x, 0
elif action == 2:
return 0, -self._grid_length_y
elif action == 3:
return -self._grid_length_x, 0
def render(self, ax):
# draw grids
for x in self._grid_ticks_x:
ax.axvline(x, ls='--')
for y in self._grid_ticks_y:
ax.axhline(y, ls='--')
# draw grid values
for i in range(self._grid_size):
for j in range(self._grid_size):
if self._grid[i,j]:
rect = patches.Rectangle((self._grid_ticks_x[i], self._grid_ticks_y[j]),
self._grid_length_x, self._grid_length_y, color="#f1a20888")
ax.add_patch(rect)
# draw solution
solution = self.report_solution()
for i in range(len(solution) - 1):
x1 = self._grid_ticks_x[solution[i][0]] + self._grid_length_x/2
y1 = self._grid_ticks_y[solution[i][1]] + self._grid_length_y/2
x2 = self._grid_ticks_x[solution[i+1][0]] + self._grid_length_x/2
y2 = self._grid_ticks_y[solution[i+1][1]] + self._grid_length_y/2
ax.plot([x1, x2], [y1, y2], lw=4, c='green')
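# Hedged usage sketch (not part of the original module): assuming `env` is an
# environment object exposing the interface used above (_area, _start, _end,
# obstacles), one GridSolver run could look like this.
def _example_grid_solver(env, grid_size=20):
    solver = GridSolver(grid_size=grid_size)
    elapsed = solver.solve(env, max_steps=200)
    return elapsed, solver.report_solution()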
class SampleGraphSolver(BaseSolver):
'''
This solver generate random samples and connect them together into a graph with constraint check
'''
def __init__(self, sample_num=100):
self._sample_num = sample_num
self._samples = None
self._connections = None
self._solution = None # store solution for a certain case
self._safety_cost_cache = None # for visualization
def _generate_mesh(self, env):
# generate nodes, first node is the goal
points = env._generate_points(self._sample_num - 2)
points = np.concatenate(([[env._end.x, env._end.y], [env._start.x, env._start.y]], points))
while True:
dist_list = np.array([Point(xy[0], xy[1]).distance(env.obstacles) for xy in points])
collision_mask = dist_list < 1e-5
collision_count = np.sum(collision_mask)
if collision_count == 0:
break
# resample
points[collision_mask] = env._generate_points(collision_count)
self._samples = MultiPoint(points)
# generate triangles
tesselation = sps.Delaunay(points)
triangles = tesselation.simplices.copy()
edges = set()
for tri in triangles:
sortnodes = np.sort(tri)
edges.add((sortnodes[0], sortnodes[1]))
edges.add((sortnodes[1], sortnodes[2]))
edges.add((sortnodes[0], sortnodes[2]))
line_list = []
obstacle_union = unary_union(env.obstacles)
for n1, n2 in edges:
line = LineString([self._samples[n1], self._samples[n2]])
if line.intersection(obstacle_union).is_empty:
line_list.append((n1, n2))
self._connections = line_list
def solve(self, env, max_steps=50, early_stop=True, **settings):
configs = edict(default_settings)
configs.update(settings)
print("Preparing mesh...")
self._generate_mesh(env)
print("Preparing matrices...")
dist_list = [configs.time_weight * self._samples.geoms[n1].distance(self._samples.geoms[n2]) for n1, n2 in self._connections] * 2
connection_list = self._connections + [(n2, n1) for n1, n2 in self._connections]
adj_matrix = csc_matrix((dist_list, zip(*connection_list)), shape=(self._sample_num, self._sample_num))
safety_type = configs.get("safety_type", "linear")
if safety_type == 'reciprocal':
safety_cost = np.array([1/env.obstacles.distance(p) for p in self._samples]) * configs.safety_weight
elif safety_type == 'linear':
safety_cost = -np.array([env.obstacles.distance(p) for p in self._samples]) * configs.safety_weight
elif safety_type == 'tanh':
safety_cost = -np.array([np.tanh(env.obstacles.distance(p) / configs.safety_scale) for p in self._samples]) * configs.safety_weight
self._safety_cost_cache = safety_cost
goal_array = np.zeros(self._sample_num)
goal_array[0] = configs.goal_reward # In backward induction, we require exact arrival
print("Connectivity check...")
stack = [1] # start from intial point
connect_flag = np.full(self._sample_num, False)
while len(stack) > 0:
node = stack.pop()
for next_node in adj_matrix[node].nonzero()[1]:
if not connect_flag[next_node]:
stack.append(next_node)
connect_flag[next_node] = True
if not connect_flag[0]:
raise RuntimeError("Initial point and end point is not connected! Consider add more samples")
# backward induction
tstart = time.time()
values = np.copy(goal_array)
best_actions = []
        # XXX: should this be a max step limit or a convergence condition?
for _ in trange(max_steps, desc="Backward induction..."):
new_values = np.empty(self._sample_num)
new_actions = np.empty(self._sample_num, dtype=int)
for n1 in range(self._sample_num):
mask = adj_matrix[n1].nonzero()[1]
if len(mask) == 0:
continue
rewards = values[mask]
rewards += goal_array[n1] # goal reward
rewards -= safety_cost[n1] # safety cost
rewards -= adj_matrix[n1, mask].toarray().ravel() # distance cost
best_n2 = np.argmax(rewards)
new_actions[n1] = mask[best_n2] # store in forward direction
new_values[n1] = rewards[best_n2]
values = new_values
best_actions.append(new_actions)
if np.all(new_values[connect_flag] >= configs.goal_reward):
if early_stop:
print("Early stopped")
break
tend = time.time()
self._solution = np.array(list(reversed(best_actions)))
if not np.all(new_values[connect_flag] >= configs.goal_reward):
print("!!! No feasible solution found in given steps !!!")
return tend - tstart
def report_solution(self, start_sample_index=1):
node_list = [start_sample_index]
for row in self._solution:
node_list.append(row[node_list[-1]])
if node_list[-1] == 0:
break
return node_list
def render(self, ax):
ax.scatter([p.x for p in self._samples], [p.y for p in self._samples])
solution = self.report_solution()
solution_set = set()
for i in range(len(solution) - 1):
solution_set.add((solution[i], solution[i+1]))
for n1, n2 in self._connections:
if (n1, n2) in solution_set or (n2, n1) in solution_set:
color = "green"
lwidth = 4
else:
color = "gray"
lwidth = 1
ax.plot([self._samples[n1].x, self._samples[n2].x], [self._samples[n1].y, self._samples[n2].y], lw=lwidth, c=color)
def action(self, state, step):
dist = np.array([(state[0] - p.x, state[1] - p.y) for p in self._samples])
nearest = np.argmin(np.linalg.norm(dist, axis=1))
step = step % len(self._solution) # XXX: what to do if need more steps
target = self._samples[self._solution[step, nearest]]
return target.x - state[0], target.y - state[1]
class CellSolver(BaseSolver):
'''
This solver generates cells and finds a path between the cells
'''
def __init__(self):
pass
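# Usage sketch (illustrative addition, not part of the original file): the
# mesh-based solver above expects an environment object providing
# `_generate_points`, `obstacles`, `_start` and `_end`. The names `MeshSolver`
# and `env` below are hypothetical stand-ins for whatever this project defines.
#
# solver = MeshSolver(sample_num=200)
# runtime = solver.solve(env, max_steps=80)
# print("solved in %.2f s, path: %s" % (runtime, solver.report_solution()))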
| 41.292899
| 143
| 0.592678
|
57e0e622d472ecf3933e315ff3f55c6874058ad9
| 2,277
|
py
|
Python
|
examples/poses_and_cameras.py
|
martiege/pylie
|
f44c464d166b4e7b695b489153dfaf11c14fe1bb
|
[
"BSD-3-Clause"
] | null | null | null |
examples/poses_and_cameras.py
|
martiege/pylie
|
f44c464d166b4e7b695b489153dfaf11c14fe1bb
|
[
"BSD-3-Clause"
] | null | null | null |
examples/poses_and_cameras.py
|
martiege/pylie
|
f44c464d166b4e7b695b489153dfaf11c14fe1bb
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
import visgeom as vg
import matplotlib
import matplotlib.pyplot as plt
from pylie import SO3, SE3
# Use Qt 5 backend in visualisation.
matplotlib.use('qt5agg')
# Create figure and axis.
fig = plt.figure()
ax = plt.axes(projection='3d')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
# Plot the pose of the world North-East-Down (NED) frame (relative to the world frame).
T_w_w = SE3()
vg.plot_pose(ax, T_w_w.to_tuple(), scale=3, text='$\mathcal{F}_w$')
# Plot the body frame (a body-fixed Forward-Right-Down (FRD) frame).
roll = np.radians(-10)
pitch = np.radians(0)
yaw = np.radians(135)
t_w_b = np.array([[-10, -10, -2]]).T
T_w_b = SE3((SO3.from_roll_pitch_yaw(roll, pitch, yaw), t_w_b))
vg.plot_pose(ax, T_w_b.to_tuple(), scale=3, text='$\mathcal{F}_b$')
# Plot the camera frame.
# The camera is placed 2 m directly above the body origin.
# Its optical axis points to the left (in opposite direction of the y-axis in F_b).
# Its y-axis points downwards along the z-axis of F_b.
R_b_c = np.array([[1, 0, 0],
[0, 0, -1],
[0, 1, 0]])
t_b_c = np.array([[0, 0, -2]]).T
T_b_c = SE3((SO3(R_b_c), t_b_c))
T_w_c = T_w_b @ T_b_c
vg.plot_pose(ax, T_w_c.to_tuple(), scale=3, text='$\mathcal{F}_c$')
# Plot obstacle frame.
# The cube is placed at (North: 10 m, East: 10 m, Down: -1 m).
# Its top points upwards, and its front points south.
R_w_o = np.array([[-1, 0, 0],
[0, 1, 0],
[0, 0, -1]])
t_w_o = np.array([[10, 10, -1]]).T
T_w_o = SE3((SO3(R_w_o), t_w_o))
vg.plot_pose(ax, T_w_o.to_tuple(), scale=3, text='$\mathcal{F}_o$')
# Plot the cube with sides 3 meters.
points_o = vg.utils.generate_box(scale=3)
points_w = T_w_o * points_o
vg.utils.plot_as_box(ax, points_w)
# Plot the image plane.
img_plane_scale = 3
K = np.array([[50, 0, 40],
[0, 50, 30],
[0, 0, 1]])
vg.plot_camera_image_plane(ax, K, T_w_c.to_tuple(), scale=img_plane_scale)
# Project the box onto the normalised image plane (at z=img_plane_scale).
points_c = T_w_c.inverse() @ T_w_o * points_o
xn = points_c / points_c[2, :]
xn_w = T_w_c * (img_plane_scale * xn)
vg.utils.plot_as_box(ax, xn_w)
# Show figure.
vg.plot.axis_equal(ax)
ax.invert_zaxis()
ax.invert_yaxis()
plt.show()
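# Extra check (illustrative addition, not in the original script): with the
# pinhole model, homogeneous pixel coordinates follow from the normalised image
# coordinates as u ~ K @ xn, so the box corners land at these pixel positions.
u = K @ xn
print("Projected pixel coordinates (u, v):")
print(u[:2, :])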
| 30.77027
| 87
| 0.656126
|
5eaea3c9f67656a09c5bfa20eff71c63f39eb8eb
| 5,690
|
py
|
Python
|
test_scripts/kalman/MPC_observer.py
|
forgi86/pyMPC
|
291db149554767a035fcb01df3fed7a6b3fe60e4
|
[
"MIT"
] | 84
|
2019-05-28T09:27:37.000Z
|
2022-03-31T08:38:23.000Z
|
test_scripts/kalman/MPC_observer.py
|
passion4energy/pyMPC
|
4b004ba707dab49cd36d96a3575b8593c870a904
|
[
"MIT"
] | 2
|
2020-04-17T00:03:27.000Z
|
2021-01-30T11:35:58.000Z
|
test_scripts/kalman/MPC_observer.py
|
passion4energy/pyMPC
|
4b004ba707dab49cd36d96a3575b8593c870a904
|
[
"MIT"
] | 20
|
2019-10-13T13:50:16.000Z
|
2022-03-31T08:38:25.000Z
|
import numpy as np
import scipy.sparse as sparse
import time
import matplotlib.pyplot as plt
from pyMPC.mpc import MPCController
from kalman import kalman_filter
if __name__ == '__main__':
# Constants #
M = 0.5
m = 0.2
b = 0.1
ftheta = 0.1
l = 0.3
g = 9.81
Ts = 25e-3
Ac =np.array([[0, 1, 0, 0],
[0, -b/M, -(g*m)/M, (ftheta*m)/M],
[0, 0, 0, 1],
[0, b/(M*l), (M*g + g*m)/(M*l), -(M*ftheta + ftheta*m)/(M*l)]])
Bc = np.array([
[0.0],
[1.0/M],
[0.0],
[-1/(M*l)]
])
Cc = np.array([[1., 0., 0., 0.],
[0., 0., 1., 0.]])
Dc = np.zeros((2,1))
[nx, nu] = Bc.shape # number of states and number of inputs
# Brutal forward euler discretization
Ad = np.eye(nx) + Ac*Ts
Bd = Bc*Ts
Cd = Cc
Dd = Dc
std_npos = 0.01
std_ntheta = 0.01
# Reference input and states
xref = np.array([0.3, 0.0, 0.0, 0.0]) # reference state
uref = np.array([0.0]) # reference input
uminus1 = np.array([0.0]) # input at time step negative one - used to penalize the first delta u at time instant 0. Could be the same as uref.
# Constraints
xmin = np.array([-10.0, -10.0, -100, -100])
xmax = np.array([10.0, 10.0, 100, 100])
umin = np.array([-20])
umax = np.array([20])
Dumin = np.array([-5])
Dumax = np.array([5])
# Objective function weights
Qx = sparse.diags([1.0, 0, 1.0, 0]) # Quadratic cost for states x0, x1, ..., x_N-1
QxN = sparse.diags([1.0, 0, 1.0, 0]) # Quadratic cost for xN
Qu = 0.0 * sparse.eye(1) # Quadratic cost for u0, u1, ...., u_N-1
QDu = 0.1 * sparse.eye(1) # Quadratic cost for Du0, Du1, ...., Du_N-1
# Initial state
phi0 = 15*2*np.pi/360
x0 = np.array([0, 0, phi0, 0]) # initial state
# Prediction horizon
Np = 40
K = MPCController(Ad,Bd,Np=Np, x0=x0,xref=xref,uminus1=uminus1,
Qx=Qx, QxN=QxN, Qu=Qu,QDu=QDu,
xmin=xmin,xmax=xmax,umin=umin,umax=umax,Dumin=Dumin,Dumax=Dumax)
K.setup()
# Kalman filter setup
Cd = Cc
Dd = Dc
[nx, nu] = Bd.shape # number of states and number of inputs
ny = np.shape(Cc)[0]
# Kalman filter extended matrices
Bd_kal = np.hstack([Bd, np.eye(nx)])
Dd_kal = np.hstack([Dd, np.zeros((ny, nx))])
Q_kal = np.diag([10,10,10,10])#np.eye(nx) * 100
R_kal = np.eye(ny) * 1
#Bd_kal = np.hstack([Bd, Bd])
#Dd_kal = np.hstack([Dd, Dd])
#Q_kal = np.eye(nu) * 1
#R_kal = np.eye(ny) * 1
L,P,W = kalman_filter(Ad, Bd_kal, Cd, Dd_kal, Q_kal, R_kal)
# Simulate in closed loop
[nx, nu] = Bd.shape # number of states and number of inputs
len_sim = 100 # simulation length (s)
nsim = int(len_sim/Ts) # simulation length(timesteps)
x_vec = np.zeros((nsim, nx))
y_vec = np.zeros((nsim, ny))
x_est_vec = np.zeros((nsim, nx))
u_vec = np.zeros((nsim, nu))
t_vec = np.arange(0, nsim) * Ts
time_start = time.time()
x_step = x0
x_step_est = x0
uMPC = uminus1
for i in range(nsim):
# Output for step i
# System
y_step = Cd.dot(x_step) # y[k+1]
ymeas_step = y_step.copy() # copy so the added measurement noise does not also modify y_step
ymeas_step[0] += std_npos * np.random.randn()
ymeas_step[1] += std_ntheta * np.random.randn()
# Estimator
yest_step = Cd.dot(x_step_est)
# MPC
uMPC = K.output() # MPC step (u_k value)
# Save output for step i
y_vec[i,:] = y_step
u_vec[i,:] = uMPC
x_vec[i,:] = x_step
x_est_vec[i, :] = x_step_est
# Update i+1
# System
F = uMPC
v = x_step[1]
theta = x_step[2]
omega = x_step[3]
der = np.zeros(nx)
der[0] = v
der[1] = (m*l*np.sin(theta)*omega**2 -m*g*np.sin(theta)*np.cos(theta) + m*ftheta*np.cos(theta)*omega + F - b*v)/(M+m*(1-np.cos(theta)**2))
der[2] = omega
der[3] = ((M+m)*(g*np.sin(theta) - ftheta*omega) - m*l*omega**2*np.sin(theta)*np.cos(theta) -(F-b*v)*np.cos(theta))/(l*(M + m*(1-np.cos(theta)**2)) )
x_step = x_step + der * Ts # x[k+1] #x_step = Ad.dot(x_step) + Bd.dot(uMPC)
# Estimator
x_step_est = Ad.dot(x_step_est) + Bd.dot(uMPC) # x[k+1|k]
x_step_est = x_step_est + L @ (ymeas_step - yest_step) # x[k+1|k+1]
# MPC update
K.update(x_step_est, uMPC) # update with measurement #
time_sim = time.time() - time_start
fig,axes = plt.subplots(5,1, figsize=(10,10), sharex=True)
axes[0].plot(t_vec, x_vec[:, 0], "k", label='p')
axes[0].plot(t_vec, xref[0] * np.ones(np.shape(t_vec)), "r--", label="p_ref")
axes[0].plot(t_vec, x_est_vec[:, 0], "b", label="p_est")
axes[0].set_ylabel("Position (m)")
axes[1].plot(t_vec, x_vec[:, 1], "k", label='v')
axes[1].plot(t_vec, x_est_vec[:, 1], "b", label="v_est")
axes[1].set_ylabel("Velocity (m/s)")
axes[2].plot(t_vec, x_vec[:, 2] * 360 / 2 / np.pi, label="phi")
axes[2].plot(t_vec, x_est_vec[:, 2] * 360 / 2 / np.pi, "b", label="phi_est")
axes[2].set_ylabel("Angle (deg)")
axes[3].plot(t_vec, x_vec[:, 3], label="omega")
axes[3].plot(t_vec, x_est_vec[:, 3], "b", label="omega_est")
axes[3].set_ylabel("Angular speed (rad/sec)")
axes[4].plot(t_vec, u_vec[:, 0], label="u")
axes[4].plot(t_vec, uref * np.ones(np.shape(t_vec)), "r--", label="u_ref")
axes[4].set_ylabel("Force (N)")
for ax in axes:
ax.grid(True)
ax.legend()
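# A minimal sketch of how the imported `kalman_filter` helper could compute the
# steady-state gain L used above (an assumption -- the local `kalman` module is
# not shown here): solve the dual discrete-time algebraic Riccati equation.
def steady_state_kalman_gain(Ad, Cd, Q, R):
    from scipy.linalg import solve_discrete_are
    P = solve_discrete_are(Ad.T, Cd.T, Q, R)          # a-priori error covariance
    L = P @ Cd.T @ np.linalg.inv(Cd @ P @ Cd.T + R)   # innovation gain
    return L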
| 31.092896
| 157
| 0.529174
|
c99ecd4ce04ed47d8858572802013431ca03b168
| 880
|
py
|
Python
|
setup.py
|
nagareproject/editor
|
975d0362e98527ed5ceb41a8310a897e08987581
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
nagareproject/editor
|
975d0362e98527ed5ceb41a8310a897e08987581
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
nagareproject/editor
|
975d0362e98527ed5ceb41a8310a897e08987581
|
[
"BSD-3-Clause"
] | null | null | null |
# Encoding: utf-8
# --
# Copyright (c) 2008-2021 Net-ng.
# All rights reserved.
#
# This software is licensed under the BSD License, as described in
# the file LICENSE.txt, which you should have received as part of
# this distribution.
# --
import os
from setuptools import find_packages, setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as description:
LONG_DESCRIPTION = description.read()
setup(
name='nagare-editor',
author='Net-ng',
author_email='alain.poirier@net-ng.com',
description='Nagare editor, validators and functional variables',
long_description=LONG_DESCRIPTION,
license='BSD',
keywords='',
url='https://github.com/nagareproject/editor',
packages=find_packages(),
zip_safe=False,
setup_requires=['setuptools_scm'],
use_scm_version=True,
install_requires=['nagare-services-i18n']
)
| 26.666667
| 80
| 0.715909
|
38359e141e6c6be9dd088208acf7d9a66135b8d2
| 14,487
|
py
|
Python
|
projx/nxprojx.py
|
CrystallineCat/projx
|
a50fb4fa8f24a6267ac57b08dd477422a8549f70
|
[
"MIT"
] | null | null | null |
projx/nxprojx.py
|
CrystallineCat/projx
|
a50fb4fa8f24a6267ac57b08dd477422a8549f70
|
[
"MIT"
] | null | null | null |
projx/nxprojx.py
|
CrystallineCat/projx
|
a50fb4fa8f24a6267ac57b08dd477422a8549f70
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
These are the core functions/classes for interacting with NetworkX.
"""
from itertools import chain
import networkx as nx
def reset_index(graph):
"""
This clobbers your nodes, best protect them.
:param graph: networx.Graph
:returns: networkx.Graph
"""
mapping = dict(zip(graph.nodes(), range(0, graph.number_of_nodes())))
# Change nodes to integers.
graph = nx.relabel_nodes(graph, mapping)
return graph
def match(node_type_seq, edge_type_seq, graph, node_alias=None,
node_type_attr="type", edge_type_attr="type"):
"""
Executes traversals to perform initial match on pattern.
:param graph: networkx.Graph
:returns: List of lists. The matched paths.
"""
start_type = node_type_seq[0]
path_list = []
for node, attrs in graph.nodes(data=True):
if attrs[node_type_attr] == start_type or not start_type:
paths = traverse(node, node_type_seq[1:], edge_type_seq, graph,
node_alias, node_type_attr, edge_type_attr)
path_list.append(paths)
paths = chain.from_iterable(path_list)
return paths
def project(source, target, graph, method="jaccard", params=None, attrs=None,
node_type_attr="type", edge_type_attr="type"):
"""
Executes graph "PROJECT" projection.
:param source: Int. Source node for transformation.
:param target: Int. Target node for transformation.
:param attrs: Dict. Attrs to be set during transformation.
:param graph: networkx.Graph. Graph of subgraph to transform.
:returns: networkx.Graph. A projected copy of the wrapped graph
or its subgraph.
"""
if params is None:
params = []
if attrs is None:
attrs = {}
if method in ["jaccard", "newman"]:
snbrs = {node for node in graph[source].keys()
if graph.node[node][node_type_attr] in params}
tnbrs = {node for node in graph[target].keys()
if graph.node[node][node_type_attr] in params}
intersect = snbrs & tnbrs
if method == "jaccard":
union = snbrs | tnbrs
weight = float(len(intersect)) / len(union)
elif method == "newman":
weight = sum([1.0 / (len(graph[n]) - 1) for n in intersect
if len(graph[n]) > 1])
attrs["weight"] = weight
if graph.has_edge(source, target):
edge_attrs = graph[source][target]
merged_attrs = merge_attrs(attrs, edge_attrs,
[edge_type_attr, "weight", "label"])
graph[source][target].update(merged_attrs)
graph[target][source].update(merged_attrs)
else:
graph.add_edge(source, target, **attrs)
return graph
def transfer(source, target, graph, method="edges", params=None, attrs=None,
node_type_attr="type", edge_type_attr="type", **kwargs):
"""
Execute a graph "TRANSFER" projection.
:param source: Int. Source node for transformation.
:param target: Int. Target node for transformation.
:param attrs: Dict. Attrs to be set during transformation.
:param graph: networkx.Graph. Graph of subgraph to transform.
:returns: networkx.Graph. A projected copy of the wrapped graph
or its subgraph.
"""
if params is None:
params = []
if attrs is None:
attrs = {}
if method == "edges":
nbrs = {k: v for (k, v) in graph[source].items()
if graph.node[k][node_type_attr] in params}
edges = zip([target] * len(nbrs), nbrs,
[v for (k, v) in nbrs.items()])
graph = _add_edges_from(graph, edges)
old_attrs = graph.node[target]
merged_attrs = merge_attrs(attrs, old_attrs,
[node_type_attr, "label", "role"])
graph.nodes[target].clear()
graph.nodes[target].update(merged_attrs)
return graph
def combine(source, target, graph, node_id="", attrs=None,
node_type_attr="type", edge_type_attr="type"):
"""
Executes graph "COMBINE" projection.
:param source: Int. Source node for transformation.
:param target: Int. Target node for transformation.
:param attrs: Dict. Attrs to be set during transformation.
:param graph: networkx.Graph. Graph of subgraph to transform.
:param node_id: Int. Id for the new node; will autoassign if not given.
:returns: networkx.Graph. A projected copy of the wrapped graph
or its subgraph.
"""
if attrs is None:
attrs = {}
if not node_id:
try:
node_id = max(graph.nodes())
except:
raise Exception("Please specify a kwarg 'node_id'")
node_type = attrs.get(node_type_attr, "")
if not node_type:
node_type = "{0}_{1}".format(
graph.node[source][node_type_attr],
graph.node[target][node_type_attr]
)
attrs[node_type_attr] = node_type
graph.add_node(node_id, **attrs)
nbrs = dict(graph[source])
nbrs.update(dict(graph[target]))
nbrs = {k: v for (k, v) in nbrs.items()
if graph.node[k][node_type_attr] != node_type}
edges = zip([node_id] * len(nbrs), nbrs,
[v for (_, v) in nbrs.items()])
graph = _add_edges_from(graph, edges)
return graph
def traverse(start, node_type_seq, edge_type_seq, graph,
node_alias=None, node_type_attr="type", edge_type_attr="type"):
"""
This is a controlled-depth, depth-first traversal of a NetworkX
graph and the core of this library. The search criteria depend
on a start node and a sequence of types as designated by the node/edge
type sequences. It does not allow cycles or backtracking. It can be very
memory inefficient in very dense graphs with 3+ type queries.
:param start: Integer. Starting point for the traversal.
:param node_type_seq: List of strings. Derived from the match pattern.
:param edge_type_seq: List of strings. Derived from the match pattern.
:param graph: networkx.Graph
:returns: List of lists. All matched paths.
"""
# Initialize a stack to keep
# track of traversal progress.
stack = [start]
# Store all valid paths based on type sequence.
paths = []
# Keep track of traversal moves to avoid cycles.
visited_from = {}
# The traversal will begin at the designated start point.
current = start
# Track depth from start node to watch for successful sequence match.
depth = 0
# This is the len of a successful sequence.
max_depth = len(node_type_seq)
# When the stack runs out, all candidate nodes have been visited.
while len(stack) > 0:
# Traverse!
if depth < max_depth:
nbrs = set(graph[current]) - set([current])
for nbr in nbrs:
edge_type = graph[current][nbr].get(
edge_type_attr,
None
)
attrs = graph.node[nbr]
# Here check candidate node validity.
# Make sure this path hasn't been checked already.
# Make sure it matches the type sequence.
# Make sure it's not backtracking on the same path.
visited_from.setdefault(nbr, [])
if (current not in visited_from[nbr] and
nbr not in stack and
(edge_type == edge_type_seq[depth] or
edge_type_seq[depth] == "") and
(attrs[node_type_attr] == node_type_seq[depth]
or node_type_seq[depth] == "")):
visited_from[nbr].append(current)
# Continue traversal at next depth.
current = nbr
stack.append(current)
depth += 1
break
# If no valid nodes are available from
# this position, backtrack.
else:
stack.pop()
if len(stack) > 0:
current = stack[-1]
depth -= 1
# If max depth reached, store the
# valid node sequence.
else:
path = list(stack)
if node_alias:
path = Record(path, node_alias)
paths.append(path)
# Backtrack and keep checking.
stack.pop()
current = stack[-1]
depth -= 1
return paths
def build_subgraph(paths, graph, records=False):
"""
Takes the paths returned by match and builds a graph.
:param paths: List of lists.
:returns: networkx.Graph. Matched subgraph.
"""
g = nx.Graph()
for path in paths:
if records:
path = path._list
combined_paths = _combine_paths(path)
for edges in combined_paths:
attrs = graph[edges[0]][edges[1]]
g.add_edge(edges[0], edges[1], **attrs)
for node in g.nodes:
g.nodes[node].update(graph.node[node])
return g
def merge_attrs(new_attrs, old_attrs, reserved=[]):
"""
Merges attributes counting repeated attrs with dicts.
Kind of ugly, will need to take a look at this.
:param new_attrs: Dict.
:param old_attrs: Dict.
:reserved: List. A list of attributes that cannot have more than one value.
:returns: Dict.
"""
attrs = {}
for k, v in old_attrs.items():
if k in reserved:
attrs[k] = v
elif isinstance(v, dict):
attrs[k] = dict(v)
elif isinstance(v, str) or isinstance(v, unicode):
attrs[k] = {v: 1}
for k, v in new_attrs.items():
if k in reserved:
attrs[k] = v
elif k in attrs:
count_dict = attrs[k]
if isinstance(v, dict):
for i, j in v.items():
count_dict.setdefault(i, 0)
count_dict[i] += j
elif isinstance(v, str) or isinstance(v, unicode):
count_dict.setdefault(v, 0)
count_dict[v] += 1
attrs[k] = count_dict
else:
if isinstance(v, dict):
attrs[k] = dict(v)
elif isinstance(v, str) or isinstance(v, unicode):
attrs[k] = {v: 1}
return attrs
class NXProjector(object):
def __init__(self, id_counter):
"""
This class holds the info and methods necessary for performing the ETL
actions on a networkx.Graph. It is not a wrapper, and does not store
the actual graph; it just operates on it.
:param id_counter: Int. Used to handle combine ids.
"""
self._id_counter = id_counter
self._transformation = {}
self._transformation_init()
def transformation_wrapper(self, verb):
"""
Wraps the transformation methods and adds them to the transformations
dictionary.
:param verb: Str. The ProjX verb associated with the wrapped
function.
"""
def wrapper(fn):
self._transformation[verb] = fn
return wrapper
def _get_transformation(self):
"""
Return transformation for transformation property.
:returns: Dict. A dict containing a mapping of verbs to transformation
methods.
"""
return self._transformation
transformations = property(fget=_get_transformation)
def _transformation_init(self):
"""
A series of functions representing transformations. These are
wrapped by the transformation wrapper and added to the transformations
dict. Later during the parsing and execution phase these are called as
pointers to the various graph transformation methods
(transfer and project).
"""
@self.transformation_wrapper("project")
def execute_project(source, target, graph, attrs, node_type_attr,
edge_type_attr, **kwargs):
method = kwargs.get("method", {})
params = kwargs.get("params", [])
return project(source, target, graph, method, params, attrs,
node_type_attr, edge_type_attr)
@self.transformation_wrapper("transfer")
def execute_transfer(source, target, graph, attrs, node_type_attr,
edge_type_attr, **kwargs):
method = kwargs.get("method", {})
params = kwargs.get("params", [])
return transfer(source, target, graph, method, params, attrs,
node_type_attr, edge_type_attr)
@self.transformation_wrapper("combine")
def execute_combine(source, target, graph, attrs, node_type_attr,
edge_type_attr, **kwargs):
self._id_counter += 1
node_id = int(self._id_counter)
return combine(source, target, graph, node_id, attrs,
node_type_attr, edge_type_attr)
class Record(object):
def __init__(self, path, alias):
self._list = path
self._dict = {}
for i in range(len(path)):
self._dict[alias[i]] = path[i]
def __getitem__(self, item):
if isinstance(item, str):
return self._dict[item]
elif isinstance(item, int):
return self._list[item]
else:
raise Exception("Bad index.")
def _add_edges_from(graph, edges, edge_type_attr="type"):
"""
An alternative to the networkx.Graph.add_edges_from.
Handles non-reserved attributes as sets.
:param graph: networkx.Graph
:param edges: List of tuples. Tuple contains two node ids Int and an
attr Dict.
"""
for source, target, attrs in edges:
if graph.has_edge(source, target):
edge_attrs = graph[source][target]
merged_attrs = merge_attrs(attrs, edge_attrs,
[edge_type_attr, "weight", "label"])
graph[source][target].update(merged_attrs)
graph[target][source].update(merged_attrs)
else:
graph.add_edge(source, target, **attrs)
return graph
def _combine_paths(path):
"""
Turn path list into edge list.
:param path: List. A list of nodes representing a path.
:returns: List. A list of edge tuples.
"""
edges = []
for i, node in enumerate(path[1:]):
edges.append((path[i], node))
return edges
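# A minimal usage sketch (illustrative addition; assumes networkx 2.0-2.3, where
# the `Graph.node` attribute used above is still available):
if __name__ == "__main__":
    demo = nx.Graph()
    demo.add_node(0, type="Person")
    demo.add_node(1, type="City")
    demo.add_edge(0, 1, type="lives_in")
    demo_paths = match(["Person", "City"], ["lives_in"], demo)
    demo_sub = build_subgraph(demo_paths, demo)
    print(list(demo_sub.edges(data=True)))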
| 36.127182
| 78
| 0.591703
|
44f4dae62127b432b4e01c4272efbc76caa77413
| 1,483
|
py
|
Python
|
rev/MatrixMaster/sol/disass.py
|
NotCl0ne/foobarctf-22
|
c947501c982c266c3546269640b920a650077510
|
[
"MIT"
] | null | null | null |
rev/MatrixMaster/sol/disass.py
|
NotCl0ne/foobarctf-22
|
c947501c982c266c3546269640b920a650077510
|
[
"MIT"
] | null | null | null |
rev/MatrixMaster/sol/disass.py
|
NotCl0ne/foobarctf-22
|
c947501c982c266c3546269640b920a650077510
|
[
"MIT"
] | null | null | null |
import struct
base = 0x3018
d = bytearray(open("matrix","rb").read()[base:base+0x90])
pc = 0
# print(d)
# print(d[2])
while pc < len(d)-2:
print("%.2x : " % pc,end=' ')
if(d[pc] == 0):
print("XOR r%i, r%i\n" % (d[pc+1],d[pc+2]))
pc+=3
continue
if(d[pc] == 1):
print("AND r%i, r%i \n" % (d[pc+1],d[pc+2]))
pc+=3
continue
if(d[pc] == 2):
print("OR r%i, r%i \n" % (d[pc+1],d[pc+2]))
pc+=3
continue
if(d[pc] == 3):
print("MOV r%i, r%i \n" % (d[pc+1],d[pc+2]))
pc+=3
continue
if(d[pc] == 4):
print("MOVI r%i, 0x%.8x \n" % (d[pc+1],struct.unpack("I",d[pc+2:pc+2+4])[0]))
pc+=6
continue
if(d[pc] == 5):
print("LEFT r%i, 0x%.8x \n" % (d[pc+1],struct.unpack("I",d[pc+2:pc+2+4])[0]))
pc+=6
continue
if(d[pc] == 6):
print("RIGHT r%i, 0x%.8x \n" % (d[pc+1],struct.unpack("I",d[pc+2:pc+2+4])[0]))
pc+=6
continue
if(d[pc] == 0xfc):
print("Add r%i, 0x%.8x \n" % (d[pc+1],struct.unpack("I",d[pc+2:pc+2+4])[0]))
pc+=6
continue
if(d[pc] == 0xfd):
print("Sub r%i, 0x%.8x \n" % (d[pc+1],struct.unpack("I",d[pc+2:pc+2+4])[0]))
pc+=6
continue
if(d[pc] == 0xfe):
print("Rand r%i, 0x%.8x \n" % (d[pc+1],struct.unpack("I",d[pc+2:pc+2+4])[0]))
pc+=6
continue
else :
print("")
pc+=1
continue
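# Quick sanity check (illustrative addition, not part of the original dump): a
# MOVI instruction is one opcode byte, one register byte, then a little-endian
# 32-bit immediate, exactly as decoded above.
demo = bytes([4, 2]) + struct.pack("I", 0x1337)
print("demo : MOVI r%i, 0x%.8x" % (demo[1], struct.unpack("I", demo[2:6])[0]))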
| 25.135593
| 86
| 0.426837
|
39cf65d4b7a031da29d2028b52118acafee2d02a
| 789
|
py
|
Python
|
profiles_api/migrations/0002_userprofilesfeed.py
|
glen-s-abraham/profile-rest-api
|
0cbcd545b4394905b7b4ab58a0b7ab4f1e508412
|
[
"MIT"
] | null | null | null |
profiles_api/migrations/0002_userprofilesfeed.py
|
glen-s-abraham/profile-rest-api
|
0cbcd545b4394905b7b4ab58a0b7ab4f1e508412
|
[
"MIT"
] | 2
|
2021-06-10T20:00:16.000Z
|
2021-09-22T19:31:57.000Z
|
profiles_api/migrations/0002_userprofilesfeed.py
|
glen-s-abraham/profile-rest-api
|
0cbcd545b4394905b7b4ab58a0b7ab4f1e508412
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.8 on 2020-08-07 13:00
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('profiles_api', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='UserProfilesFeed',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('feed_text', models.CharField(max_length=255)),
('created_on', models.DateTimeField(auto_now_add=True)),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
| 31.56
| 126
| 0.632446
|
bb7a330524d65a7926700dbb1a230e2e44bb5e38
| 820
|
py
|
Python
|
isi_sdk_8_1_1/test/test_empty.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_1_1/test/test_empty.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_1_1/test/test_empty.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 6
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_8_1_1
from isi_sdk_8_1_1.models.empty import Empty # noqa: E501
from isi_sdk_8_1_1.rest import ApiException
class TestEmpty(unittest.TestCase):
"""Empty unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testEmpty(self):
"""Test Empty"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_8_1_1.models.empty.Empty() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 20
| 79
| 0.67561
|
2be2f5af96209a89ff0a00144c986d90b8ccbc6d
| 1,049
|
py
|
Python
|
model/medical_test_critearea.py
|
IDRISSOUM/hospital_management
|
56a768f29269a77bc890d40479a8aacb90867189
|
[
"Unlicense"
] | null | null | null |
model/medical_test_critearea.py
|
IDRISSOUM/hospital_management
|
56a768f29269a77bc890d40479a8aacb90867189
|
[
"Unlicense"
] | null | null | null |
model/medical_test_critearea.py
|
IDRISSOUM/hospital_management
|
56a768f29269a77bc890d40479a8aacb90867189
|
[
"Unlicense"
] | null | null | null |
# -*- coding: utf-8 -*-
# Part of BrowseInfo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, models, _
# classes under the configuration menu of the laboratory
class medical_test_critearea(models.Model):
_name = 'medical_test.critearea'
test_id = fields.Many2one('medical.test_type',)
name = fields.Char('Name',)
seq = fields.Integer('Sequence', default=1)
medical_test_type_id = fields.Many2one ('medical.test_type', 'Test Type')
medical_lab_id = fields.Many2one('medical.lab', 'Medical Lab Result')
warning = fields.Boolean('Warning')
excluded = fields.Boolean('Excluded')
lower_limit = fields.Float('Lower Limit')
upper_limit = fields.Float('Upper Limit')
lab_test_unit_id = fields.Many2one('medical.lab.test.units', 'Units')
result = fields.Float('Result')
result_text = fields.Char('Result Text')
normal_range = fields.Char('Normal Range')
remark = fields.Text('Remarks')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| 38.851852
| 80
| 0.709247
|
87b8240dc1aefa056f28ea059a0c2fb960f7051d
| 1,135
|
py
|
Python
|
extensions/widgets/interactive/TarFileReadInput/TarFileReadInput.py
|
aldeka/oppia
|
aead304c95a282c9ca8035bc25c4794864d07578
|
[
"Apache-2.0"
] | 3
|
2015-01-10T23:45:23.000Z
|
2015-02-17T10:46:08.000Z
|
extensions/widgets/interactive/TarFileReadInput/TarFileReadInput.py
|
aldeka/oppia
|
aead304c95a282c9ca8035bc25c4794864d07578
|
[
"Apache-2.0"
] | null | null | null |
extensions/widgets/interactive/TarFileReadInput/TarFileReadInput.py
|
aldeka/oppia
|
aead304c95a282c9ca8035bc25c4794864d07578
|
[
"Apache-2.0"
] | null | null | null |
from core.domain import widget_domain
class TarFileReadInput(widget_domain.BaseWidget):
"""Definition of a widget.
Do NOT make any changes to this widget definition while the Oppia app is
running, otherwise things will break.
This class represents a widget, whose id is the name of the class. It is
auto-discovered when the default widgets are refreshed.
"""
# The human-readable name of the widget.
name = 'Tar file upload'
# The category the widget falls under in the widget repository.
category = 'Custom'
# A description of the widget.
description = (
'A widget for uploading tar files.'
)
# Customization parameters and their descriptions, types and default
# values. This attribute name MUST be prefixed by '_'.
_params = []
# Actions that the reader can perform on this widget which trigger a
# feedback interaction, and the associated input types. Interactive widgets
# must have at least one of these. This attribute name MUST be prefixed by
# '_'.
_handlers = [{
'name': 'submit', 'obj_type': 'TarFileString'
}]
| 31.527778
| 79
| 0.69163
|
02c03a4541933018926eb5aed0a215364b5c850c
| 3,564
|
py
|
Python
|
user/models.py
|
mstgnz/WMS
|
64aaa43fdd8fb682dedb792831b13d64046b385f
|
[
"Apache-2.0"
] | null | null | null |
user/models.py
|
mstgnz/WMS
|
64aaa43fdd8fb682dedb792831b13d64046b385f
|
[
"Apache-2.0"
] | null | null | null |
user/models.py
|
mstgnz/WMS
|
64aaa43fdd8fb682dedb792831b13d64046b385f
|
[
"Apache-2.0"
] | null | null | null |
import os
from django.db import models
from django.conf import settings
from django.utils.text import slugify
from django.urls import reverse
from django.contrib.auth.models import BaseUserManager, AbstractBaseUser, PermissionsMixin
from django.core.validators import RegexValidator
class UserManager(BaseUserManager):
def create_user(self, email, password=None):
if not email:
raise ValueError('Users must have an email address')
user = self.model(email=self.normalize_email(email))
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, password):
user = self.create_user(email, password=password)
user.is_staff = True
user.is_manager = True
user.is_superuser = True
user.save(using=self._db)
return user
class User(AbstractBaseUser, PermissionsMixin):
id = models.AutoField(primary_key=True)
firm = models.ForeignKey('firm.Firm', blank=True, null=True, related_name='users', on_delete=models.CASCADE)
worksite = models.ManyToManyField('firm.Worksite', blank=True, verbose_name='Şantiye')
email = models.EmailField(unique=True, max_length=255)
first_name = models.CharField(max_length=15, blank=True, null=True)
last_name = models.CharField(max_length=20, blank=True, null=True)
address = models.CharField(max_length=200, blank=True, null=True)
image = models.ImageField(blank=True, null=True)
phone = models.CharField(max_length=10, blank=True, null=True, validators=[RegexValidator(r'^\d{10}$')])
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
is_superuser = models.BooleanField(default=False)
last_login = models.DateTimeField(auto_now_add=True, editable=False)
date_joined = models.DateTimeField(auto_now_add=True, editable=False)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = [] # Email & Password are required by default.
objects = UserManager()
class Meta:
db_table = 'user'
ordering = ['-id']
def __str__(self):
return self.email
def get_short_name(self):
return self.first_name
def get_full_name(self):
return '{} {}'.format(self.first_name, self.last_name)
def get_absolute_url(self):
return reverse('user:profile_update', kwargs={'pk':self.pk})
def get_image_or_default(self):
if self.image and hasattr(self.image, 'url'):
return self.image.url
return '/static/site/img/user.png'
def save(self, *args, **kwargs):
if self.image:
self.image.name = slugify(str(self.firm).replace('ı','i'))+'/img/'+self.email.split('@')[0]+os.path.splitext(self.image.name)[1]
return super(User, self).save(*args, **kwargs)
# WE WILL LOG THE OPERATIONS OF ALL MODELS. A SIGNAL WILL BE DEFINED FOR EACH MODEL.
class Logger(models.Model):
id = models.AutoField(primary_key=True)
user = models.ForeignKey('User', verbose_name='User', related_name='loggers', on_delete=models.CASCADE)
name = models.CharField(max_length=20, verbose_name='Tablo Adı')
row = models.BigIntegerField(verbose_name='Tablo ID')
data = models.TextField(verbose_name='Veri')
action = models.CharField(max_length=10, verbose_name='İşlem') # saved or deleted
create_date = models.DateField(auto_now_add=True, editable=False)
class Meta:
db_table = 'Logger'
ordering = ['-id']
def __str__(self):
return self.name
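# A minimal sketch of the signal mentioned in the comment above (illustrative
# only: the receiver name and the JSON serialization are assumptions, not part
# of this project).
from django.core import serializers
from django.db.models.signals import post_save
from django.dispatch import receiver
@receiver(post_save, sender=User)
def log_user_save(sender, instance, **kwargs):
    # Record every save of a User row in the Logger table.
    Logger.objects.create(
        user=instance,
        name=sender._meta.db_table,
        row=instance.pk,
        data=serializers.serialize('json', [instance]),
        action='saved',
    )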
| 38.73913
| 140
| 0.699214
|
2712f541b015421b6a52802fa488845e16f528b7
| 41,647
|
py
|
Python
|
seqpos/lib/python2.7/site-packages/hgext/bugzilla.py
|
guanjue/seqpos
|
ab9308ad128547ca968a1d944490710e583703bc
|
[
"MIT"
] | null | null | null |
seqpos/lib/python2.7/site-packages/hgext/bugzilla.py
|
guanjue/seqpos
|
ab9308ad128547ca968a1d944490710e583703bc
|
[
"MIT"
] | null | null | null |
seqpos/lib/python2.7/site-packages/hgext/bugzilla.py
|
guanjue/seqpos
|
ab9308ad128547ca968a1d944490710e583703bc
|
[
"MIT"
] | null | null | null |
# bugzilla.py - bugzilla integration for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
# Copyright 2011-4 Jim Hague <jim.hague@acm.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''hooks for integrating with the Bugzilla bug tracker
This hook extension adds comments on bugs in Bugzilla when changesets
that refer to bugs by Bugzilla ID are seen. The comment is formatted using
the Mercurial template mechanism.
The bug references can optionally include an update for Bugzilla of the
hours spent working on the bug. Bugs can also be marked fixed.
Four basic modes of access to Bugzilla are provided:
1. Access via the Bugzilla REST-API. Requires bugzilla 5.0 or later.
2. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later.
3. Check data via the Bugzilla XMLRPC interface and submit bug change
via email to Bugzilla email interface. Requires Bugzilla 3.4 or later.
4. Writing directly to the Bugzilla database. Only Bugzilla installations
using MySQL are supported. Requires Python MySQLdb.
Writing directly to the database is susceptible to schema changes, and
relies on a Bugzilla contrib script to send out bug change
notification emails. This script runs as the user running Mercurial,
must be run on the host with the Bugzilla install, and requires
permission to read Bugzilla configuration details and the necessary
MySQL user and password to have full access rights to the Bugzilla
database. For these reasons this access mode is now considered
deprecated, and will not be updated for new Bugzilla versions going
forward. Only adding comments is supported in this access mode.
Access via XMLRPC needs a Bugzilla username and password to be specified
in the configuration. Comments are added under that username. Since the
configuration must be readable by all Mercurial users, it is recommended
that the rights of that user are restricted in Bugzilla to the minimum
necessary to add comments. Marking bugs fixed requires Bugzilla 4.0 and later.
Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
email to the Bugzilla email interface to submit comments to bugs.
The From: address in the email is set to the email address of the Mercurial
user, so the comment appears to come from the Mercurial user. In the event
that the Mercurial user email is not recognized by Bugzilla as a Bugzilla
user, the email associated with the Bugzilla username used to log into
Bugzilla is used instead as the source of the comment. Marking bugs fixed
works on all supported Bugzilla versions.
Access via the REST-API needs either a Bugzilla username and password
or an apikey specified in the configuration. Comments are made under
the given username or the user associated with the apikey in Bugzilla.
Configuration items common to all access modes:
bugzilla.version
The access type to use. Values recognized are:
:``restapi``: Bugzilla REST-API, Bugzilla 5.0 and later.
:``xmlrpc``: Bugzilla XMLRPC interface.
:``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
:``3.0``: MySQL access, Bugzilla 3.0 and later.
:``2.18``: MySQL access, Bugzilla 2.18 and up to but not
including 3.0.
:``2.16``: MySQL access, Bugzilla 2.16 and up to but not
including 2.18.
bugzilla.regexp
Regular expression to match bug IDs for update in changeset commit message.
It must contain one "()" named group ``<ids>`` containing the bug
IDs separated by non-digit characters. It may also contain
a named group ``<hours>`` with a floating-point number giving the
hours worked on the bug. If no named groups are present, the first
"()" group is assumed to contain the bug IDs, and work time is not
updated. The default expression matches ``Bug 1234``, ``Bug no. 1234``,
``Bug number 1234``, ``Bugs 1234,5678``, ``Bug 1234 and 5678`` and
variations thereof, followed by an hours number prefixed by ``h`` or
``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
bugzilla.fixregexp
Regular expression to match bug IDs for marking fixed in changeset
commit message. This must contain a "()" named group ``<ids>`` containing
the bug IDs separated by non-digit characters. It may also contain
a named group ``<hours>`` with a floating-point number giving the
hours worked on the bug. If no named groups are present, the first
"()" group is assumed to contain the bug IDs, and work time is not
updated. The default expression matches ``Fixes 1234``, ``Fixes bug 1234``,
``Fixes bugs 1234,5678``, ``Fixes 1234 and 5678`` and
variations thereof, followed by an hours number prefixed by ``h`` or
``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
bugzilla.fixstatus
The status to set a bug to when marking fixed. Default ``RESOLVED``.
bugzilla.fixresolution
The resolution to set a bug to when marking fixed. Default ``FIXED``.
bugzilla.style
The style file to use when formatting comments.
bugzilla.template
Template to use when formatting comments. Overrides style if
specified. In addition to the usual Mercurial keywords, the
extension specifies:
:``{bug}``: The Bugzilla bug ID.
:``{root}``: The full pathname of the Mercurial repository.
:``{webroot}``: Stripped pathname of the Mercurial repository.
:``{hgweb}``: Base URL for browsing Mercurial repositories.
Default ``changeset {node|short} in repo {root} refers to bug
{bug}.\\ndetails:\\n\\t{desc|tabindent}``
bugzilla.strip
The number of path separator characters to strip from the front of
the Mercurial repository path (``{root}`` in templates) to produce
``{webroot}``. For example, a repository with ``{root}``
``/var/local/my-project`` with a strip of 2 gives a value for
``{webroot}`` of ``my-project``. Default 0.
web.baseurl
Base URL for browsing Mercurial repositories. Referenced from
templates as ``{hgweb}``.
Configuration items common to XMLRPC+email and MySQL access modes:
bugzilla.usermap
Path of file containing Mercurial committer email to Bugzilla user email
mappings. If specified, the file should contain one mapping per
line::
committer = Bugzilla user
See also the ``[usermap]`` section.
The ``[usermap]`` section is used to specify mappings of Mercurial
committer email to Bugzilla user email. See also ``bugzilla.usermap``.
Contains entries of the form ``committer = Bugzilla user``.
XMLRPC and REST-API access mode configuration:
bugzilla.bzurl
The base URL for the Bugzilla installation.
Default ``http://localhost/bugzilla``.
bugzilla.user
The username to use to log into Bugzilla via XMLRPC. Default
``bugs``.
bugzilla.password
The password for Bugzilla login.
REST-API access mode uses the options listed above as well as:
bugzilla.apikey
An apikey generated on the Bugzilla instance for api access.
Using an apikey removes the need to store the user and password
options.
XMLRPC+email access mode uses the XMLRPC access mode configuration items,
and also:
bugzilla.bzemail
The Bugzilla email address.
In addition, the Mercurial email settings must be configured. See the
documentation in hgrc(5), sections ``[email]`` and ``[smtp]``.
MySQL access mode configuration:
bugzilla.host
Hostname of the MySQL server holding the Bugzilla database.
Default ``localhost``.
bugzilla.db
Name of the Bugzilla database in MySQL. Default ``bugs``.
bugzilla.user
Username to use to access MySQL server. Default ``bugs``.
bugzilla.password
Password to use to access MySQL server.
bugzilla.timeout
Database connection timeout (seconds). Default 5.
bugzilla.bzuser
Fallback Bugzilla user name to record comments with, if changeset
committer cannot be found as a Bugzilla user.
bugzilla.bzdir
Bugzilla install directory. Used by default notify. Default
``/var/www/html/bugzilla``.
bugzilla.notify
The command to run to get Bugzilla to send bug change notification
emails. Substitutes from a map with 3 keys, ``bzdir``, ``id`` (bug
id) and ``user`` (committer bugzilla email). Default depends on
version; from 2.18 it is "cd %(bzdir)s && perl -T
contrib/sendbugmail.pl %(id)s %(user)s".
Activating the extension::
[extensions]
bugzilla =
[hooks]
# run bugzilla hook on every change pulled or pushed in here
incoming.bugzilla = python:hgext.bugzilla.hook
Example configurations:
XMLRPC example configuration. This uses the Bugzilla at
``http://my-project.org/bugzilla``, logging in as user
``bugmail@my-project.org`` with password ``plugh``. It is used with a
collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. ::
[bugzilla]
bzurl=http://my-project.org/bugzilla
user=bugmail@my-project.org
password=plugh
version=xmlrpc
template=Changeset {node|short} in {root|basename}.
{hgweb}/{webroot}/rev/{node|short}\\n
{desc}\\n
strip=5
[web]
baseurl=http://my-project.org/hg
XMLRPC+email example configuration. This uses the Bugzilla at
``http://my-project.org/bugzilla``, logging in as user
``bugmail@my-project.org`` with password ``plugh``. It is used with a
collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. Bug comments
are sent to the Bugzilla email address
``bugzilla@my-project.org``. ::
[bugzilla]
bzurl=http://my-project.org/bugzilla
user=bugmail@my-project.org
password=plugh
version=xmlrpc+email
bzemail=bugzilla@my-project.org
template=Changeset {node|short} in {root|basename}.
{hgweb}/{webroot}/rev/{node|short}\\n
{desc}\\n
strip=5
[web]
baseurl=http://my-project.org/hg
[usermap]
user@emaildomain.com=user.name@bugzilladomain.com
MySQL example configuration. This has a local Bugzilla 3.2 installation
in ``/opt/bugzilla-3.2``. The MySQL database is on ``localhost``,
the Bugzilla database name is ``bugs`` and MySQL is
accessed with MySQL username ``bugs`` password ``XYZZY``. It is used
with a collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. ::
[bugzilla]
host=localhost
password=XYZZY
version=3.0
bzuser=unknown@domain.com
bzdir=/opt/bugzilla-3.2
template=Changeset {node|short} in {root|basename}.
{hgweb}/{webroot}/rev/{node|short}\\n
{desc}\\n
strip=5
[web]
baseurl=http://my-project.org/hg
[usermap]
user@emaildomain.com=user.name@bugzilladomain.com
All the above add a comment to the Bugzilla bug record of the form::
Changeset 3b16791d6642 in repository-name.
http://my-project.org/hg/repository-name/rev/3b16791d6642
Changeset commit comment. Bug 1234.
'''
from __future__ import absolute_import
import json
import re
import time
from mercurial.i18n import _
from mercurial.node import short
from mercurial import (
error,
logcmdutil,
mail,
registrar,
url,
util,
)
from mercurial.utils import (
procutil,
stringutil,
)
xmlrpclib = util.xmlrpclib
# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = 'ships-with-hg-core'
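# Illustration (added; not part of the extension): the default ``bugzilla.regexp``
# documented above can be exercised directly. The helper below is a hypothetical
# convenience and is not used by the hook itself.
def _demo_parse_bug_ids(text):
    """Return (ids, hours) parsed with the default bug-reference pattern."""
    pattern = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
               r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
               r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
    m = re.search(pattern, text, re.IGNORECASE)
    if not m:
        return [], None
    ids = [int(i) for i in re.findall(r'\d+', m.group('ids'))]
    hours = float(m.group('hours')) if m.group('hours') else None
    return ids, hours
# Example: _demo_parse_bug_ids('Bug 1234 and 5678 hours 1.5') returns ([1234, 5678], 1.5).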
configtable = {}
configitem = registrar.configitem(configtable)
configitem('bugzilla', 'apikey',
default='',
)
configitem('bugzilla', 'bzdir',
default='/var/www/html/bugzilla',
)
configitem('bugzilla', 'bzemail',
default=None,
)
configitem('bugzilla', 'bzurl',
default='http://localhost/bugzilla/',
)
configitem('bugzilla', 'bzuser',
default=None,
)
configitem('bugzilla', 'db',
default='bugs',
)
configitem('bugzilla', 'fixregexp',
default=(r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
r'(?:nos?\.?|num(?:ber)?s?)?\s*'
r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
)
configitem('bugzilla', 'fixresolution',
default='FIXED',
)
configitem('bugzilla', 'fixstatus',
default='RESOLVED',
)
configitem('bugzilla', 'host',
default='localhost',
)
configitem('bugzilla', 'notify',
default=configitem.dynamicdefault,
)
configitem('bugzilla', 'password',
default=None,
)
configitem('bugzilla', 'regexp',
default=(r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')
)
configitem('bugzilla', 'strip',
default=0,
)
configitem('bugzilla', 'style',
default=None,
)
configitem('bugzilla', 'template',
default=None,
)
configitem('bugzilla', 'timeout',
default=5,
)
configitem('bugzilla', 'user',
default='bugs',
)
configitem('bugzilla', 'usermap',
default=None,
)
configitem('bugzilla', 'version',
default=None,
)
class bzaccess(object):
'''Base class for access to Bugzilla.'''
def __init__(self, ui):
self.ui = ui
usermap = self.ui.config('bugzilla', 'usermap')
if usermap:
self.ui.readconfig(usermap, sections=['usermap'])
def map_committer(self, user):
'''map name of committer to Bugzilla user name.'''
for committer, bzuser in self.ui.configitems('usermap'):
if committer.lower() == user.lower():
return bzuser
return user
# Methods to be implemented by access classes.
#
# 'bugs' is a dict keyed on bug id, where values are a dict holding
# updates to bug state. Recognized dict keys are:
#
# 'hours': Value, float containing work hours to be updated.
# 'fix': If key present, bug is to be marked fixed. Value ignored.
def filter_real_bug_ids(self, bugs):
'''remove bug IDs that do not exist in Bugzilla from bugs.'''
def filter_cset_known_bug_ids(self, node, bugs):
'''remove bug IDs where node occurs in comment text from bugs.'''
def updatebug(self, bugid, newstate, text, committer):
'''update the specified bug. Add comment text and set new states.
If possible add the comment as being from the committer of
the changeset. Otherwise use the default Bugzilla user.
'''
def notify(self, bugs, committer):
'''Force sending of Bugzilla notification emails.
Only required if the access method does not trigger notification
emails automatically.
'''
# Bugzilla via direct access to MySQL database.
class bzmysql(bzaccess):
'''Support for direct MySQL access to Bugzilla.
The earliest Bugzilla version this is tested with is version 2.16.
If your Bugzilla is version 3.4 or above, you are strongly
recommended to use the XMLRPC access method instead.
'''
@staticmethod
def sql_buglist(ids):
'''return SQL-friendly list of bug ids'''
return '(' + ','.join(map(str, ids)) + ')'
_MySQLdb = None
def __init__(self, ui):
try:
import MySQLdb as mysql
bzmysql._MySQLdb = mysql
except ImportError as err:
raise error.Abort(_('python mysql support not available: %s') % err)
bzaccess.__init__(self, ui)
host = self.ui.config('bugzilla', 'host')
user = self.ui.config('bugzilla', 'user')
passwd = self.ui.config('bugzilla', 'password')
db = self.ui.config('bugzilla', 'db')
timeout = int(self.ui.config('bugzilla', 'timeout'))
self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
(host, db, user, '*' * len(passwd)))
self.conn = bzmysql._MySQLdb.connect(host=host,
user=user, passwd=passwd,
db=db,
connect_timeout=timeout)
self.cursor = self.conn.cursor()
self.longdesc_id = self.get_longdesc_id()
self.user_ids = {}
self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
def run(self, *args, **kwargs):
'''run a query.'''
self.ui.note(_('query: %s %s\n') % (args, kwargs))
try:
self.cursor.execute(*args, **kwargs)
except bzmysql._MySQLdb.MySQLError:
self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
raise
def get_longdesc_id(self):
'''get identity of longdesc field'''
self.run('select fieldid from fielddefs where name = "longdesc"')
ids = self.cursor.fetchall()
if len(ids) != 1:
raise error.Abort(_('unknown database schema'))
return ids[0][0]
def filter_real_bug_ids(self, bugs):
'''filter not-existing bugs from set.'''
self.run('select bug_id from bugs where bug_id in %s' %
bzmysql.sql_buglist(bugs.keys()))
existing = [id for (id,) in self.cursor.fetchall()]
for id in bugs.keys():
if id not in existing:
self.ui.status(_('bug %d does not exist\n') % id)
del bugs[id]
def filter_cset_known_bug_ids(self, node, bugs):
'''filter bug ids that already refer to this changeset from set.'''
self.run('''select bug_id from longdescs where
bug_id in %s and thetext like "%%%s%%"''' %
(bzmysql.sql_buglist(bugs.keys()), short(node)))
for (id,) in self.cursor.fetchall():
self.ui.status(_('bug %d already knows about changeset %s\n') %
(id, short(node)))
del bugs[id]
def notify(self, bugs, committer):
'''tell bugzilla to send mail.'''
self.ui.status(_('telling bugzilla to send mail:\n'))
(user, userid) = self.get_bugzilla_user(committer)
for id in bugs.keys():
self.ui.status(_(' bug %s\n') % id)
cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
bzdir = self.ui.config('bugzilla', 'bzdir')
try:
# Backwards-compatible with old notify string, which
# took one string. This will throw with a new format
# string.
cmd = cmdfmt % id
except TypeError:
cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
self.ui.note(_('running notify command %s\n') % cmd)
fp = procutil.popen('(%s) 2>&1' % cmd, 'rb')
out = util.fromnativeeol(fp.read())
ret = fp.close()
if ret:
self.ui.warn(out)
raise error.Abort(_('bugzilla notify command %s') %
procutil.explainexit(ret))
self.ui.status(_('done\n'))
def get_user_id(self, user):
'''look up numeric bugzilla user id.'''
try:
return self.user_ids[user]
except KeyError:
try:
userid = int(user)
except ValueError:
self.ui.note(_('looking up user %s\n') % user)
self.run('''select userid from profiles
where login_name like %s''', user)
all = self.cursor.fetchall()
if len(all) != 1:
raise KeyError(user)
userid = int(all[0][0])
self.user_ids[user] = userid
return userid
def get_bugzilla_user(self, committer):
'''See if committer is a registered bugzilla user. Return
bugzilla username and userid if so. If not, return default
bugzilla username and userid.'''
user = self.map_committer(committer)
try:
userid = self.get_user_id(user)
except KeyError:
try:
defaultuser = self.ui.config('bugzilla', 'bzuser')
if not defaultuser:
raise error.Abort(_('cannot find bugzilla user id for %s') %
user)
userid = self.get_user_id(defaultuser)
user = defaultuser
except KeyError:
raise error.Abort(_('cannot find bugzilla user id for %s or %s')
% (user, defaultuser))
return (user, userid)
def updatebug(self, bugid, newstate, text, committer):
'''update bug state with comment text.
Try adding comment as committer of changeset, otherwise as
default bugzilla user.'''
if len(newstate) > 0:
self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))
(user, userid) = self.get_bugzilla_user(committer)
now = time.strftime(r'%Y-%m-%d %H:%M:%S')
self.run('''insert into longdescs
(bug_id, who, bug_when, thetext)
values (%s, %s, %s, %s)''',
(bugid, userid, now, text))
self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
values (%s, %s, %s, %s)''',
(bugid, userid, now, self.longdesc_id))
self.conn.commit()
class bzmysql_2_18(bzmysql):
'''support for bugzilla 2.18 series.'''
def __init__(self, ui):
bzmysql.__init__(self, ui)
self.default_notify = \
"cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
class bzmysql_3_0(bzmysql_2_18):
'''support for bugzilla 3.0 series.'''
def __init__(self, ui):
bzmysql_2_18.__init__(self, ui)
def get_longdesc_id(self):
'''get identity of longdesc field'''
self.run('select id from fielddefs where name = "longdesc"')
ids = self.cursor.fetchall()
if len(ids) != 1:
raise error.Abort(_('unknown database schema'))
return ids[0][0]
# Bugzilla via XMLRPC interface.
class cookietransportrequest(object):
"""A Transport request method that retains cookies over its lifetime.
The regular xmlrpclib transports ignore cookies. Which causes
a bit of a problem when you need a cookie-based login, as with
the Bugzilla XMLRPC interface prior to 4.4.3.
So this is a helper for defining a Transport which looks for
cookies being set in responses and saves them to add to all future
requests.
"""
# Inspiration drawn from
# http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
# http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/
cookies = []
def send_cookies(self, connection):
if self.cookies:
for cookie in self.cookies:
connection.putheader("Cookie", cookie)
def request(self, host, handler, request_body, verbose=0):
self.verbose = verbose
self.accept_gzip_encoding = False
# issue XML-RPC request
h = self.make_connection(host)
if verbose:
h.set_debuglevel(1)
self.send_request(h, handler, request_body)
self.send_host(h, host)
self.send_cookies(h)
self.send_user_agent(h)
self.send_content(h, request_body)
# Deal with differences between Python 2.6 and 2.7.
# In the former h is a HTTP(S). In the latter it's a
# HTTP(S)Connection. Luckily, the 2.6 implementation of
# HTTP(S) has an underlying HTTP(S)Connection, so extract
# that and use it.
try:
response = h.getresponse()
except AttributeError:
response = h._conn.getresponse()
# Add any cookie definitions to our list.
for header in response.msg.getallmatchingheaders("Set-Cookie"):
val = header.split(": ", 1)[1]
cookie = val.split(";", 1)[0]
self.cookies.append(cookie)
if response.status != 200:
raise xmlrpclib.ProtocolError(host + handler, response.status,
response.reason, response.msg.headers)
payload = response.read()
parser, unmarshaller = self.getparser()
parser.feed(payload)
parser.close()
return unmarshaller.close()
# The explicit calls to the underlying xmlrpclib __init__() methods are
# necessary. The xmlrpclib.Transport classes are old-style classes, and
# it turns out their __init__() doesn't get called when doing multiple
# inheritance with a new-style class.
class cookietransport(cookietransportrequest, xmlrpclib.Transport):
def __init__(self, use_datetime=0):
if util.safehasattr(xmlrpclib.Transport, "__init__"):
xmlrpclib.Transport.__init__(self, use_datetime)
class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
def __init__(self, use_datetime=0):
if util.safehasattr(xmlrpclib.Transport, "__init__"):
xmlrpclib.SafeTransport.__init__(self, use_datetime)
class bzxmlrpc(bzaccess):
"""Support for access to Bugzilla via the Bugzilla XMLRPC API.
Requires a minimum Bugzilla version 3.4.
"""
def __init__(self, ui):
bzaccess.__init__(self, ui)
bzweb = self.ui.config('bugzilla', 'bzurl')
bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"
user = self.ui.config('bugzilla', 'user')
passwd = self.ui.config('bugzilla', 'password')
self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
ver = self.bzproxy.Bugzilla.version()['version'].split('.')
self.bzvermajor = int(ver[0])
self.bzverminor = int(ver[1])
login = self.bzproxy.User.login({'login': user, 'password': passwd,
'restrict_login': True})
self.bztoken = login.get('token', '')
def transport(self, uri):
if util.urlreq.urlparse(uri, "http")[0] == "https":
return cookiesafetransport()
else:
return cookietransport()
def get_bug_comments(self, id):
"""Return a string with all comment text for a bug."""
c = self.bzproxy.Bug.comments({'ids': [id],
'include_fields': ['text'],
'token': self.bztoken})
return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])
def filter_real_bug_ids(self, bugs):
probe = self.bzproxy.Bug.get({'ids': sorted(bugs.keys()),
'include_fields': [],
'permissive': True,
'token': self.bztoken,
})
for badbug in probe['faults']:
id = badbug['id']
self.ui.status(_('bug %d does not exist\n') % id)
del bugs[id]
def filter_cset_known_bug_ids(self, node, bugs):
for id in sorted(bugs.keys()):
if self.get_bug_comments(id).find(short(node)) != -1:
self.ui.status(_('bug %d already knows about changeset %s\n') %
(id, short(node)))
del bugs[id]
def updatebug(self, bugid, newstate, text, committer):
args = {}
if 'hours' in newstate:
args['work_time'] = newstate['hours']
if self.bzvermajor >= 4:
args['ids'] = [bugid]
args['comment'] = {'body' : text}
if 'fix' in newstate:
args['status'] = self.fixstatus
args['resolution'] = self.fixresolution
args['token'] = self.bztoken
self.bzproxy.Bug.update(args)
else:
if 'fix' in newstate:
self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
"to mark bugs fixed\n"))
args['id'] = bugid
args['comment'] = text
self.bzproxy.Bug.add_comment(args)
class bzxmlrpcemail(bzxmlrpc):
"""Read data from Bugzilla via XMLRPC, send updates via email.
Advantages of sending updates via email:
1. Comments can be added as any user, not just logged in user.
2. Bug statuses or other fields not accessible via XMLRPC can
potentially be updated.
There is no XMLRPC function to change bug status before Bugzilla
4.0, so bugs cannot be marked fixed via XMLRPC before Bugzilla 4.0.
But bugs can be marked fixed via email from 3.4 onwards.
"""
# The email interface changes subtly between 3.4 and 3.6. In 3.4,
# in-email fields are specified as '@<fieldname> = <value>'. In
# 3.6 this becomes '@<fieldname> <value>'. And fieldname @bug_id
# in 3.4 becomes @id in 3.6. 3.6 and 4.0 both maintain backwards
# compatibility, but rather than rely on this use the new format for
# 4.0 onwards.
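    # Illustrative examples of the resulting command lines produced by
    # makecommandline() below (bug id 1234 is a placeholder):
    #
    #   Bugzilla >= 4.0:  makecommandline("id", 1234)  ->  "@id 1234"
    #   Bugzilla  < 4.0:  makecommandline("id", 1234)  ->  "@bug_id = 1234"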
def __init__(self, ui):
bzxmlrpc.__init__(self, ui)
self.bzemail = self.ui.config('bugzilla', 'bzemail')
if not self.bzemail:
raise error.Abort(_("configuration 'bzemail' missing"))
mail.validateconfig(self.ui)
def makecommandline(self, fieldname, value):
if self.bzvermajor >= 4:
return "@%s %s" % (fieldname, str(value))
else:
if fieldname == "id":
fieldname = "bug_id"
return "@%s = %s" % (fieldname, str(value))
def send_bug_modify_email(self, bugid, commands, comment, committer):
'''send modification message to Bugzilla bug via email.
The message format is documented in the Bugzilla email_in.pl
specification. commands is a list of command lines, comment is the
comment text.
To stop users from crafting commit comments with
Bugzilla commands, specify the bug ID via the message body, rather
than the subject line, and leave a blank line after it.
'''
user = self.map_committer(committer)
matches = self.bzproxy.User.get({'match': [user],
'token': self.bztoken})
if not matches['users']:
user = self.ui.config('bugzilla', 'user')
matches = self.bzproxy.User.get({'match': [user],
'token': self.bztoken})
if not matches['users']:
raise error.Abort(_("default bugzilla user %s email not found")
% user)
user = matches['users'][0]['email']
commands.append(self.makecommandline("id", bugid))
text = "\n".join(commands) + "\n\n" + comment
_charsets = mail._charsets(self.ui)
user = mail.addressencode(self.ui, user, _charsets)
bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
msg = mail.mimeencode(self.ui, text, _charsets)
msg['From'] = user
msg['To'] = bzemail
msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
sendmail = mail.connect(self.ui)
sendmail(user, bzemail, msg.as_string())
def updatebug(self, bugid, newstate, text, committer):
cmds = []
if 'hours' in newstate:
cmds.append(self.makecommandline("work_time", newstate['hours']))
if 'fix' in newstate:
cmds.append(self.makecommandline("bug_status", self.fixstatus))
cmds.append(self.makecommandline("resolution", self.fixresolution))
self.send_bug_modify_email(bugid, cmds, text, committer)
class NotFound(LookupError):
pass
class bzrestapi(bzaccess):
"""Read and write bugzilla data using the REST API available since
Bugzilla 5.0.
"""
def __init__(self, ui):
bzaccess.__init__(self, ui)
bz = self.ui.config('bugzilla', 'bzurl')
self.bzroot = '/'.join([bz, 'rest'])
self.apikey = self.ui.config('bugzilla', 'apikey')
self.user = self.ui.config('bugzilla', 'user')
self.passwd = self.ui.config('bugzilla', 'password')
self.fixstatus = self.ui.config('bugzilla', 'fixstatus')
self.fixresolution = self.ui.config('bugzilla', 'fixresolution')
def apiurl(self, targets, include_fields=None):
url = '/'.join([self.bzroot] + [str(t) for t in targets])
qv = {}
if self.apikey:
qv['api_key'] = self.apikey
elif self.user and self.passwd:
qv['login'] = self.user
qv['password'] = self.passwd
if include_fields:
qv['include_fields'] = include_fields
if qv:
url = '%s?%s' % (url, util.urlreq.urlencode(qv))
return url
def _fetch(self, burl):
try:
resp = url.open(self.ui, burl)
return json.loads(resp.read())
except util.urlerr.httperror as inst:
if inst.code == 401:
raise error.Abort(_('authorization failed'))
if inst.code == 404:
raise NotFound()
else:
raise
def _submit(self, burl, data, method='POST'):
data = json.dumps(data)
if method == 'PUT':
class putrequest(util.urlreq.request):
def get_method(self):
return 'PUT'
request_type = putrequest
else:
request_type = util.urlreq.request
req = request_type(burl, data,
{'Content-Type': 'application/json'})
try:
resp = url.opener(self.ui).open(req)
return json.loads(resp.read())
except util.urlerr.httperror as inst:
if inst.code == 401:
raise error.Abort(_('authorization failed'))
if inst.code == 404:
raise NotFound()
else:
raise
def filter_real_bug_ids(self, bugs):
'''remove bug IDs that do not exist in Bugzilla from bugs.'''
badbugs = set()
for bugid in bugs:
burl = self.apiurl(('bug', bugid), include_fields='status')
try:
self._fetch(burl)
except NotFound:
badbugs.add(bugid)
for bugid in badbugs:
del bugs[bugid]
def filter_cset_known_bug_ids(self, node, bugs):
'''remove bug IDs where node occurs in comment text from bugs.'''
sn = short(node)
for bugid in bugs.keys():
burl = self.apiurl(('bug', bugid, 'comment'), include_fields='text')
result = self._fetch(burl)
comments = result['bugs'][str(bugid)]['comments']
if any(sn in c['text'] for c in comments):
self.ui.status(_('bug %d already knows about changeset %s\n') %
(bugid, sn))
del bugs[bugid]
def updatebug(self, bugid, newstate, text, committer):
'''update the specified bug. Add comment text and set new states.
If possible add the comment as being from the committer of
the changeset. Otherwise use the default Bugzilla user.
'''
bugmod = {}
if 'hours' in newstate:
bugmod['work_time'] = newstate['hours']
if 'fix' in newstate:
bugmod['status'] = self.fixstatus
bugmod['resolution'] = self.fixresolution
if bugmod:
# if we have to change the bugs state do it here
bugmod['comment'] = {
'comment': text,
'is_private': False,
'is_markdown': False,
}
burl = self.apiurl(('bug', bugid))
self._submit(burl, bugmod, method='PUT')
self.ui.debug('updated bug %s\n' % bugid)
else:
burl = self.apiurl(('bug', bugid, 'comment'))
self._submit(burl, {
'comment': text,
'is_private': False,
'is_markdown': False,
})
self.ui.debug('added comment to bug %s\n' % bugid)
def notify(self, bugs, committer):
'''Force sending of Bugzilla notification emails.
Only required if the access method does not trigger notification
emails automatically.
'''
pass
class bugzilla(object):
# supported versions of bugzilla. different versions have
# different schemas.
_versions = {
'2.16': bzmysql,
'2.18': bzmysql_2_18,
'3.0': bzmysql_3_0,
'xmlrpc': bzxmlrpc,
'xmlrpc+email': bzxmlrpcemail,
'restapi': bzrestapi,
}
def __init__(self, ui, repo):
self.ui = ui
self.repo = repo
bzversion = self.ui.config('bugzilla', 'version')
try:
bzclass = bugzilla._versions[bzversion]
except KeyError:
raise error.Abort(_('bugzilla version %s not supported') %
bzversion)
self.bzdriver = bzclass(self.ui)
self.bug_re = re.compile(
self.ui.config('bugzilla', 'regexp'), re.IGNORECASE)
self.fix_re = re.compile(
self.ui.config('bugzilla', 'fixregexp'), re.IGNORECASE)
self.split_re = re.compile(r'\D+')
def find_bugs(self, ctx):
'''return bugs dictionary created from commit comment.
Extract bug info from changeset comments. Filter out any that are
not known to Bugzilla, and any that already have a reference to
the given changeset in their comments.
'''
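        # Illustrative sketch only, assuming the configured regexps match text
        # such as "fixes bug 1234" and "bug 1234": for a fix-style reference
        # this method returns something like {1234: {'fix': None}}, for a plain
        # reference {1234: {}}, with an optional 'hours' entry when the match
        # carries an hours group.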
start = 0
hours = 0.0
bugs = {}
bugmatch = self.bug_re.search(ctx.description(), start)
fixmatch = self.fix_re.search(ctx.description(), start)
while True:
bugattribs = {}
if not bugmatch and not fixmatch:
break
if not bugmatch:
m = fixmatch
elif not fixmatch:
m = bugmatch
else:
if bugmatch.start() < fixmatch.start():
m = bugmatch
else:
m = fixmatch
start = m.end()
if m is bugmatch:
bugmatch = self.bug_re.search(ctx.description(), start)
if 'fix' in bugattribs:
del bugattribs['fix']
else:
fixmatch = self.fix_re.search(ctx.description(), start)
bugattribs['fix'] = None
try:
ids = m.group('ids')
except IndexError:
ids = m.group(1)
try:
hours = float(m.group('hours'))
bugattribs['hours'] = hours
except IndexError:
pass
except TypeError:
pass
except ValueError:
self.ui.status(_("%s: invalid hours\n") % m.group('hours'))
for id in self.split_re.split(ids):
if not id:
continue
bugs[int(id)] = bugattribs
if bugs:
self.bzdriver.filter_real_bug_ids(bugs)
if bugs:
self.bzdriver.filter_cset_known_bug_ids(ctx.node(), bugs)
return bugs
def update(self, bugid, newstate, ctx):
'''update bugzilla bug with reference to changeset.'''
def webroot(root):
'''strip leading prefix of repo root and turn into
url-safe path.'''
count = int(self.ui.config('bugzilla', 'strip'))
root = util.pconvert(root)
while count > 0:
c = root.find('/')
if c == -1:
break
root = root[c + 1:]
count -= 1
return root
mapfile = None
tmpl = self.ui.config('bugzilla', 'template')
if not tmpl:
mapfile = self.ui.config('bugzilla', 'style')
if not mapfile and not tmpl:
tmpl = _('changeset {node|short} in repo {root} refers '
'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
spec = logcmdutil.templatespec(tmpl, mapfile)
t = logcmdutil.changesettemplater(self.ui, self.repo, spec)
self.ui.pushbuffer()
t.show(ctx, changes=ctx.changeset(),
bug=str(bugid),
hgweb=self.ui.config('web', 'baseurl'),
root=self.repo.root,
webroot=webroot(self.repo.root))
data = self.ui.popbuffer()
self.bzdriver.updatebug(bugid, newstate, data,
stringutil.email(ctx.user()))
def notify(self, bugs, committer):
'''ensure Bugzilla users are notified of bug change.'''
self.bzdriver.notify(bugs, committer)
def hook(ui, repo, hooktype, node=None, **kwargs):
'''add comment to bugzilla for each changeset that refers to a
bugzilla bug id. only add a comment once per bug, so same change
seen multiple times does not fill bug with duplicate data.'''
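    # Illustrative hgrc sketch only (the exact hook path may differ per setup):
    #
    #   [hooks]
    #   incoming.bugzilla = python:hgext.bugzilla.hook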
if node is None:
raise error.Abort(_('hook type %s does not pass a changeset id') %
hooktype)
try:
bz = bugzilla(ui, repo)
ctx = repo[node]
bugs = bz.find_bugs(ctx)
if bugs:
for bug in bugs:
bz.update(bug, bugs[bug], ctx)
bz.notify(bugs, stringutil.email(ctx.user()))
except Exception as e:
raise error.Abort(_('Bugzilla error: %s') % e)
| 36.855752
| 80
| 0.608159
|
4330f9467f7c110c1a4825b89431b4731cd35026
| 13,904
|
py
|
Python
|
benchmarks/generate_benchmark_params.py
|
YannCabanes/geomstats
|
ce3f4bab6cd59c2f071371a46e336086771d0493
|
[
"MIT"
] | null | null | null |
benchmarks/generate_benchmark_params.py
|
YannCabanes/geomstats
|
ce3f4bab6cd59c2f071371a46e336086771d0493
|
[
"MIT"
] | null | null | null |
benchmarks/generate_benchmark_params.py
|
YannCabanes/geomstats
|
ce3f4bab6cd59c2f071371a46e336086771d0493
|
[
"MIT"
] | null | null | null |
"""Benchmarking parameters generation file."""
import argparse
from itertools import chain, product
import pandas as pd
parser = argparse.ArgumentParser(
description="Generate parameters for which benchmark is run"
)
parser.add_argument(
"-m",
"--manifold",
type=str,
default="all",
help="Manifold for which benchmark is run. 'all' denotes all manifolds present.",
)
parser.add_argument(
"-n",
"--n_samples",
type=int,
default=10,
help="Number of samples for which benchmark is run",
)
args = parser.parse_args()
def spd_manifold_params(n_samples):
"""Generate spd manifold benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "SPDMatrices"
manifold_args = [(2,), (5,), (10,)]
module = "geomstats.geometry.spd_matrices"
def spd_affine_metric_params():
params = []
metric = "SPDMetricAffine"
power_args = [-0.5, 1, 0.5]
metric_args = list(product([item for item, in manifold_args], power_args))
manifold_args_re = [
item for item in manifold_args for i in range(len(power_args))
]
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args_re, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
def spd_bures_wasserstein_metric_params():
params = []
metric = "SPDMetricBuresWasserstein"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
def spd_euclidean_metric_params():
params = []
metric = "SPDMetricEuclidean"
power_args = [-0.5, 1, 0.5]
metric_args = list(product([item for item, in manifold_args], power_args))
manifold_args_re = [
item for item in manifold_args for i in range(len(power_args))
]
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args_re, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
def spd_log_euclidean_metric_params():
params = []
metric = "SPDMetricLogEuclidean"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return list(
chain(
*[
spd_bures_wasserstein_metric_params(),
spd_affine_metric_params(),
spd_euclidean_metric_params(),
spd_log_euclidean_metric_params(),
]
)
)
def stiefel_params(n_samples):
"""Generate stiefel benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "Stiefel"
manifold_args = [(3, 2), (4, 3)]
module = "geomstats.geometry.stiefel"
def stiefel_canonical_metric_params():
params = []
metric = "StiefelCanonicalMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return stiefel_canonical_metric_params()
def pre_shape_params(n_samples):
"""Generate pre shape benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "PreShapeSpace"
manifold_args = [(3, 3), (5, 5)]
module = "geomstats.geometry.pre_shape"
def pre_shape_metric_params():
params = []
metric = "PreShapeMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return pre_shape_metric_params()
def positive_lower_triangular_matrices_params(n_samples):
"""Generate positive lower triangular matrices benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "PositiveLowerTriangularMatrices"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.positive_lower_triangular_matrices"
def cholesky_metric_params():
params = []
metric = "CholeskyMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return cholesky_metric_params()
def minkowski_params(n_samples):
"""Generate minkowski benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "Minkowski"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.minkowski"
def minkowski_metric_params():
params = []
metric = "MinkowskiMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return minkowski_metric_params()
def matrices_params(n_samples):
"""Generate matrices benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "Matrices"
manifold_args = [(3, 3), (5, 5)]
module = "geomstats.geometry.matrices"
def matrices_metric_params():
params = []
metric = "MatricesMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return matrices_metric_params()
def hypersphere_params(n_samples):
"""Generate hypersphere benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "Hypersphere"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.hypersphere"
def hypersphere_metric_params():
params = []
metric = "HypersphereMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return hypersphere_metric_params()
def grassmanian_params(n_samples):
"""Generate grassmanian parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "Grassmannian"
manifold_args = [(4, 3), (5, 4)]
module = "geomstats.geometry.grassmannian"
def grassmannian_canonical_metric_params():
params = []
metric = "GrassmannianCanonicalMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return grassmannian_canonical_metric_params()
def full_rank_correlation_matrices_params(n_samples):
"""Generate full rank correlation matrices benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "FullRankCorrelationMatrices"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.full_rank_correlation_matrices"
def full_rank_correlation_affine_quotient_metric_params():
params = []
metric = "FullRankCorrelationAffineQuotientMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return full_rank_correlation_affine_quotient_metric_params()
def hyperboloid_params(n_samples):
"""Generate hyperboloid benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "Hyperboloid"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.hyperboloid"
def hyperboloid_metric_params():
params = []
metric = "HyperboloidMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return hyperboloid_metric_params()
def poincare_ball_params(n_samples):
"""Generate poincare ball benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "PoincareBall"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.poincare_ball"
def poincare_ball_metric_params():
params = []
metric = "PoincareBallMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return poincare_ball_metric_params()
def poincare_half_space_params(n_samples):
"""Generate poincare half space benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "PoincareHalfSpace"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.poincare_half_space"
def poincare_half_space_metric_params():
params = []
metric = "PoincareHalfSpaceMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return poincare_half_space_metric_params()
def poincare_polydisk_params(n_samples):
"""Generate poincare polydisk benchmarking parameters.
Parameters
----------
n_samples : int
Number of samples to be used.
Returns
-------
_ : list.
List of params.
"""
manifold = "PoincarePolydisk"
manifold_args = [(3,), (5,)]
module = "geomstats.geometry.poincare_polydisk"
def poincare_poly_disk_metric_params():
params = []
metric = "PoincarePolydiskMetric"
metric_args = manifold_args
kwargs = {}
common = manifold, module, metric, n_samples, kwargs
for manifold_arg, metric_arg in zip(manifold_args, metric_args):
params += [common + (manifold_arg, metric_arg)]
return params
return poincare_poly_disk_metric_params()
manifolds = [
"spd_manifold",
"stiefel",
"pre_shape",
"positive_lower_triangular_matrices",
"minkowski",
"matrices",
"hypersphere",
"grassmanian",
"hyperboloid",
"poincare_ball",
"poincare_half_space",
]
def generate_benchmark_params(manifold="all", n_samples=10):
"""Generate parameters for benchmarking.
Parameters
----------
manifold : str
Manifold name or all.
Optional, default "all".
n_samples : int
Number of samples.
Optional, default 10.
"""
params_list = []
manifolds_list = manifolds if manifold == "all" else [manifold]
params_list = [
globals()[manifold + "_params"](n_samples) for manifold in manifolds_list
]
params_list = list(chain(*params_list))
df = pd.DataFrame(
params_list,
columns=[
"manifold",
"module",
"metric",
"n_samples",
"exp_kwargs",
"manifold_args",
"metric_args",
],
)
df.to_pickle("benchmark_params.pkl")
print("Generated params at benchmark_params.pkl.")
def main():
"""Generate Benchmark Params."""
generate_benchmark_params(args.manifold, args.n_samples)
if __name__ == "__main__":
main()
| 26.636015
| 85
| 0.617161
|
ad1afd439e26fd72c45d39b44ff836428275dbd6
| 380
|
py
|
Python
|
django_fixtures/unittests.py
|
retxxxirt/django-fixtures
|
8a8d3c1ac49291716c02efe56ed0b9697b93370c
|
[
"MIT"
] | null | null | null |
django_fixtures/unittests.py
|
retxxxirt/django-fixtures
|
8a8d3c1ac49291716c02efe56ed0b9697b93370c
|
[
"MIT"
] | null | null | null |
django_fixtures/unittests.py
|
retxxxirt/django-fixtures
|
8a8d3c1ac49291716c02efe56ed0b9697b93370c
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from .fixtures import RequiredFixturesMixin, FixturesData
class FixturesTestCase(TestCase, RequiredFixturesMixin):
@classmethod
def setUpClass(cls):
initial_fixtures, cls.fixtures = FixturesData(cls.fixtures or []), []
super().setUpClass()
cls.fixtures = initial_fixtures
cls._load_required_fixtures()
| 25.333333
| 77
| 0.726316
|
3ebb7738c5109cb46e77bedfa17fa8fc1d8ad1bc
| 414
|
py
|
Python
|
testprograms/MainLarge_slave.py
|
jchmrt/ev3dev2simulator
|
3a8968162d1658a82860a613caf9986c5428b124
|
[
"MIT"
] | 2
|
2020-07-14T01:31:14.000Z
|
2021-02-22T19:14:12.000Z
|
testprograms/MainLarge_slave.py
|
jchmrt/ev3dev2simulator
|
3a8968162d1658a82860a613caf9986c5428b124
|
[
"MIT"
] | 19
|
2020-02-16T08:11:23.000Z
|
2020-12-10T10:06:36.000Z
|
testprograms/MainLarge_slave.py
|
jchmrt/ev3dev2simulator
|
3a8968162d1658a82860a613caf9986c5428b124
|
[
"MIT"
] | 10
|
2020-03-02T08:37:29.000Z
|
2022-03-06T03:49:07.000Z
|
#!/usr/bin/env python3
from ev3dev2._platform.ev3 import INPUT_1, INPUT_4, INPUT_2, INPUT_3, OUTPUT_B
from ev3dev2.led import Leds
from ev3dev2.sensor.lego import TouchSensor
from testprograms.BluetoothHelper import BluetoothHelper
bth = BluetoothHelper()
bth.connect_as_client()
leds = Leds()
leds.animate_rainbow()
ts1 = TouchSensor(INPUT_2)
ts2 = TouchSensor(INPUT_3)
while True:
leds.animate_rainbow()
| 23
| 78
| 0.797101
|
d536aa27b78509b6f1783f2d586aae9370b665fc
| 7,866
|
py
|
Python
|
code/glow_pytorch/hparams_tuning.py
|
jonepatr/lets_face_it
|
fefba5e82d236f89703449bd517cfa5867fda09f
|
[
"MIT"
] | 11
|
2020-10-21T09:58:53.000Z
|
2022-01-22T08:31:57.000Z
|
code/glow_pytorch/hparams_tuning.py
|
jonepatr/lets_face_it
|
fefba5e82d236f89703449bd517cfa5867fda09f
|
[
"MIT"
] | 3
|
2021-05-05T07:15:45.000Z
|
2021-12-14T14:43:42.000Z
|
code/glow_pytorch/hparams_tuning.py
|
jonepatr/lets_face_it
|
fefba5e82d236f89703449bd517cfa5867fda09f
|
[
"MIT"
] | 4
|
2020-10-21T09:46:22.000Z
|
2021-12-16T11:41:03.000Z
|
import json
import multiprocessing
import os
import shutil
import socket
from argparse import ArgumentParser, Namespace
from pprint import pprint
from glow_pytorch.glow.utils import calc_jerk, get_longest_history
import numpy as np
import optuna
import pytorch_lightning as pl
import torch
import yaml
from jsmin import jsmin
from optuna.integration import PyTorchLightningPruningCallback
from pytorch_lightning import Trainer, seed_everything
from glow_pytorch.glow.lets_face_it_glow import LetsFaceItGlow
from glow_pytorch.hparam_tuning_configs import hparam_configs
from misc.shared import CONFIG, DATA_DIR, RANDOM_SEED
from misc.utils import get_training_name
seed_everything(RANDOM_SEED)
class FailedTrial(Exception):
pass
class MyEarlyStopping(PyTorchLightningPruningCallback):
def __init__(self, trial, monitor="val_loss", patience=2):
super().__init__(trial, monitor=monitor)
self.best_loss = torch.tensor(np.Inf)
self.wait = 0
self.patience = patience
self.jerk_generated_means = []
def on_train_batch_end(
self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx
):
super().on_validation_batch_end(
trainer, pl_module, outputs, batch, batch_idx, dataloader_idx
)
if pl_module.global_step > 20 and outputs > 0:
message = f"Trial was pruned since loss > 0"
raise optuna.exceptions.TrialPruned(message)
def on_validation_epoch_start(self, trainer, pl_module):
self.jerk_generated_means = []
def on_validation_batch_end(
self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx
):
super().on_validation_batch_end(
trainer, pl_module, outputs, batch, batch_idx, dataloader_idx
)
seq_len = pl_module.hparams.Validation["seq_len"]
new_batch = {x: y.type_as(outputs) for x, y in batch.items()}
cond_data = {
"p1_face": new_batch["p1_face"][
:, : get_longest_history(pl_module.hparams.Conditioning)
],
"p2_face": new_batch.get("p2_face"),
"p1_speech": new_batch.get("p1_speech"),
"p2_speech": new_batch.get("p2_speech"),
}
predicted_seq = pl_module.seq_glow.inference(seq_len, data=cond_data)
self.jerk_generated_means.append(calc_jerk(predicted_seq))
if pl_module.global_step > 20 and outputs > 0:
message = f"Trial was pruned since loss > 0"
raise optuna.exceptions.TrialPruned(message)
def on_validation_epoch_end(
self, trainer, pl_module, outputs, batch, batch_idx, dataloader_idx
):
super().on_validation_epoch_end(
trainer, pl_module, outputs, batch, batch_idx, dataloader_idx
)
jerk_generated_mean = torch.stack(self.jerk_generated_means).mean()
val_loss = torch.stack(outputs).mean()
if jerk_generated_mean > 10 and pl_module.global_step > 20:
message = f"Trial was pruned since jerk > 5"
raise optuna.exceptions.TrialPruned(message)
if val_loss is not None and val_loss > 0:
message = f"Trial was pruned because val loss was too high {val_loss}."
raise optuna.exceptions.TrialPruned(message)
if val_loss < self.best_loss:
self.best_loss = val_loss
self.wait = 0
else:
self.wait += 1
if self.wait >= self.patience:
return True
parser = ArgumentParser()
parser.add_argument("hparams_file")
parser.add_argument("-n", type=int)
parser = Trainer.add_argparse_args(parser)
default_params = parser.parse_args()
parser2 = ArgumentParser()
parser2.add_argument("hparams_file")
parser2.add_argument("-n", type=int)
override_params, unknown = parser2.parse_known_args()
conf_name = (
os.path.basename(override_params.hparams_file)
.replace(".yaml", "")
.replace(".json", "")
)
def prepare_hparams(trial):
if override_params.hparams_file.endswith(".json"):
hparams_json = json.loads(jsmin(open(override_params.hparams_file).read()))
elif override_params.hparams_file.endswith(".yaml"):
hparams_json = yaml.load(open(override_params.hparams_file))
hparams_json["dataset_root"] = str(DATA_DIR)
params = vars(default_params)
params.update(hparams_json)
params.update(vars(override_params))
hparams = Namespace(**params)
return hparam_configs[conf_name].hparam_options(hparams, trial)
def run(hparams, return_dict, trial, batch_size, current_date):
log_path = os.path.join("logs", conf_name, f"{current_date}")
if os.path.exists(log_path):
shutil.rmtree(log_path)
hparams.batch_size = batch_size
trainer_params = vars(hparams).copy()
trainer_params["checkpoint_callback"] = pl.callbacks.ModelCheckpoint(
save_top_k=3, monitor="val_loss", mode="min"
)
if CONFIG["comet"]["api_key"]:
from pytorch_lightning.loggers import CometLogger
trainer_params["logger"] = CometLogger(
api_key=CONFIG["comet"]["api_key"],
project_name=CONFIG["comet"]["project_name"],
experiment_name=conf_name, # + current_date
)
trainer_params["early_stop_callback"] = MyEarlyStopping(trial, monitor="val_loss")
trainer = Trainer(**trainer_params)
model = LetsFaceItGlow(hparams)
try:
trainer.fit(model)
except RuntimeError as e:
if str(e).startswith("CUDA out of memory"):
return_dict["OOM"] = True
else:
return_dict["error"] = e
except (optuna.exceptions.TrialPruned, Exception) as e:
return_dict["error"] = e
for key, item in trainer.callback_metrics.items():
return_dict[key] = float(item)
def objective(trial):
current_date = get_training_name()
manager = multiprocessing.Manager()
hparams = prepare_hparams(trial)
batch_size = hparams.batch_size
trial.set_user_attr("version", current_date)
trial.set_user_attr("host", socket.gethostname())
trial.set_user_attr("GPU", os.environ.get("CUDA_VISIBLE_DEVICES"))
pprint(vars(hparams))
while batch_size > 0:
print(f"trying with batch_size {batch_size}")
return_dict = manager.dict()
p = multiprocessing.Process(
target=run,
args=(hparams, return_dict, trial, batch_size, current_date),
)
p.start()
p.join()
print(return_dict)
if return_dict.get("OOM"):
new_batch_size = batch_size // 2
if new_batch_size < 2:
raise FailedTrial("batch size smaller than 2!")
else:
batch_size = new_batch_size
elif return_dict.get("error"):
raise return_dict.get("error")
else:
break
trial.set_user_attr("batch_size", batch_size)
for metric, val in return_dict.items():
if metric != "val_loss":
trial.set_user_attr(metric, float(val))
return float(return_dict["val_loss"])
if __name__ == "__main__":
conf_vars = {}
if CONFIG["optuna"]["rdbs_storage"]:
conf_vars["storage"] = optuna.storages.RDBStorage(
url=CONFIG["optuna"]["rdbs_storage"],
)
study = optuna.create_study(
**conf_vars,
study_name=conf_name,
direction="minimize",
pruner=optuna.pruners.NopPruner(),
load_if_exists=True,
)
study.optimize(objective, n_trials=override_params.n, catch=(FailedTrial,))
print("Number of finished trials: {}".format(len(study.trials)))
print("Best trial:")
trial = study.best_trial
print(" Value: {}".format(trial.value))
print(" Params: ")
for key, value in trial.params.items():
print(" {}: {}".format(key, value))
| 31.97561
| 86
| 0.663361
|
651706a4de46693005f32788864d0b406e2a9c27
| 5,917
|
py
|
Python
|
test/functional/bsv-pbv-processingorder.py
|
bxlkm1/yulecoin
|
3605faf2ff2e3c7bd381414613fc5c0234ad2936
|
[
"OML"
] | 8
|
2019-08-02T02:49:42.000Z
|
2022-01-17T15:51:48.000Z
|
test/functional/bsv-pbv-processingorder.py
|
bxlkm1/yulecoin
|
3605faf2ff2e3c7bd381414613fc5c0234ad2936
|
[
"OML"
] | null | null | null |
test/functional/bsv-pbv-processingorder.py
|
bxlkm1/yulecoin
|
3605faf2ff2e3c7bd381414613fc5c0234ad2936
|
[
"OML"
] | 4
|
2019-08-02T02:50:44.000Z
|
2021-05-28T03:21:38.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2019 Bitcoin Association
# Distributed under the Open BSV software license, see the accompanying file LICENSE.
"""
Testing that we correctly accept blocks even if, due to a race condition, a
child block arrives at the best chain activation stage before its parent.
We have the following case:
1
|
2
First we send block 1 but block it during validation; then we send block 2
and after a short while resume validation of block 1. Block 2 should be the
best tip in the end.
What happens:
1. block 1 is added to validation blocking list
2. block 1 is received
3. cs_main lock is obtained, block 1 is added to block index pool (mapBlockIndex),
stored to disk and
4. cs_main is released
*-1
5. block 2 is received
6. cs_main lock is obtained, block 2 is added to block index pool (mapBlockIndex),
stored to disk and
*-2
7. both blocks are in the validation stage and one of the validations is canceled
as the other is already running (there can't be more than one block from a
group in validation if that group consists of blocks that are descendants of
one another) - which validation stage is rejected depends on when the race
condition occurs (locations marked with a *-number);
both blocks arrive at the ActivateBestChain() function and activation of one
block is canceled as the other is already running.
Reason: if two blocks share a common ancestor (the paths towards those two
blocks contain common blocks) we prevent validating both, as we don't want to
validate a single block twice or have the two validations compete for the
same goal.
Which validation path is rejected depends on when the race condition occurs
(locations marked with a *-number).
The one ActivateBestChain() call that remains will continue until there are
no better chain candidates left, so even if block 1 starts validation first
and block 2's activation is canceled, block 2 will still be added by block 1's
ActivateBestChain() call.
8. block 1 is removed from validation blocking list
9. both blocks are now part of the active chain and a log entry for step 7 exists
A race condition can occur anywhere cs_main is released before the end of
block processing, but if that case doesn't manifest on its own we make sure it
occurs deterministically by blocking the validation of block 1: even if block 2
gets to the validation stage first it still has to validate block 1, since in
that case the parent (block 1) has not been validated yet - this way we
reliably get two checker queues attempting to validate block 1 at the same
time.
"""
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.mininode import (
NetworkThread,
NodeConn,
NodeConnCB,
msg_block,
)
from test_framework.test_framework import BitcoinTestFramework, ChainManager
from test_framework.util import (
assert_equal,
p2p_port,
wait_until
)
from bsv_pbv_common import wait_for_waiting_blocks
from test_framework.script import *
from test_framework.blocktools import create_transaction
from test_framework.key import CECKey
import glob
class PBVProcessingOrder(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.chain = ChainManager()
self.extra_args = [["-whitelist=127.0.0.1"]]
def run_test(self):
block_count = 0
# Create a P2P connections
node0 = NodeConnCB()
connection = NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0)
node0.add_connection(connection)
NetworkThread().start()
# wait_for_verack ensures that the P2P connection is fully up.
node0.wait_for_verack()
self.chain.set_genesis_hash(int(self.nodes[0].getbestblockhash(), 16))
block = self.chain.next_block(block_count)
block_count += 1
self.chain.save_spendable_output()
node0.send_message(msg_block(block))
for i in range(100):
block = self.chain.next_block(block_count)
block_count += 1
self.chain.save_spendable_output()
node0.send_message(msg_block(block))
out = []
for i in range(100):
out.append(self.chain.get_spendable_output())
self.log.info("waiting for block height 101 via rpc")
self.nodes[0].waitforblockheight(101)
# wait till validation of block or blocks finishes
node0.sync_with_ping()
block1 = self.chain.next_block(block_count, spend=out[0], extra_txns=8)
block_count += 1
# send block but block him at validation point
self.nodes[0].waitaftervalidatingblock(block1.hash, "add")
node0.send_message(msg_block(block1))
self.log.info(f"block1 hash: {block1.hash}")
# make sure block hash is in waiting list
wait_for_waiting_blocks({block1.hash}, self.nodes[0], self.log)
# send child block
block2 = self.chain.next_block(block_count, spend=out[1], extra_txns=10)
block_count += 1
node0.send_message(msg_block(block2))
self.log.info(f"block2 hash: {block2.hash}")
def wait_for_log():
line_text = block2.hash + " will not be considered by the current"
for line in open(glob.glob(self.options.tmpdir + "/node0" + "/regtest/bitcoind.log")[0]):
if line_text in line:
self.log.info("Found line: %s", line)
return True
return False
wait_until(wait_for_log)
self.nodes[0].waitaftervalidatingblock(block1.hash, "remove")
# wait till validation of block or blocks finishes
node0.sync_with_ping()
# block that arrived last on competing chain should be active
assert_equal(block2.hash, self.nodes[0].getbestblockhash())
if __name__ == '__main__':
PBVProcessingOrder().main()
| 38.174194
| 101
| 0.709481
|
cb5658ac19ef730efc2352b985a7c4ce7665483c
| 2,607
|
py
|
Python
|
ietf/community/migrations/0002_auto_20180220_1052.py
|
hassanakbar4/ietfdb
|
cabee059092ae776015410640226064331c293b7
|
[
"BSD-3-Clause"
] | 25
|
2022-03-05T08:26:52.000Z
|
2022-03-30T15:45:42.000Z
|
ietf/community/migrations/0002_auto_20180220_1052.py
|
hassanakbar4/ietfdb
|
cabee059092ae776015410640226064331c293b7
|
[
"BSD-3-Clause"
] | 219
|
2022-03-04T17:29:12.000Z
|
2022-03-31T21:16:14.000Z
|
ietf/community/migrations/0002_auto_20180220_1052.py
|
hassanakbar4/ietfdb
|
cabee059092ae776015410640226064331c293b7
|
[
"BSD-3-Clause"
] | 22
|
2022-03-04T15:34:34.000Z
|
2022-03-28T13:30:59.000Z
|
# Copyright The IETF Trust 2018-2020, All Rights Reserved
# -*- coding: utf-8 -*-
# Generated by Django 1.11.10 on 2018-02-20 10:52
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import ietf.utils.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('doc', '0001_initial'),
('group', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('community', '0001_initial'),
('person', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='searchrule',
name='group',
field=ietf.utils.models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='group.Group'),
),
migrations.AddField(
model_name='searchrule',
name='name_contains_index',
field=models.ManyToManyField(to='doc.Document'),
),
migrations.AddField(
model_name='searchrule',
name='person',
field=ietf.utils.models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='person.Person'),
),
migrations.AddField(
model_name='searchrule',
name='state',
field=ietf.utils.models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='doc.State'),
),
migrations.AddField(
model_name='emailsubscription',
name='community_list',
field=ietf.utils.models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='community.CommunityList'),
),
migrations.AddField(
model_name='emailsubscription',
name='email',
field=ietf.utils.models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='person.Email'),
),
migrations.AddField(
model_name='communitylist',
name='added_docs',
field=models.ManyToManyField(to='doc.Document'),
),
migrations.AddField(
model_name='communitylist',
name='group',
field=ietf.utils.models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='group.Group'),
),
migrations.AddField(
model_name='communitylist',
name='user',
field=ietf.utils.models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
),
]
| 36.71831
| 144
| 0.61987
|
06d97dad460660d257491a1bff579ba6e970000b
| 769
|
py
|
Python
|
project/admin.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
project/admin.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
project/admin.py
|
roshanba/mangal
|
f7b428811dc07214009cc33f0beb665ead402038
|
[
"bzip2-1.0.6",
"MIT"
] | null | null | null |
from django.contrib import admin
from project.models import Reservation, User, Menu
from django.contrib.auth.admin import UserAdmin
# Register your models here.
class ReservationAdmin(admin.ModelAdmin):
list_display = ['first_name', 'last_name',
'email', 'people', 'time', 'phone',
'date_reserved', 'status']
class MyUserAdmin(UserAdmin):
model = User
fieldsets = UserAdmin.fieldsets + (
(None, {'fields': ('picture',)}),
)
class MenuAdmin(admin.ModelAdmin):
list_display = ['name', 'price','picture',
'description', 'type', 'category','location']
admin.site.register(User, MyUserAdmin)
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Menu, MenuAdmin)
| 29.576923
| 65
| 0.664499
|
11660db8b9a2b8826662289b3fc414ba1eefb3d9
| 84
|
py
|
Python
|
timi_robot/__init__.py
|
lxl0928/wechat_work_logger_robot
|
a8e3e968e31c94a5ae8dee573a1bc4f7dccd7d95
|
[
"Apache-2.0"
] | 1
|
2019-12-16T18:20:42.000Z
|
2019-12-16T18:20:42.000Z
|
timi_robot/__init__.py
|
lxl0928/wechat_work_logger_robot
|
a8e3e968e31c94a5ae8dee573a1bc4f7dccd7d95
|
[
"Apache-2.0"
] | null | null | null |
timi_robot/__init__.py
|
lxl0928/wechat_work_logger_robot
|
a8e3e968e31c94a5ae8dee573a1bc4f7dccd7d95
|
[
"Apache-2.0"
] | 1
|
2021-04-11T04:36:50.000Z
|
2021-04-11T04:36:50.000Z
|
# coding: utf-8
from timi_robot.logger import SensoroLogger as SensoroLoggerClient
| 21
| 66
| 0.833333
|
4b9873d632dd92a7ab446f10f8bee4dc53b10b68
| 38,017
|
py
|
Python
|
services/core/BACnetProxy/bacnet_proxy/agent.py
|
craig8/volttron
|
2a954311d323effa3b79c2a53f6e8c3bb9664e1c
|
[
"Apache-2.0",
"BSD-2-Clause"
] | 1
|
2020-06-08T16:54:28.000Z
|
2020-06-08T16:54:28.000Z
|
services/core/BACnetProxy/bacnet_proxy/agent.py
|
craig8/volttron
|
2a954311d323effa3b79c2a53f6e8c3bb9664e1c
|
[
"Apache-2.0",
"BSD-2-Clause"
] | 8
|
2016-10-07T22:49:28.000Z
|
2022-02-23T00:57:58.000Z
|
services/core/BACnetProxy/bacnet_proxy/agent.py
|
craig8/volttron
|
2a954311d323effa3b79c2a53f6e8c3bb9664e1c
|
[
"Apache-2.0",
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
#
# Copyright 2020, Battelle Memorial Institute.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government. Neither the United States Government nor the
# United States Department of Energy, nor Battelle, nor any of their
# employees, nor any jurisdiction or organization that has cooperated in the
# development of these materials, makes any warranty, express or
# implied, or assumes any legal liability or responsibility for the accuracy,
# completeness, or usefulness or any information, apparatus, product,
# software, or process disclosed, or represents that its use would not infringe
# privately owned rights. Reference herein to any specific commercial product,
# process, or service by trade name, trademark, manufacturer, or otherwise
# does not necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors expressed
# herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by
# BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
import logging
import sys
import datetime
from volttron.platform.vip.agent import Agent, RPC
from volttron.platform.async_ import AsyncCall
from volttron.platform.agent import utils
from volttron.platform.messaging import topics, headers
utils.setup_logging()
_log = logging.getLogger(__name__)
bacnet_logger = logging.getLogger("bacpypes")
bacnet_logger.setLevel(logging.WARNING)
__version__ = '0.5'
from collections import defaultdict
from queue import Queue, Empty
from bacpypes.task import RecurringTask
import bacpypes.core
import threading
# Tweeks to BACpypes to make it play nice with Gevent.
bacpypes.core.enable_sleeping()
from bacpypes.pdu import Address, GlobalBroadcast
from bacpypes.app import BIPSimpleApplication
from bacpypes.service.device import LocalDeviceObject
from bacpypes.object import get_datatype
from bacpypes.apdu import (ReadPropertyRequest,
WritePropertyRequest,
Error,
AbortPDU,
RejectPDU,
ReadPropertyACK,
SimpleAckPDU,
ReadPropertyMultipleRequest,
ReadPropertyMultipleACK,
PropertyReference,
ReadAccessSpecification,
encode_max_apdu_length_accepted,
WhoIsRequest,
IAmRequest,
ConfirmedRequestSequence,
SubscribeCOVRequest,
ConfirmedCOVNotificationRequest)
from bacpypes.primitivedata import (Null, Atomic, Enumerated, Integer,
Unsigned, Real)
from bacpypes.constructeddata import Array, Any, Choice
from bacpypes.basetypes import ServicesSupported
from bacpypes.task import TaskManager
from gevent.event import AsyncResult
from volttron.platform.agent.known_identities import PLATFORM_DRIVER
# Make sure the TaskManager singleton exists...
task_manager = TaskManager()
class SubscriptionContext(object):
"""
Object for maintaining BACnet change of value subscriptions with points on a device
"""
def __init__(self, device_path, address, point_name, object_type, instance_number, sub_process_id, lifetime=None):
self.device_path = device_path
self.device_address = address
# Arbitrary value which ties COVRequests to a subscription object
self.subscriberProcessIdentifier = sub_process_id
self.point_name = point_name
self.monitoredObjectIdentifier = (object_type, instance_number)
self.lifetime = lifetime
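    # Illustrative sketch only (all values are placeholders): the proxy keeps
    # one context per subscribed point, keyed by its subscriberProcessIdentifier.
    #
    #   ctx = SubscriptionContext("devices/campus/building/device",
    #                             Address("192.168.1.30"), "ZoneTemp",
    #                             "analogInput", 3, sub_process_id=1, lifetime=300)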
class BACnetApplication(BIPSimpleApplication, RecurringTask):
def __init__(self, i_am_callback, send_cov_subscription_callback, forward_cov_callback, request_check_interval,
*args):
BIPSimpleApplication.__init__(self, *args)
RecurringTask.__init__(self, request_check_interval)
self.i_am_callback = i_am_callback
self.send_cov_subscription_callback = send_cov_subscription_callback
self.forward_cov_callback = forward_cov_callback
self.request_queue = Queue()
# assigning invoke identifiers
self.nextInvokeID = 1
# keep track of requests to line up responses
self.iocb = {}
# Tracking mechanism for matching COVNotifications to a COV
# subscriptionContext object
self.sub_cov_contexts = {}
self.cov_sub_process_ID = 1
self.install_task()
def process_task(self):
while True:
try:
iocb = self.request_queue.get(False)
except Empty:
break
self.handle_request(iocb)
def submit_request(self, iocb):
self.request_queue.put(iocb)
def get_next_invoke_id(self, addr):
"""Called to get an unused invoke ID."""
initial_id = self.nextInvokeID
while 1:
invoke_id = self.nextInvokeID
self.nextInvokeID = (self.nextInvokeID + 1) % 256
# see if we've checked for them all
if initial_id == self.nextInvokeID:
raise RuntimeError("no available invoke ID")
# see if this one is used
if (addr, invoke_id) not in self.iocb:
break
return invoke_id
def handle_request(self, iocb):
apdu = iocb.ioRequest
if isinstance(apdu, ConfirmedRequestSequence):
# assign an invoke identifier
apdu.apduInvokeID = self.get_next_invoke_id(apdu.pduDestination)
# build a key to reference the IOCB when the response comes back
invoke_key = (apdu.pduDestination, apdu.apduInvokeID)
# keep track of the request
self.iocb[invoke_key] = iocb
try:
self.request(apdu)
except Exception as e:
iocb.set_exception(e)
def _get_iocb_key_for_apdu(self, apdu):
return apdu.pduSource, apdu.apduInvokeID
def _get_iocb_for_apdu(self, apdu, invoke_key):
# find the request
working_iocb = self.iocb.get(invoke_key, None)
if working_iocb is None:
_log.error("no matching request for confirmation")
return None
del self.iocb[invoke_key]
if isinstance(apdu, AbortPDU):
working_iocb.set_exception(RuntimeError("Device communication aborted: " + str(apdu)))
return None
elif isinstance(apdu, Error):
working_iocb.set_exception(RuntimeError("Error during device communication: " + str(apdu)))
return None
elif isinstance(apdu, RejectPDU):
working_iocb.set_exception(
RuntimeError("Device at {source} rejected the request: {reason}".format(
source=apdu.pduSource, reason=apdu.apduAbortRejectReason)))
return None
else:
return working_iocb
def _get_value_from_read_property_request(self, apdu, working_iocb):
# find the datatype
#_log.debug("WIGGEDYWACKYO")
datatype = get_datatype(apdu.objectIdentifier[0], apdu.propertyIdentifier)
if not datatype:
working_iocb.set_exception(TypeError("unknown datatype"))
return
# special case for array parts, others are managed by cast_out
if issubclass(datatype, Array) and apdu.propertyArrayIndex is not None:
if apdu.propertyArrayIndex == 0:
value = apdu.propertyValue.cast_out(Unsigned)
else:
value = apdu.propertyValue.cast_out(datatype.subtype)
else:
value = apdu.propertyValue.cast_out(datatype)
if issubclass(datatype, Enumerated):
value = datatype(value).get_long()
return value
def _get_value_from_property_value(self, property_value, datatype, working_iocb):
value = property_value.cast_out(datatype)
if issubclass(datatype, Enumerated):
value = datatype(value).get_long()
try:
if issubclass(datatype, Array) and issubclass(datatype.subtype, Choice):
new_value = []
for item in value.value[1:]:
result = list(item.dict_contents().values())
if result[0] != ():
new_value.append(result[0])
else:
new_value.append(None)
value = new_value
except Exception as e:
_log.exception(e)
working_iocb.set_exception(e)
return
return value
def confirmation(self, apdu):
# return iocb if exists, otherwise sets error and returns
invoke_key = self._get_iocb_key_for_apdu(apdu)
working_iocb = self._get_iocb_for_apdu(apdu, invoke_key)
if not working_iocb:
return
if isinstance(working_iocb.ioRequest, ReadPropertyRequest) and isinstance(apdu, ReadPropertyACK):
# handle receiving covIncrement read results by calling
# the send_cov_subscription callback if a subscription exists and
# the covIncrement is valid
value = self._get_value_from_read_property_request(apdu, working_iocb)
if apdu.propertyIdentifier == 'covIncrement':
_log.debug("received read covIncrement property response from {}".format(apdu.pduSource))
subscription = None
subscription_id = -1
for key, sub in self.sub_cov_contexts.items():
if sub.device_address == apdu.pduSource and \
sub.monitoredObjectIdentifier[0] == apdu.objectIdentifier[0] and \
sub.monitoredObjectIdentifier[1] == apdu.objectIdentifier[1]:
subscription = sub
subscription_id = key
if subscription:
if value:
_log.info("covIncrement is {} for point {} on device".format(
value, subscription.point_name, subscription.device_path))
self.send_cov_subscription_callback(apdu.pduSource,
subscription.subscriberProcessIdentifier,
subscription.monitoredObjectIdentifier,
subscription.lifetime,
subscription.point_name)
else:
_log.warning("point {} on device {} does not have a valid covIncrement property")
self.bacnet_application.sub_cov_contexts.pop(subscription_id)
else:
_log.error('Received read covIncrement response, but no subscription context exists for {} on {}'.
format(subscription.device_path, subscription.point_name))
else:
working_iocb.set(value)
return
elif isinstance(working_iocb.ioRequest, WritePropertyRequest) and isinstance(apdu, SimpleAckPDU):
working_iocb.set(apdu)
return
# Simple record-keeping for subscription request responses
elif isinstance(working_iocb.ioRequest, SubscribeCOVRequest) and isinstance(apdu, SimpleAckPDU):
_log.debug("COV subscription established for {} on {}".format(
                working_iocb.ioRequest.monitoredObjectIdentifier, working_iocb.ioRequest.pduSource))
working_iocb.set(apdu)
return
elif isinstance(working_iocb.ioRequest, SubscribeCOVRequest) and not isinstance(apdu, SimpleAckPDU):
_log.error("The SubscribeCOVRequest for {} failed to establish a subscription.".format(
                working_iocb.ioRequest.monitoredObjectIdentifier))
return
elif isinstance(working_iocb.ioRequest, ReadPropertyMultipleRequest) and \
isinstance(apdu, ReadPropertyMultipleACK):
result_dict = {}
for result in apdu.listOfReadAccessResults:
# here is the object identifier
object_identifier = result.objectIdentifier
# now come the property values per object
for element in result.listOfResults:
# get the property and array index
property_identifier = element.propertyIdentifier
property_array_index = element.propertyArrayIndex
# here is the read result
read_result = element.readResult
# check for an error
if read_result.propertyAccessError is not None:
error_obj = read_result.propertyAccessError
msg = 'ERROR DURING SCRAPE of {2} (Class: {0} Code: {1})'
_log.error(msg.format(error_obj.errorClass, error_obj.errorCode, object_identifier))
else:
# here is the value
property_value = read_result.propertyValue
# find the datatype
datatype = get_datatype(object_identifier[0], property_identifier)
if not datatype:
working_iocb.set_exception(TypeError("unknown datatype"))
return
# special case for array parts, others are managed
# by cast_out
if issubclass(datatype, Array) and property_array_index is not None:
if property_array_index == 0:
value = property_value.cast_out(Unsigned)
else:
value = property_value.cast_out(datatype.subtype)
else:
value = self._get_value_from_property_value(property_value, datatype, working_iocb)
result_dict[object_identifier[0], object_identifier[1], property_identifier,
property_array_index] = value
working_iocb.set(result_dict)
else:
_log.error("For invoke key {key} Unsupported Request Response pair Request: {request} Response: {response}".
format(key=invoke_key, request=working_iocb.ioRequest, response=apdu))
working_iocb.set_exception(TypeError('Unsupported Request Type'))
def indication(self, apdu):
if isinstance(apdu, IAmRequest):
device_type, device_instance = apdu.iAmDeviceIdentifier
if device_type != 'device':
# Bail without an error.
return
_log.debug("Calling IAm callback.")
self.i_am_callback(str(apdu.pduSource),
device_instance,
apdu.maxAPDULengthAccepted,
str(apdu.segmentationSupported),
apdu.vendorID)
elif isinstance(apdu, ConfirmedCOVNotificationRequest):
# Handling for ConfirmedCOVNotificationRequests. These requests are
# sent by the device when a point with a COV subscription updates
            # past the covIncrement threshold (see the COV_Detection class in
# Bacpypes:
# https://bacpypes.readthedocs.io/en/latest/modules/service/cov.html)
_log.debug("ConfirmedCOVNotificationRequest received from {}".format(apdu.pduSource))
point_name = None
device_path = None
result_dict = {}
for element in apdu.listOfValues:
property_id = element.propertyIdentifier
if not property_id == "statusFlags":
values = []
for tag in element.value.tagList:
values.append(tag.app_to_object().value)
if len(values) == 1:
result_dict[property_id] = values[0]
else:
result_dict[property_id] = values
if result_dict:
context = self.sub_cov_contexts[apdu.subscriberProcessIdentifier]
point_name = context.point_name
device_path = context.device_path
if point_name and device_path:
self.forward_cov_callback(device_path, point_name, result_dict)
else:
_log.debug("Device {} does not have a subscription context.".format(apdu.monitoredObjectIdentifier))
# forward it along
BIPSimpleApplication.indication(self, apdu)
write_debug_str = "Writing: {target} {type} {instance} {property} (Priority: {priority}, Index: {index}): {value}"
def bacnet_proxy_agent(config_path, **kwargs):
config = utils.load_config(config_path)
device_address = config["device_address"]
max_apdu_len = config.get("max_apdu_length", 1024)
seg_supported = config.get("segmentation_supported", "segmentedBoth")
obj_id = config.get("object_id", 599)
obj_name = config.get("object_name", "Volttron BACnet driver")
ven_id = config.get("vendor_id", 15)
max_per_request = config.get("default_max_per_request", 1000000)
request_check_interval = config.get("request_check_interval", 100)
return BACnetProxyAgent(device_address, max_apdu_len, seg_supported, obj_id, obj_name, ven_id, max_per_request,
request_check_interval=request_check_interval, heartbeat_autostart=True, **kwargs)
class BACnetProxyAgent(Agent):
"""
This agent creates a virtual bacnet device that is used by the bacnet driver interface to communicate with devices.
"""
def __init__(self, device_address, max_apdu_len, seg_supported, obj_id, obj_name, ven_id, max_per_request,
request_check_interval=100, **kwargs):
super(BACnetProxyAgent, self).__init__(**kwargs)
async_call = AsyncCall()
self.bacnet_application = None
# IO callback
class IOCB:
def __init__(self, request):
# requests and responses
self.ioRequest = request
self.ioResult = AsyncResult()
def set(self, value):
async_call.send(None, self.ioResult.set, value)
def set_exception(self, exception):
async_call.send(None, self.ioResult.set_exception, exception)
self.iocb_class = IOCB
self._max_per_request = max_per_request
self.setup_device(async_call, device_address, max_apdu_len, seg_supported, obj_id, obj_name, ven_id,
request_check_interval)
def setup_device(self, async_call, address,
max_apdu_len=1024,
seg_supported='segmentedBoth',
obj_id=599,
obj_name='sMap BACnet driver',
ven_id=15,
request_check_interval=100):
_log.info('seg_supported '+str(seg_supported))
_log.info('max_apdu_len '+str(max_apdu_len))
_log.info('obj_id '+str(obj_id))
_log.info('obj_name '+str(obj_name))
_log.info('ven_id '+str(ven_id))
# Check to see if they gave a valid apdu length.
if encode_max_apdu_length_accepted(max_apdu_len) is None:
raise ValueError("Invalid max_apdu_len: {} Valid options are 50, 128, 206, 480, 1024, and 1476".format(
max_apdu_len))
this_device = LocalDeviceObject(
objectName=obj_name,
objectIdentifier=obj_id,
maxApduLengthAccepted=max_apdu_len,
segmentationSupported=seg_supported,
vendorIdentifier=ven_id,
)
# build a bit string that knows about the bit names.
pss = ServicesSupported()
pss['whoIs'] = 1
pss['iAm'] = 1
# set the property value to be just the bits
this_device.protocolServicesSupported = pss.value
def i_am_callback(address, device_id, max_apdu_len, seg_supported, vendor_id):
async_call.send(None, self.i_am, address, device_id, max_apdu_len, seg_supported, vendor_id)
def send_cov_subscription_callback(device_address, subscriber_process_identifier, monitored_object_identifier,
lifetime, point_name):
"""
Asynchronous cov subscription callback for gevent
"""
async_call.send(None, self.send_cov_subscription, device_address, subscriber_process_identifier,
monitored_object_identifier, lifetime, point_name)
def forward_cov_callback(point_name, apdu, result_dict):
"""
Asynchronous callback to forward cov values to the master driver
for gevent
"""
async_call.send(None, self.forward_cov, point_name, apdu, result_dict)
self.bacnet_application = BACnetApplication(i_am_callback,
send_cov_subscription_callback,
forward_cov_callback,
request_check_interval,
this_device,
address)
# Having a recurring task makes the spin value kind of irrelevant.
kwargs = {"spin": 0.1,
"sigterm": None,
"sigusr1": None}
server_thread = threading.Thread(target=bacpypes.core.run, kwargs=kwargs)
# exit the BACnet App thread when the main thread terminates
server_thread.daemon = True
server_thread.start()
def i_am(self, address, device_id, max_apdu_len, seg_supported, vendor_id):
"""
Called by the BACnet application when a WhoIs is received. Publishes the IAm to the pubsub.
"""
_log.debug("IAm received: Address: {} Device ID: {} Max APDU: {} Segmentation: {} Vendor: {}".format(
address, device_id, max_apdu_len, seg_supported, vendor_id))
header = {headers.TIMESTAMP: utils.format_timestamp(datetime.datetime.utcnow())}
value = {"address": address,
"device_id": device_id,
"max_apdu_length": max_apdu_len,
"segmentation_supported": seg_supported,
"vendor_id": vendor_id}
self.vip.pubsub.publish('pubsub', topics.BACNET_I_AM, header, message=value)
def forward_cov(self, device_path, point_name, result_dict):
"""
Called by the BACnet application when a ConfirmedCOVNotification Request
is received. Publishes the COV to the pubsub through the device's
driver agent
:param device_path: path of the device for use in publish topic
:param point_name: COV notification contains values for this point
:param result_dict: dictionary of values from the point
"""
self.vip.rpc.call(PLATFORM_DRIVER, 'forward_bacnet_cov_value', device_path, point_name, result_dict)
@RPC.export
def who_is(self, low_device_id=None, high_device_id=None, target_address=None):
_log.debug("Sending WhoIs: low_id: {low} high: {high} address: {address}".format(
low=low_device_id, high=high_device_id, address=target_address))
request = WhoIsRequest()
if low_device_id is not None:
request.deviceInstanceRangeLowLimit = low_device_id
if high_device_id is not None:
request.deviceInstanceRangeHighLimit = high_device_id
if target_address is not None:
request.pduDestination = Address(target_address)
else:
request.pduDestination = GlobalBroadcast()
iocb = self.iocb_class(request)
self.bacnet_application.submit_request(iocb)
@RPC.export
def ping_device(self, target_address, device_id):
"""
Ping a device with a whois to potentially setup routing.
"""
_log.debug("Pinging " + target_address)
self.who_is(device_id, device_id, target_address)
def _cast_value(self, value, datatype):
if datatype is Integer:
value = int(value)
elif datatype is Real:
value = float(value)
elif datatype is Unsigned:
value = int(value)
return datatype(value)
@RPC.export
def write_property(self, target_address, value, object_type, instance_number, property_name, priority=None,
index=None):
"""
Write to a property.
"""
_log.debug(write_debug_str.format(target=target_address,
type=object_type,
instance=instance_number,
property=property_name,
priority=priority,
index=index,
value=value))
request = WritePropertyRequest(objectIdentifier=(object_type, instance_number),
propertyIdentifier=property_name)
datatype = get_datatype(object_type, property_name)
bac_value = None
if value is None or value == 'null':
bac_value = Null()
elif issubclass(datatype, Atomic):
bac_value = self._cast_value(value, datatype)
elif issubclass(datatype, Array) and (index is not None):
if index == 0:
bac_value = Integer(value)
elif issubclass(datatype.subtype, Atomic):
bac_value = datatype.subtype(value)
elif not isinstance(value, datatype.subtype):
raise TypeError("invalid result datatype, expecting {}".format(datatype.subtype.__name__,))
elif not isinstance(value, datatype):
raise TypeError("invalid result datatype, expecting %s".format(datatype.__name__,))
request.propertyValue = Any()
request.propertyValue.cast_in(bac_value)
request.pduDestination = Address(target_address)
# Optional index
if index is not None:
request.propertyArrayIndex = index
# Optional priority
if priority is not None:
request.priority = priority
iocb = self.iocb_class(request)
self.bacnet_application.submit_request(iocb)
result = iocb.ioResult.get(10)
if isinstance(result, SimpleAckPDU):
return value
raise RuntimeError("Failed to set value: " + str(result))
def read_using_single_request(self, target_address, point_map):
results = {}
for point, properties in point_map.items():
if len(properties) == 3:
object_type, instance_number, property_name = properties
property_index = None
elif len(properties) == 4:
(object_type, instance_number, property_name,
property_index) = properties
else:
_log.error("skipping {} in request to {}: incorrect number of parameters".format(point, target_address))
continue
try:
results[point] = self.read_property(
target_address, object_type, instance_number, property_name, property_index)
except Exception as e:
_log.error("Error reading point {} from {}: {}".format(point, target_address, e))
return results
@RPC.export
def read_property(self, target_address, object_type, instance_number, property_name, property_index=None):
request = ReadPropertyRequest(
objectIdentifier=(object_type, instance_number),
propertyIdentifier=property_name,
propertyArrayIndex=property_index)
request.pduDestination = Address(target_address)
iocb = self.iocb_class(request)
self.bacnet_application.submit_request(iocb)
bacnet_results = iocb.ioResult.get(10)
return bacnet_results
def _get_access_spec(self, obj_data, properties):
count = 0
obj_type, obj_inst = obj_data
prop_ref_list = []
for prop, prop_index in properties:
prop_ref = PropertyReference(propertyIdentifier=prop)
if prop_index is not None:
prop_ref.propertyArrayIndex = prop_index
prop_ref_list.append(prop_ref)
count += 1
return (ReadAccessSpecification(objectIdentifier=(obj_type, obj_inst), listOfPropertyReferences=prop_ref_list),
count)
def _get_object_properties(self, point_map, target_address):
# This will be used to get the results mapped back on the the names
reverse_point_map = {}
# Used to group properties together for the request.
object_property_map = defaultdict(list)
for name, properties in point_map.items():
if len(properties) == 3:
(object_type, instance_number,
property_name) = properties
property_index = None
elif len(properties) == 4:
(object_type, instance_number, property_name,
property_index) = properties
else:
_log.error("skipping {} in request to {}: incorrect number of parameters".format(name, target_address))
continue
object_property_map[object_type, instance_number].append((property_name, property_index))
reverse_point_map[object_type, instance_number, property_name, property_index] = name
return object_property_map, reverse_point_map
@RPC.export
def read_properties(self, target_address, point_map, max_per_request=None, use_read_multiple=True):
"""
Read a set of points and return the results
"""
if not use_read_multiple:
return self.read_using_single_request(target_address, point_map)
# Set max_per_request really high if not set.
if max_per_request is None:
max_per_request = self._max_per_request
_log.debug("Reading {count} points on {target}, max per scrape: {max}".format(
count=len(point_map), target=target_address, max=max_per_request))
# process point map and populate object_property_map and
# reverse_point_map
(object_property_map, reverse_point_map) = self._get_object_properties(point_map, target_address)
result_dict = {}
finished = False
while not finished:
read_access_spec_list = []
count = 0
for _ in range(max_per_request):
try:
obj_data, properties = object_property_map.popitem()
except KeyError:
finished = True
break
(spec_list, spec_count) = self._get_access_spec(obj_data, properties)
count += spec_count
read_access_spec_list.append(spec_list)
if read_access_spec_list:
_log.debug("Requesting {count} properties from {target}".format(count=count, target=target_address))
request = ReadPropertyMultipleRequest(listOfReadAccessSpecs=read_access_spec_list)
request.pduDestination = Address(target_address)
iocb = self.iocb_class(request)
self.bacnet_application.submit_request(iocb)
bacnet_results = iocb.ioResult.get(10)
_log.debug("Received read response from {target} count: {count}".format(
count=count, target=target_address))
for prop_tuple, value in bacnet_results.items():
name = reverse_point_map[prop_tuple]
result_dict[name] = value
return result_dict
@RPC.export
def create_cov_subscription(self, address, device_path, point_name, object_type, instance_number, lifetime=None):
"""
Called by the BACnet interface to establish a COV subscription with a
        BACnet device. If there is an existing subscription for the point, a
subscribeCOVRequest is sent immediately, otherwise the covIncrement
property is confirmed to be valid before sending the subscription
request.
:param address: address of the device to which the subscription
request will be sent
:param device_path: path of the device used for the publishing topic
:param point_name: point name for which we would like to establish the
subscription
:param object_type:
:param instance_number: Arbitrarily assigned value for tracking in the
subscription context
:param lifetime: lifetime in seconds for the device to maintain the
subscription
"""
if not isinstance(address, str):
raise RuntimeError("COV subscriptions require the address of the target device as a string")
# if a subscription exists, send a cov subscription request
# otherwise check the point's covIncrement
subscription = None
for check_sub in self.bacnet_application.sub_cov_contexts.values():
if check_sub.point_name == point_name and \
check_sub.monitoredObjectIdentifier == (object_type, instance_number):
subscription = check_sub
if subscription:
self.send_cov_subscription(subscription.device_address,
subscription.subscriberProcessIdentifier,
subscription.monitoredObjectIdentifier,
subscription.lifetime,
subscription.point_name)
else:
subscription = SubscriptionContext(device_path, Address(address),
point_name,
object_type, instance_number,
self.bacnet_application.cov_sub_process_ID,
lifetime)
# check whether the device has a usable covIncrement
try:
_log.debug("establishing cov subscription for point {} on device {}".format(point_name, device_path))
self.bacnet_application.sub_cov_contexts[self.bacnet_application.cov_sub_process_ID] = subscription
self.bacnet_application.cov_sub_process_ID += 1
_log.debug("sending read property request for point {} on device {}".format(point_name, device_path))
self.read_property(address, object_type, instance_number, 'covIncrement')
except Exception as error:
_log.warning("the covIncrement for {} on {} could not be read, no cov subscription was established".
format(point_name, device_path))
_log.error(error)
def send_cov_subscription(self, address, subscriber_process_identifier, monitored_object_identifier, lifetime,
point_name):
"""
Send request to remote BACnet device to create subscription for COV on a point.
:param address: address of the device to which the subscription
request will be sent
:param subscriber_process_identifier: arbitrarily set value for
tracking cov subscriptions
:param monitored_object_identifier: (object_type, instance_number) from
the subscription context
:param lifetime: lifetime in seconds for the device to maintain the
subscription
:param point_name:point name for which we would like to establish the
subscription
:return:
"""
subscribe_cov_request = SubscribeCOVRequest(
subscriberProcessIdentifier=subscriber_process_identifier,
monitoredObjectIdentifier=monitored_object_identifier,
issueConfirmedNotifications=True,
lifetime=lifetime
)
subscribe_cov_request.pduDestination = address
iocb = self.iocb_class(subscribe_cov_request)
self.bacnet_application.submit_request(iocb)
_log.debug("COV subscription sent to device at {} for {}".format(address, point_name))
def main(argv=sys.argv):
"""
Main method called to start the agent.
"""
utils.vip_main(bacnet_proxy_agent, identity="platform.bacnet_proxy", version=__version__)
if __name__ == '__main__':
# Entry point for script
try:
sys.exit(main())
except KeyboardInterrupt:
pass
| 43.299544
| 120
| 0.619354
|
726c09fe9f424eb77d139d9a7c49c5458a3b702a
| 5,441
|
py
|
Python
|
astar.py
|
DanielaMorariu1990/Supermarket_MCMC_simulation
|
c94365ea93864cebd5d356670f4639ce21815e4c
|
[
"MIT"
] | 2
|
2020-11-16T14:09:55.000Z
|
2021-03-05T15:00:29.000Z
|
astar.py
|
DanielaMorariu1990/Supermarket_MCMC_simulation
|
c94365ea93864cebd5d356670f4639ce21815e4c
|
[
"MIT"
] | null | null | null |
astar.py
|
DanielaMorariu1990/Supermarket_MCMC_simulation
|
c94365ea93864cebd5d356670f4639ce21815e4c
|
[
"MIT"
] | 2
|
2020-11-16T10:24:37.000Z
|
2020-11-22T10:32:46.000Z
|
"""
A* algorithm
"""
import operator
import numpy as np
import pandas as pd
from animation_template import MARKET
def heuristic(current, target):
"""calculating the estimated distance between the current node and the targeted node
-> Manhattan Distance"""
result = (abs(target[0] - current[0]) + abs(target[1] - current[1]))
# print(result)
return result
def walkable(grid_array):
"""checks if node is on the grid and not an obstacle"""
walkable = []
for i in range(len(grid_array)):
for j in range(len(grid_array[0])):
if grid_array[i, j] == 0:
walkable.append((i, j))
# print(walkable)
return walkable
def get_path_from_finish(current):
"""Traces back the path thourgh parent-nodes"""
backwards = []
while current:
backwards.append(current.location)
current = current.parent
backwards.reverse()
return backwards
def create_neighbours(poss_moves, current_node, finish_node, grid_array, frontier):
"""Creates neighbour-nodes for current node and adds them to the frontier-list"""
for move in poss_moves:
node_position = (current_node.location[0] + move[0],
current_node.location[1] + move[1])
if node_position in walkable(grid_array):
neighbour = Node(parent=current_node,
location=node_position,
cost=current_node.cost + 1,
heur=heuristic(node_position, finish_node.location))
# print(neighbour)
frontier.append(neighbour)
return frontier
def find_path(grid_array, start, finish, p_moves):
""" A*-Algorithm that finds the shortest path between
given nodes and returns it as list of tuples"""
start_node = Node(None, start)
finish_node = Node(None, finish)
frontier = [start_node]
while frontier:
frontier.sort(key=operator.attrgetter('f_value'))
current_node = frontier[0]
frontier.pop(0)
if current_node.location != finish_node.location:
frontier = create_neighbours(
p_moves, current_node, finish_node, grid_array, frontier)
# print(frontier)
else:
shortest_path = get_path_from_finish(current_node)
return shortest_path
class Node():
"""Class for the nodes of a pathfinding grid"""
def __init__(self, parent, location, cost=0, heur=0):
self.parent = parent
self.location = location
self.cost = cost # distance from start-node (cost)
self.heur = heur # approx. distance to goal-node
self.f_value = self.cost + self.heur # sum of cost and heuristic value
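# A minimal, self-contained sketch of how find_path is called; the grid, start
# and goal below are demo values only and are not used by the simulation.
def _demo_find_path():
    demo_grid = np.array([
        [0, 0, 0, 0],
        [0, 1, 1, 0],
        [0, 0, 0, 0],
    ])
    demo_moves = [(0, 1), (0, -1), (1, 0), (-1, 0)]
    # returns the list of (row, col) tuples from (0, 0) to (2, 3)
    return find_path(demo_grid, (0, 0), (2, 3), demo_moves)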
grid = np.array([
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
])
possible_moves = [(0, 1), (0, -1), (1, 0), (-1, 0),
(1, 1), (1, -1), (-1, 1), (-1, -1)]
if __name__ == '__main__':
grid = np.array([
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1],
[1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 0, 1],
[1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1],
])
# wakable = grid[1, 2:16]
# wakable = wakable.append(grid[3:11, 2:4])
# wakable = wakable.append(grid[3:11, 6:8])
# walk = walkable(grid)
# print(walk)
# pd.DataFrame(walk).to_csv("./data/walk.csv")
# # y,x positions
# start_given = (1, 0)
# finish_given = (2, 5)
# possible_moves = [(0, 1), (0, -1), (1, 0), (-1, 0),
# (1, 1), (1, -1), (-1, 1), (-1, -1)]
# path = find_path(grid, start_given, finish_given, possible_moves)
# print(path)
# strat = x, y from Customer
# finish_given= transition_matrix.loc[]
# for the path function:
# - grid =market
# grid = MARKET
# print(MARKET)
entrance = (15, 10)
dairy = (5, 2)
fruits = (6, 5)
spices = (9, 6)
drinks = (4, 6)
checkout = (9, 8)
possible_moves = [(0, 1), (0, -1), (1, 0), (-1, 0),
(1, 1), (1, -1), (-1, 1), (-1, -1)]
path = find_path(grid, (10+1, 15+1), (2+1, 5+1), possible_moves)
print(path)
| 34.00625
| 88
| 0.502481
|
06eaaec4c1061083b9538762edd78057a921578b
| 2,273
|
py
|
Python
|
models/pdq_search.py
|
rishabhiitbhu/btech_project
|
dda8694325ab12e88ec06e4242a57524b29076c8
|
[
"MIT"
] | 298
|
2018-05-03T09:59:32.000Z
|
2022-03-30T02:51:46.000Z
|
models/pdq_search.py
|
binguidata/load_forecasting
|
dda8694325ab12e88ec06e4242a57524b29076c8
|
[
"MIT"
] | 15
|
2019-07-16T12:28:50.000Z
|
2022-02-10T00:01:11.000Z
|
models/pdq_search.py
|
binguidata/load_forecasting
|
dda8694325ab12e88ec06e4242a57524b29076c8
|
[
"MIT"
] | 120
|
2018-09-21T09:06:47.000Z
|
2022-03-27T02:26:15.000Z
|
# from pyramid.arima import auto_arima
import pandas as pd
import logging
import itertools
import numpy as np
import statsmodels.api as sm
import warnings
warnings.filterwarnings("ignore") # specify to ignore warning messages
# to store the log in a file called 'pdq_log.txt'
logging.basicConfig(
filename='pdq_log.txt',
filemode='a',
level=logging.INFO,
format="%(asctime)s %(message)s",
)
logger = logging.getLogger()
console = logging.StreamHandler()
logger.addHandler(console)
data = pd.read_csv('monthdata.csv', header=None, index_col=['datetime'], names=['datetime', 'load'], parse_dates=['datetime'], infer_datetime_format=True)
data = data.asfreq(freq='H', method='bfill') # sample the data in hourly manner
# Define the p, d and q parameters to take any value between 0 and 3
p = d = q = range(0, 3)
# Generate all different combinations of p, q and q triplets
pdq = list(itertools.product(p, d, q))
# Generate all different combinations of seasonal p, q and q triplets
seasonal_pdq = [(x[0], x[1], x[2], 24) for x in list(itertools.product(p, d, q))]
logger.info('pdq:')
logger.info(pdq)
logger.info('seasonal_pdq')
logger.info(seasonal_pdq)
bestAIC = np.inf
bestParam = None
bestSParam = None
logger.info('Running GridSearch')
# use grid search to look for optimal ARIMA parameters
for param in pdq:
for param_seasonal in seasonal_pdq:
try:
mod = sm.tsa.statespace.SARIMAX(data,
order=param,
seasonal_order=param_seasonal,
enforce_stationarity=False,
enforce_invertibility=False)
results = mod.fit()
            logger.info('ARIMA{}x{}24 - AIC:{}'.format(param, param_seasonal, results.aic))
#if current run of AIC is better than the best one so far, overwrite it
if results.aic < bestAIC:
bestAIC = results.aic
bestParam = param
bestSParam = param_seasonal
except Exception as e:
print(e)
logger.info('the best bestAIC, bestParam, bestSParam:')
logger.info(bestAIC)
logger.info(bestParam)
logger.info(bestSParam)
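# Follow-up sketch (reference only, kept as comments): the selected parameters
# would typically be refit on the data and used to forecast, e.g.
#     final_model = sm.tsa.statespace.SARIMAX(data, order=bestParam,
#                                             seasonal_order=bestSParam,
#                                             enforce_stationarity=False,
#                                             enforce_invertibility=False).fit()
#     forecast = final_model.get_forecast(steps=24).predicted_mean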
| 30.716216
| 154
| 0.641443
|
6e3cc8c24b28610e7afe7ef0c9d01601e5941d46
| 842
|
py
|
Python
|
test/many-actions/gyptest-many-actions-unsorted.py
|
luguocfw/GYP-Tools
|
41414159d032530acbf5e426954e1020ea1aa740
|
[
"BSD-3-Clause"
] | 34
|
2015-01-14T03:21:08.000Z
|
2020-04-26T10:06:56.000Z
|
core/deps/gyp/test/many-actions/gyptest-many-actions-unsorted.py
|
K-Constantine/Amaraki
|
e8736e4754af62a8510c3a5db8a72df48f7681a7
|
[
"MIT"
] | 1
|
2019-02-03T09:45:13.000Z
|
2019-02-03T09:45:13.000Z
|
core/deps/gyp/test/many-actions/gyptest-many-actions-unsorted.py
|
K-Constantine/Amaraki
|
e8736e4754af62a8510c3a5db8a72df48f7681a7
|
[
"MIT"
] | 29
|
2015-02-13T00:18:53.000Z
|
2021-02-10T23:38:58.000Z
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure lots of actions in the same target don't cause exceeding command
line length.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('many-actions-unsorted.gyp')
test.build('many-actions-unsorted.gyp', test.ALL)
for i in range(15):
test.built_file_must_exist('generated_%d.h' % i)
# Make sure the optimized cygwin setup doesn't cause problems for incremental
# builds.
test.touch('file1')
test.build('many-actions-unsorted.gyp', test.ALL)
test.touch('file0')
test.build('many-actions-unsorted.gyp', test.ALL)
test.touch('file2')
test.touch('file3')
test.touch('file4')
test.build('many-actions-unsorted.gyp', test.ALL)
test.pass_test()
| 23.388889
| 77
| 0.739905
|
110e96c2716e8ef7923ec3772888b9ef5b010df5
| 14,694
|
py
|
Python
|
mssql/introspection.py
|
martinzellner/mssql-django
|
c809d623326117ed9e822ba61170cfa24b29d2b0
|
[
"BSD-3-Clause"
] | null | null | null |
mssql/introspection.py
|
martinzellner/mssql-django
|
c809d623326117ed9e822ba61170cfa24b29d2b0
|
[
"BSD-3-Clause"
] | null | null | null |
mssql/introspection.py
|
martinzellner/mssql-django
|
c809d623326117ed9e822ba61170cfa24b29d2b0
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import pyodbc as Database
from django import VERSION
from django.db.backends.base.introspection import (
BaseDatabaseIntrospection, FieldInfo, TableInfo,
)
from django.db.models.indexes import Index
SQL_AUTOFIELD = -777555
SQL_BIGAUTOFIELD = -777444
class DatabaseIntrospection(BaseDatabaseIntrospection):
# Map type codes to Django Field types.
data_types_reverse = {
SQL_AUTOFIELD: 'AutoField',
SQL_BIGAUTOFIELD: 'BigAutoField',
Database.SQL_BIGINT: 'BigIntegerField',
# Database.SQL_BINARY: ,
Database.SQL_BIT: 'BooleanField',
Database.SQL_CHAR: 'CharField',
Database.SQL_DECIMAL: 'DecimalField',
Database.SQL_DOUBLE: 'FloatField',
Database.SQL_FLOAT: 'FloatField',
Database.SQL_GUID: 'TextField',
Database.SQL_INTEGER: 'IntegerField',
Database.SQL_LONGVARBINARY: 'BinaryField',
# Database.SQL_LONGVARCHAR: ,
Database.SQL_NUMERIC: 'DecimalField',
Database.SQL_REAL: 'FloatField',
Database.SQL_SMALLINT: 'SmallIntegerField',
Database.SQL_SS_TIME2: 'TimeField',
Database.SQL_TINYINT: 'SmallIntegerField',
Database.SQL_TYPE_DATE: 'DateField',
Database.SQL_TYPE_TIME: 'TimeField',
Database.SQL_TYPE_TIMESTAMP: 'DateTimeField',
Database.SQL_VARBINARY: 'BinaryField',
Database.SQL_VARCHAR: 'TextField',
Database.SQL_WCHAR: 'CharField',
Database.SQL_WLONGVARCHAR: 'TextField',
Database.SQL_WVARCHAR: 'TextField',
}
ignored_tables = []
def get_field_type(self, data_type, description):
field_type = super().get_field_type(data_type, description)
# the max nvarchar length is described as 0 or 2**30-1
# (it depends on the driver)
size = description.internal_size
if field_type == 'CharField':
if size == 0 or size >= 2**30 - 1:
field_type = "TextField"
elif field_type == 'TextField':
if size > 0 and size < 2**30 - 1:
field_type = 'CharField'
return field_type
def get_table_list(self, cursor):
"""
Returns a list of table and view names in the current database.
"""
sql = 'SELECT TABLE_NAME, TABLE_TYPE FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = SCHEMA_NAME()'
cursor.execute(sql)
types = {'BASE TABLE': 't', 'VIEW': 'v'}
return [TableInfo(row[0], types.get(row[1]))
for row in cursor.fetchall()
if row[0] not in self.ignored_tables]
def _is_auto_field(self, cursor, table_name, column_name):
"""
Checks whether column is Identity
"""
# COLUMNPROPERTY: http://msdn2.microsoft.com/en-us/library/ms174968.aspx
# from django.db import connection
# cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')",
# (connection.ops.quote_name(table_name), column_name))
cursor.execute("SELECT COLUMNPROPERTY(OBJECT_ID(%s), %s, 'IsIdentity')",
(self.connection.ops.quote_name(table_name), column_name))
return cursor.fetchall()[0][0]
def get_table_description(self, cursor, table_name, identity_check=True):
"""Returns a description of the table, with DB-API cursor.description interface.
        The 'identity_check' parameter has been added to the function argspec.
If set to True, the function will check each of the table's fields for the
IDENTITY property (the IDENTITY property is the MSSQL equivalent to an AutoField).
When an integer field is found with an IDENTITY property, it is given a custom field number
of SQL_AUTOFIELD, which maps to the 'AutoField' value in the DATA_TYPES_REVERSE dict.
When a bigint field is found with an IDENTITY property, it is given a custom field number
of SQL_BIGAUTOFIELD, which maps to the 'BigAutoField' value in the DATA_TYPES_REVERSE dict.
"""
# map pyodbc's cursor.columns to db-api cursor description
if VERSION >= (3, 2):
columns = [[c[3], c[4], None, c[6], c[6], c[8], c[10], c[12], ''] for c in cursor.columns(table=table_name)]
else:
columns = [[c[3], c[4], None, c[6], c[6], c[8], c[10], c[12]] for c in cursor.columns(table=table_name)]
items = []
for column in columns:
if identity_check and self._is_auto_field(cursor, table_name, column[0]):
if column[1] == Database.SQL_BIGINT:
column[1] = SQL_BIGAUTOFIELD
else:
column[1] = SQL_AUTOFIELD
if column[1] == Database.SQL_WVARCHAR and column[3] < 4000:
column[1] = Database.SQL_WCHAR
items.append(FieldInfo(*column))
return items
def get_sequences(self, cursor, table_name, table_fields=()):
cursor.execute("""
SELECT c.name FROM sys.columns c
INNER JOIN sys.tables t ON c.object_id = t.object_id
WHERE t.schema_id = SCHEMA_ID() AND t.name = %s AND c.is_identity = 1""",
[table_name])
# SQL Server allows only one identity column per table
# https://docs.microsoft.com/en-us/sql/t-sql/statements/create-table-transact-sql-identity-property
row = cursor.fetchone()
return [{'table': table_name, 'column': row[0]}] if row else []
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_name: (field_name_other_table, other_table)}
representing all relationships to the given table.
"""
# CONSTRAINT_COLUMN_USAGE: http://msdn2.microsoft.com/en-us/library/ms174431.aspx
# CONSTRAINT_TABLE_USAGE: http://msdn2.microsoft.com/en-us/library/ms179883.aspx
# REFERENTIAL_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms179987.aspx
# TABLE_CONSTRAINTS: http://msdn2.microsoft.com/en-us/library/ms181757.aspx
sql = """
SELECT e.COLUMN_NAME AS column_name,
c.TABLE_NAME AS referenced_table_name,
d.COLUMN_NAME AS referenced_column_name
FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS a
INNER JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS b
ON a.CONSTRAINT_NAME = b.CONSTRAINT_NAME AND a.TABLE_SCHEMA = b.CONSTRAINT_SCHEMA
INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_TABLE_USAGE AS c
ON b.UNIQUE_CONSTRAINT_NAME = c.CONSTRAINT_NAME AND b.CONSTRAINT_SCHEMA = c.CONSTRAINT_SCHEMA
INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS d
ON c.CONSTRAINT_NAME = d.CONSTRAINT_NAME AND c.CONSTRAINT_SCHEMA = d.CONSTRAINT_SCHEMA
INNER JOIN INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS e
ON a.CONSTRAINT_NAME = e.CONSTRAINT_NAME AND a.TABLE_SCHEMA = e.TABLE_SCHEMA
WHERE a.TABLE_SCHEMA = SCHEMA_NAME() AND a.TABLE_NAME = %s AND a.CONSTRAINT_TYPE = 'FOREIGN KEY'"""
cursor.execute(sql, (table_name,))
return dict([[item[0], (item[2], item[1])] for item in cursor.fetchall()])
def get_key_columns(self, cursor, table_name):
"""
Returns a list of (column_name, referenced_table_name, referenced_column_name) for all
key columns in given table.
"""
key_columns = []
cursor.execute("""
SELECT c.name AS column_name, rt.name AS referenced_table_name, rc.name AS referenced_column_name
FROM sys.foreign_key_columns fk
INNER JOIN sys.tables t ON t.object_id = fk.parent_object_id
INNER JOIN sys.columns c ON c.object_id = t.object_id AND c.column_id = fk.parent_column_id
INNER JOIN sys.tables rt ON rt.object_id = fk.referenced_object_id
INNER JOIN sys.columns rc ON rc.object_id = rt.object_id AND rc.column_id = fk.referenced_column_id
WHERE t.schema_id = SCHEMA_ID() AND t.name = %s""", [table_name])
key_columns.extend([tuple(row) for row in cursor.fetchall()])
return key_columns
def get_constraints(self, cursor, table_name):
"""
Retrieves any constraints or keys (unique, pk, fk, check, index)
across one or more columns.
Returns a dict mapping constraint names to their attributes,
where attributes is a dict with keys:
* columns: List of columns this covers
* primary_key: True if primary key, False otherwise
* unique: True if this is a unique constraint, False otherwise
* foreign_key: (table, column) of target, or None
* check: True if check constraint, False otherwise
* index: True if index, False otherwise.
* orders: The order (ASC/DESC) defined for the columns of indexes
* type: The type of the index (btree, hash, etc.)
"""
constraints = {}
# Loop over the key table, collecting things as constraints
# This will get PKs, FKs, and uniques, but not CHECK
cursor.execute("""
SELECT
kc.constraint_name,
kc.column_name,
tc.constraint_type,
fk.referenced_table_name,
fk.referenced_column_name
FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS kc
INNER JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS tc ON
kc.table_schema = tc.table_schema AND
kc.table_name = tc.table_name AND
kc.constraint_name = tc.constraint_name
LEFT OUTER JOIN (
SELECT
ps.name AS table_schema,
pt.name AS table_name,
pc.name AS column_name,
rt.name AS referenced_table_name,
rc.name AS referenced_column_name
FROM
sys.foreign_key_columns fkc
INNER JOIN sys.tables pt ON
fkc.parent_object_id = pt.object_id
INNER JOIN sys.schemas ps ON
pt.schema_id = ps.schema_id
INNER JOIN sys.columns pc ON
fkc.parent_object_id = pc.object_id AND
fkc.parent_column_id = pc.column_id
INNER JOIN sys.tables rt ON
fkc.referenced_object_id = rt.object_id
INNER JOIN sys.schemas rs ON
rt.schema_id = rs.schema_id
INNER JOIN sys.columns rc ON
fkc.referenced_object_id = rc.object_id AND
fkc.referenced_column_id = rc.column_id
) fk ON
kc.table_schema = fk.table_schema AND
kc.table_name = fk.table_name AND
kc.column_name = fk.column_name
WHERE
kc.table_schema = SCHEMA_NAME() AND
kc.table_name = %s
ORDER BY
kc.constraint_name ASC,
kc.ordinal_position ASC
""", [table_name])
for constraint, column, kind, ref_table, ref_column in cursor.fetchall():
# If we're the first column, make the record
if constraint not in constraints:
constraints[constraint] = {
"columns": [],
"primary_key": kind.lower() == "primary key",
"unique": kind.lower() in ["primary key", "unique"],
"foreign_key": (ref_table, ref_column) if kind.lower() == "foreign key" else None,
"check": False,
"index": False,
}
# Record the details
constraints[constraint]['columns'].append(column)
# Now get CHECK constraint columns
cursor.execute("""
SELECT kc.constraint_name, kc.column_name
FROM INFORMATION_SCHEMA.CONSTRAINT_COLUMN_USAGE AS kc
JOIN INFORMATION_SCHEMA.TABLE_CONSTRAINTS AS c ON
kc.table_schema = c.table_schema AND
kc.table_name = c.table_name AND
kc.constraint_name = c.constraint_name
WHERE
c.constraint_type = 'CHECK' AND
kc.table_schema = SCHEMA_NAME() AND
kc.table_name = %s
""", [table_name])
for constraint, column in cursor.fetchall():
# If we're the first column, make the record
if constraint not in constraints:
constraints[constraint] = {
"columns": [],
"primary_key": False,
"unique": False,
"foreign_key": None,
"check": True,
"index": False,
}
# Record the details
constraints[constraint]['columns'].append(column)
# Now get indexes
cursor.execute("""
SELECT
i.name AS index_name,
i.is_unique,
i.is_primary_key,
i.type,
i.type_desc,
ic.is_descending_key,
c.name AS column_name
FROM
sys.tables AS t
INNER JOIN sys.schemas AS s ON
t.schema_id = s.schema_id
INNER JOIN sys.indexes AS i ON
t.object_id = i.object_id
INNER JOIN sys.index_columns AS ic ON
i.object_id = ic.object_id AND
i.index_id = ic.index_id
INNER JOIN sys.columns AS c ON
ic.object_id = c.object_id AND
ic.column_id = c.column_id
WHERE
t.schema_id = SCHEMA_ID() AND
t.name = %s
ORDER BY
i.index_id ASC,
ic.index_column_id ASC
""", [table_name])
indexes = {}
for index, unique, primary, type_, desc, order, column in cursor.fetchall():
if index not in indexes:
indexes[index] = {
"columns": [],
"primary_key": primary,
"unique": unique,
"foreign_key": None,
"check": False,
"index": True,
"orders": [],
"type": Index.suffix if type_ in (1, 2) else desc.lower(),
}
indexes[index]["columns"].append(column)
indexes[index]["orders"].append("DESC" if order == 1 else "ASC")
for index, constraint in indexes.items():
if index not in constraints:
constraints[index] = constraint
return constraints
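# Usage sketch (alias and table name are hypothetical): Django reaches this
# introspection class through a configured connection, e.g.
#     from django.db import connections
#     conn = connections['default']
#     with conn.cursor() as cursor:
#         tables = conn.introspection.get_table_list(cursor)
#         constraints = conn.introspection.get_constraints(cursor, 'django_migrations')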
| 45.212308
| 120
| 0.594801
|
4fd5c5f0a0e3b16500305860ea5e2034bb85e729
| 11,876
|
py
|
Python
|
app/gui.py
|
ITA-ftuyama/TG
|
9619f3a243ecdc4c9274ef91c4bc6a8d21029d57
|
[
"MIT"
] | null | null | null |
app/gui.py
|
ITA-ftuyama/TG
|
9619f3a243ecdc4c9274ef91c4bc6a8d21029d57
|
[
"MIT"
] | null | null | null |
app/gui.py
|
ITA-ftuyama/TG
|
9619f3a243ecdc4c9274ef91c4bc6a8d21029d57
|
[
"MIT"
] | 1
|
2019-11-20T03:17:17.000Z
|
2019-11-20T03:17:17.000Z
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_win_plot(object):
def setupUi(self, win_plot):
win_plot.setObjectName(_fromUtf8("win_plot"))
win_plot.resize(1067, 708)
self.centralwidget = QtGui.QWidget(win_plot)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.label_2 = QtGui.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(24)
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.label_2.setObjectName(_fromUtf8("label_2"))
        self.horizontalLayout_3.addWidget(self.label_2, 0, QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.verticalLayout_5 = QtGui.QVBoxLayout()
self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
self.label = QtGui.QLabel(self.centralwidget)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout_5.addWidget(self.label)
self.qwtPlot = QwtPlot(self.centralwidget)
self.qwtPlot.setObjectName(_fromUtf8("qwtPlot"))
self.verticalLayout_5.addWidget(self.qwtPlot)
self.horizontalLayout_2.addLayout(self.verticalLayout_5)
self.verticalLayout_3 = QtGui.QVBoxLayout()
self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
self.label_3 = QtGui.QLabel(self.centralwidget)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.verticalLayout_3.addWidget(self.label_3)
self.qwtBarPlot = QwtPlot(self.centralwidget)
self.qwtBarPlot.setObjectName(_fromUtf8("qwtBarPlot"))
self.verticalLayout_3.addWidget(self.qwtBarPlot)
self.horizontalLayout_2.addLayout(self.verticalLayout_3)
self.verticalLayout_6 = QtGui.QVBoxLayout()
self.verticalLayout_6.setMargin(0)
self.verticalLayout_6.setSpacing(0)
self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
self.groupBox_2 = QtGui.QGroupBox(self.centralwidget)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.verticalLayout_6.addWidget(self.groupBox_2)
self.label_6 = QtGui.QLabel(self.centralwidget)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.verticalLayout_6.addWidget(self.label_6)
self.progressBar = QtGui.QProgressBar(self.centralwidget)
self.progressBar.setProperty("value", 24)
self.progressBar.setObjectName(_fromUtf8("progressBar"))
self.verticalLayout_6.addWidget(self.progressBar)
self.label_5 = QtGui.QLabel(self.centralwidget)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.verticalLayout_6.addWidget(self.label_5)
self.progressBar_2 = QtGui.QProgressBar(self.centralwidget)
self.progressBar_2.setProperty("value", 24)
self.progressBar_2.setObjectName(_fromUtf8("progressBar_2"))
self.verticalLayout_6.addWidget(self.progressBar_2)
self.label_7 = QtGui.QLabel(self.centralwidget)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.verticalLayout_6.addWidget(self.label_7)
self.progressBar_3 = QtGui.QProgressBar(self.centralwidget)
self.progressBar_3.setProperty("value", 24)
self.progressBar_3.setObjectName(_fromUtf8("progressBar_3"))
self.verticalLayout_6.addWidget(self.progressBar_3)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.verticalLayout_6.addItem(spacerItem)
self.label_10 = QtGui.QLabel(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_10.sizePolicy().hasHeightForWidth())
self.label_10.setSizePolicy(sizePolicy)
self.label_10.setMinimumSize(QtCore.QSize(0, 0))
self.label_10.setObjectName(_fromUtf8("label_10"))
self.verticalLayout_6.addWidget(self.label_10)
self.intention_label = QtGui.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setItalic(True)
font.setWeight(75)
self.intention_label.setFont(font)
self.intention_label.setObjectName(_fromUtf8("intention_label"))
self.verticalLayout_6.addWidget(self.intention_label)
self.action_label = QtGui.QLabel(self.centralwidget)
self.action_label.setObjectName(_fromUtf8("action_label"))
self.verticalLayout_6.addWidget(self.action_label)
self.groupBox = QtGui.QGroupBox(self.centralwidget)
self.groupBox.setMinimumSize(QtCore.QSize(0, 28))
self.groupBox.setMaximumSize(QtCore.QSize(137, 28))
self.groupBox.setTitle(_fromUtf8(""))
self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.verticalLayout_6.addWidget(self.groupBox, 0, QtCore.Qt.AlignTop)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.verticalLayout_6.addItem(spacerItem1)
self.label_9 = QtGui.QLabel(self.centralwidget)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.verticalLayout_6.addWidget(self.label_9)
self.lineEdit = QtGui.QLineEdit(self.centralwidget)
self.lineEdit.setInputMask(_fromUtf8(""))
self.lineEdit.setText(_fromUtf8(""))
self.lineEdit.setReadOnly(False)
self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
self.verticalLayout_6.addWidget(self.lineEdit)
self.pushButton = QtGui.QPushButton(self.centralwidget)
self.pushButton.setObjectName(_fromUtf8("pushButton"))
self.verticalLayout_6.addWidget(self.pushButton)
self.horizontalLayout_2.addLayout(self.verticalLayout_6)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setContentsMargins(6, 0, 6, 0)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.btnA = QtGui.QPushButton(self.centralwidget)
self.btnA.setObjectName(_fromUtf8("btnA"))
self.horizontalLayout.addWidget(self.btnA)
self.btnB = QtGui.QPushButton(self.centralwidget)
self.btnB.setObjectName(_fromUtf8("btnB"))
self.horizontalLayout.addWidget(self.btnB)
self.btnC = QtGui.QPushButton(self.centralwidget)
self.btnC.setObjectName(_fromUtf8("btnC"))
self.horizontalLayout.addWidget(self.btnC)
self.btnD = QtGui.QPushButton(self.centralwidget)
self.btnD.setObjectName(_fromUtf8("btnD"))
self.horizontalLayout.addWidget(self.btnD)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.verticalLayout_2.addItem(spacerItem2)
self.label_4 = QtGui.QLabel(self.centralwidget)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.verticalLayout_2.addWidget(self.label_4)
self.bluetooth = QtGui.QLabel(self.centralwidget)
self.bluetooth.setAutoFillBackground(False)
self.bluetooth.setTextFormat(QtCore.Qt.RichText)
self.bluetooth.setWordWrap(False)
self.bluetooth.setObjectName(_fromUtf8("bluetooth"))
self.verticalLayout_2.addWidget(self.bluetooth)
self.serial = QtGui.QLabel(self.centralwidget)
self.serial.setObjectName(_fromUtf8("serial"))
self.verticalLayout_2.addWidget(self.serial)
self.status = QtGui.QLabel(self.centralwidget)
self.status.setObjectName(_fromUtf8("status"))
self.verticalLayout_2.addWidget(self.status)
self.horizontalLayout_4.addLayout(self.verticalLayout_2)
self.verticalLayout.addLayout(self.horizontalLayout_4)
win_plot.setCentralWidget(self.centralwidget)
self.retranslateUi(win_plot)
QtCore.QMetaObject.connectSlotsByName(win_plot)
def retranslateUi(self, win_plot):
win_plot.setWindowTitle(_translate("win_plot", "TEEG Application", None))
self.label_2.setText(_translate("win_plot", "TEEG - Translate Electroencefalography", None))
self.label.setText(_translate("win_plot", "<html><head/><body><p><span style=\" font-size:14pt;\">Real Time Data</span></p></body></html>", None))
self.label_3.setText(_translate("win_plot", "<html><head/><body><p><span style=\" font-size:14pt;\">Spectral Analysis</span></p></body></html>", None))
self.groupBox_2.setTitle(_translate("win_plot", "Mindwave Levels", None))
self.label_6.setText(_translate("win_plot", "Blink", None))
self.label_5.setText(_translate("win_plot", "Attention", None))
self.label_7.setText(_translate("win_plot", "Meditation", None))
self.label_10.setText(_translate("win_plot", "<html><head/><body><p><span style=\" font-weight:600;\">Predicted Actions</span></p></body></html>", None))
self.intention_label.setText(_translate("win_plot", "intention", None))
self.action_label.setText(_translate("win_plot", "[action]", None))
self.label_9.setText(_translate("win_plot", "<html><head/><body><p><span style=\" font-weight:600;\">Record Panel</span></p></body></html>", None))
self.lineEdit.setPlaceholderText(_translate("win_plot", "Filename...", None))
self.pushButton.setText(_translate("win_plot", "Not Recording", None))
self.btnA.setText(_translate("win_plot", "x0.5", None))
self.btnB.setText(_translate("win_plot", "x1", None))
self.btnC.setText(_translate("win_plot", "x10", None))
self.btnD.setText(_translate("win_plot", "x100", None))
self.label_4.setText(_translate("win_plot", "<html><head/><body><p><span style=\" font-size:12pt; font-weight:600;\">Messages Log</span></p></body></html>", None))
self.bluetooth.setText(_translate("win_plot", "<html><head/><body><p><span style=\" color:#0000ff;\">[Bluetooth]</span></p></body></html>", None))
self.serial.setText(_translate("win_plot", "<html><head/><body><p><span style=\" color:#aa0000;\">[Serial]</span></p></body></html>", None))
self.status.setText(_translate("win_plot", "<html><head/><body><p><span style=\" color:#005500;\">[Status]</span></p></body></html>", None))
from qwt_plot import QwtPlot
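# Usage sketch: a pyuic-generated class such as Ui_win_plot is typically attached
# to a QMainWindow at runtime, e.g.
#     import sys
#     app = QtGui.QApplication(sys.argv)
#     window = QtGui.QMainWindow()
#     ui = Ui_win_plot()
#     ui.setupUi(window)
#     window.show()
#     sys.exit(app.exec_())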
| 57.371981
| 171
| 0.710677
|
39770c7f7436fa0d5c7ac1b078393e7cef9d1ddf
| 590
|
py
|
Python
|
puppy_store/puppies/models.py
|
berkayersever/django-puppy-store
|
7e7ea5d2befd7bc1bcd02226a60150dd42907aeb
|
[
"MIT"
] | null | null | null |
puppy_store/puppies/models.py
|
berkayersever/django-puppy-store
|
7e7ea5d2befd7bc1bcd02226a60150dd42907aeb
|
[
"MIT"
] | null | null | null |
puppy_store/puppies/models.py
|
berkayersever/django-puppy-store
|
7e7ea5d2befd7bc1bcd02226a60150dd42907aeb
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Puppy(models.Model):
"""
Puppy Model
Defines the attributes of a puppy
"""
name = models.CharField(max_length=255)
age = models.IntegerField()
breed = models.CharField(max_length=255)
color = models.CharField(max_length=255)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def get_breed(self):
return self.name + ' belongs to ' + self.breed + ' breed.'
def __repr__(self):
return self.name + ' is added.'
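# Usage sketch (field values are made up): typical ORM calls against this model
#     puppy = Puppy.objects.create(name='Rex', age=2, breed='Labrador', color='black')
#     puppy.get_breed()   # "Rex belongs to Labrador breed."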
| 25.652174
| 66
| 0.672881
|
c85b82bf7bc080892f69baa663fb649db1686fee
| 936
|
py
|
Python
|
isi_sdk_8_2_1/test/test_hdfs_inotify_settings.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 24
|
2018-06-22T14:13:23.000Z
|
2022-03-23T01:21:26.000Z
|
isi_sdk_8_2_1/test/test_hdfs_inotify_settings.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 46
|
2018-04-30T13:28:22.000Z
|
2022-03-21T21:11:07.000Z
|
isi_sdk_8_2_1/test/test_hdfs_inotify_settings.py
|
mohitjain97/isilon_sdk_python
|
a371f438f542568edb8cda35e929e6b300b1177c
|
[
"Unlicense"
] | 29
|
2018-06-19T00:14:04.000Z
|
2022-02-08T17:51:19.000Z
|
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 8
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import isi_sdk_8_2_1
from isi_sdk_8_2_1.models.hdfs_inotify_settings import HdfsInotifySettings # noqa: E501
from isi_sdk_8_2_1.rest import ApiException
class TestHdfsInotifySettings(unittest.TestCase):
"""HdfsInotifySettings unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testHdfsInotifySettings(self):
"""Test HdfsInotifySettings"""
# FIXME: construct object with mandatory attributes with example values
# model = isi_sdk_8_2_1.models.hdfs_inotify_settings.HdfsInotifySettings() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 22.829268
| 96
| 0.715812
|
e537a1422c9ef0945b2031f075e440637756b40c
| 1,686
|
py
|
Python
|
tests/test_add_remove_cgpm.py
|
probcomp/cgpm2
|
280ab5bf3dd0d7c61196deaff7cb590692fc412a
|
[
"Apache-2.0"
] | 3
|
2019-01-20T08:55:06.000Z
|
2019-12-02T05:59:26.000Z
|
tests/test_add_remove_cgpm.py
|
probcomp/cgpm2
|
280ab5bf3dd0d7c61196deaff7cb590692fc412a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_add_remove_cgpm.py
|
probcomp/cgpm2
|
280ab5bf3dd0d7c61196deaff7cb590692fc412a
|
[
"Apache-2.0"
] | 3
|
2019-08-06T07:27:34.000Z
|
2019-09-28T23:26:57.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2018 MIT Probabilistic Computing Project.
# Released under Apache 2.0; refer to LICENSE.txt.
import pytest
from cgpm.utils.general import get_prng
from cgpm2.crp import CRP
from cgpm2.normal import Normal
from cgpm2.product import Product
from cgpm2.flexible_rowmix import FlexibleRowMixture
from cgpm2.walks import remove_cgpm
from cgpm2.walks import add_cgpm
def test_add_remove():
prng = get_prng(2)
mixture0 = FlexibleRowMixture(
cgpm_row_divide=CRP([2], [], rng=prng),
cgpm_components_base=Product([
Normal([0], [], rng=prng),
Normal([1], [], rng=prng),
], rng=prng),
rng=prng)
    for rowid, row in enumerate([[0, .9], [.5, 1], [-.5, 1.2]]):
mixture0.observe(rowid, {0:row[0], 1:row[1]})
mixture1 = remove_cgpm(mixture0, 0)
assert mixture0.outputs == [2, 0, 1]
assert mixture1.outputs == [2, 1]
mixture2 = add_cgpm(mixture1, Normal([0], [], rng=prng))
assert mixture0.outputs == [2, 0, 1]
assert mixture1.outputs == [2, 1]
assert mixture2.outputs == [2, 1, 0]
mixture3 = remove_cgpm(mixture2, 1)
assert mixture0.outputs == [2, 0, 1]
assert mixture1.outputs == [2, 1]
assert mixture2.outputs == [2, 1, 0]
assert mixture3.outputs == [2, 0]
mixture4 = remove_cgpm(mixture3, 0)
assert mixture0.outputs == [2, 0, 1]
assert mixture1.outputs == [2, 1]
assert mixture2.outputs == [2, 1, 0]
assert mixture3.outputs == [2, 0]
assert mixture4.outputs == [2]
with pytest.raises(Exception):
# Cannot remove the cgpm_row_divide for a mixture.
mixture3 = remove_cgpm(mixture2, 2)
| 30.107143
| 63
| 0.639976
|