Dataset schema (29 columns):

| column | type | values / lengths |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 4 – 1.02M |
| ext | string | 8 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 4 – 209 |
| max_stars_repo_name | string | length 5 – 121 |
| max_stars_repo_head_hexsha | string | length 40 |
| max_stars_repo_licenses | list | length 1 – 10 |
| max_stars_count | int64 | 1 – 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 4 – 209 |
| max_issues_repo_name | string | length 5 – 121 |
| max_issues_repo_head_hexsha | string | length 40 |
| max_issues_repo_licenses | list | length 1 – 10 |
| max_issues_count | int64 | 1 – 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 4 – 209 |
| max_forks_repo_name | string | length 5 – 121 |
| max_forks_repo_head_hexsha | string | length 40 |
| max_forks_repo_licenses | list | length 1 – 10 |
| max_forks_count | int64 | 1 – 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 4 – 1.02M |
| avg_line_length | float64 | 1.07 – 66.1k |
| max_line_length | int64 | 4 – 266k |
| alphanum_fraction | float64 | 0.01 – 1 |
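Each row pairs one source file (the content column) with repository metadata. Below is a minimal, illustrative sketch of filtering such records in plain Python; the sample record reuses values from the first row below, and the filter thresholds are arbitrary choices, not part of the dataset:

```python
# Illustrative only: the field names match the schema above; the thresholds are arbitrary.
records = [
    {
        "hexsha": "d694261614244cdcd722d871732847e1a98e9435",
        "ext": "py",
        "lang": "Python",
        "max_stars_repo_name": "lihuafeng/WechatSpider",
        "max_stars_repo_licenses": ["Apache-2.0"],
        "max_stars_count": 3,
        "content": "# -*- coding: utf-8 -*-\n...",
        "avg_line_length": 25.909091,
        "max_line_length": 135,
        "alphanum_fraction": 0.665965,
    },
]

ALLOWED_LICENSES = {"Apache-2.0", "MIT", "BSD-2-Clause", "BSD-3-Clause", "BSD-3-Clause-LBNL"}

kept = [
    r for r in records
    if r["ext"] == "py"
    and set(r["max_stars_repo_licenses"]) & ALLOWED_LICENSES
    and r["max_line_length"] < 1000        # drop minified / generated files
    and (r["max_stars_count"] or 0) >= 1   # null star counts are treated as 0
]
print(f"kept {len(kept)} of {len(records)} records")
```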
hexsha: d694261614244cdcd722d871732847e1a98e9435 | size: 1,425 | ext: py | lang: Python
repo: lihuafeng/WechatSpider | path: WeXinCrawler/items.py | head: d8eac0182b2d3175dc689d901246cffa4f355c3c | licenses: ["Apache-2.0"] (in this and every row below, path/repo/head/licenses are identical across the stars, issues and forks column groups)
max_stars_count: 3 (2019-01-18T05:06:55.000Z – 2019-09-26T09:29:27.000Z) | max_issues_count: null | max_forks_count: 2 (2019-02-21T09:08:00.000Z – 2019-12-23T15:51:38.000Z)
content:
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import datetime
import scrapy
from scrapy.loader import ItemLoader
from scrapy.loader.processors import MapCompose, TakeFirst
class WeixinItemLoader(ItemLoader):
    # custom ItemLoader
default_output_processor = TakeFirst()
def format_time(value):
return datetime.datetime.fromtimestamp(value)
def format_str(value):
return str(value)
class WexincrawlerItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
appmsgid = scrapy.Field(input_processor=MapCompose(format_str))
cover = scrapy.Field()
digest = scrapy.Field()
link = scrapy.Field()
title = scrapy.Field()
update_time = scrapy.Field(input_processor=MapCompose(format_time))
content = scrapy.Field()
crawl_time = scrapy.Field()
def get_insert_sql(self):
insert_sql = """
            insert into weixin_spider(cover, appmsgid, digest, link, title, content, update_time) VALUES (%s, %s, %s, %s, %s, %s, %s)
ON DUPLICATE KEY UPDATE appmsgid=VALUES(appmsgid), link=VALUES(link)
"""
params = (
self["cover"], self["appmsgid"], self["digest"], self["link"],
self["title"], self["content"], self["update_time"]
)
return insert_sql, params
avg_line_length: 25.909091 | max_line_length: 135 | alphanum_fraction: 0.665965
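A minimal usage sketch for the item module above. The import path WeXinCrawler.items mirrors the repository layout, and the pymysql connection settings are placeholders; both are assumptions, not something the source file prescribes:

```python
# Sketch only: feed values through the loader's processors, then reuse
# get_insert_sql() the way a MySQL pipeline typically would.
import pymysql
from WeXinCrawler.items import WeixinItemLoader, WexincrawlerItem

loader = WeixinItemLoader(item=WexincrawlerItem())
loader.add_value("appmsgid", 100000123)       # format_str -> "100000123"
loader.add_value("update_time", 1577000000)   # format_time -> datetime
loader.add_value("title", "example title")
loader.add_value("cover", "http://example.com/cover.jpg")
loader.add_value("digest", "summary text")
loader.add_value("link", "http://example.com/article")
loader.add_value("content", "<p>body</p>")
item = loader.load_item()

insert_sql, params = item.get_insert_sql()
conn = pymysql.connect(host="localhost", user="root", password="", db="spider")
with conn.cursor() as cursor:
    cursor.execute(insert_sql, params)
conn.commit()
```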
hexsha: 416ec9847d8beb28fd58dfed24f0898b601b1255 | size: 7,722 | ext: py | lang: Python
repo: dperl-sol/cctbx_project | path: mmtbx/regression/ncs/tst_minimization_ncs_constraints_real_space.py | head: b9e390221a2bc4fd00b9122e97c3b79c632c6664 | licenses: ["BSD-3-Clause-LBNL"]
max_stars_count: 155 (2016-11-23T12:52:16.000Z – 2022-03-31T15:35:44.000Z) | max_issues_count: 590 (2016-12-10T11:31:18.000Z – 2022-03-30T23:10:09.000Z) | max_forks_count: 115 (2016-11-15T08:17:28.000Z – 2022-02-09T15:30:14.000Z)
content:
from __future__ import absolute_import, division, print_function
import time
import mmtbx.refinement.minimization_ncs_constraints
#import mmtbx.refinement.real_space.weight
from scitbx.array_family import flex
import mmtbx.ncs.ncs_utils as nu
import iotbx.ncs as ncs
import mmtbx.utils
import iotbx.pdb
import mmtbx.ncs
import math
from six.moves import range
pdb_str_answer = """\
CRYST1 26.628 30.419 28.493 90.00 90.00 90.00 P 1
ATOM 1 N THR A 1 15.638 20.419 12.645 1.00 10.00 N
ATOM 2 CA THR A 1 15.527 19.061 12.125 1.00 10.00 C
ATOM 3 C THR A 1 15.602 18.033 13.249 1.00 10.00 C
ATOM 4 O THR A 1 16.417 18.157 14.163 1.00 10.00 O
ATOM 5 CB THR A 1 16.628 18.760 11.092 1.00 10.00 C
ATOM 6 OG1 THR A 1 16.528 19.682 10.000 1.00 10.00 O
ATOM 7 CG2 THR A 1 16.491 17.340 10.565 1.00 10.00 C
TER
ATOM 1 N THR B 1 10.624 17.093 17.237 1.00 10.00 N
ATOM 2 CA THR B 1 11.140 16.654 15.945 1.00 10.00 C
ATOM 3 C THR B 1 12.494 15.968 16.097 1.00 10.00 C
ATOM 4 O THR B 1 13.351 16.426 16.852 1.00 10.00 O
ATOM 5 CB THR B 1 11.279 17.832 14.963 1.00 10.00 C
ATOM 6 OG1 THR B 1 10.000 18.446 14.765 1.00 10.00 O
ATOM 7 CG2 THR B 1 11.818 17.350 13.625 1.00 10.00 C
TER
ATOM 1 N THR C 1 12.949 10.000 18.493 1.00 10.00 N
ATOM 2 CA THR C 1 12.735 10.702 17.233 1.00 10.00 C
ATOM 3 C THR C 1 13.937 11.570 16.875 1.00 10.00 C
ATOM 4 O THR C 1 14.508 12.241 17.734 1.00 10.00 O
ATOM 5 CB THR C 1 11.474 11.584 17.284 1.00 10.00 C
ATOM 6 OG1 THR C 1 10.328 10.770 17.559 1.00 10.00 O
ATOM 7 CG2 THR C 1 11.273 12.305 15.960 1.00 10.00 C
TER
END
"""
pdb_str_poor = """\
CRYST1 26.628 30.419 28.493 90.00 90.00 90.00 P 1
ATOM 1 N THR A 1 15.886 19.796 13.070 1.00 10.00 N
ATOM 2 CA THR A 1 15.489 18.833 12.050 1.00 10.00 C
ATOM 3 C THR A 1 15.086 17.502 12.676 1.00 10.00 C
ATOM 4 O THR A 1 15.739 17.017 13.600 1.00 10.00 O
ATOM 5 CB THR A 1 16.619 18.590 11.033 1.00 10.00 C
ATOM 6 OG1 THR A 1 16.963 19.824 10.392 1.00 10.00 O
ATOM 7 CG2 THR A 1 16.182 17.583 9.980 1.00 10.00 C
TER 8 THR A 1
ATOM 1 N THR B 1 10.028 17.193 16.617 1.00 10.00 N
ATOM 2 CA THR B 1 11.046 16.727 15.681 1.00 10.00 C
ATOM 3 C THR B 1 12.336 16.360 16.407 1.00 10.00 C
ATOM 4 O THR B 1 12.772 17.068 17.313 1.00 10.00 O
ATOM 5 CB THR B 1 11.356 17.789 14.609 1.00 10.00 C
ATOM 6 OG1 THR B 1 10.163 18.098 13.879 1.00 10.00 O
ATOM 7 CG2 THR B 1 12.418 17.281 13.646 1.00 10.00 C
TER 16 THR B 1
ATOM 1 N THR C 1 12.121 9.329 18.086 1.00 10.00 N
ATOM 2 CA THR C 1 12.245 10.284 16.991 1.00 10.00 C
ATOM 3 C THR C 1 13.707 10.622 16.718 1.00 10.00 C
ATOM 4 O THR C 1 14.493 10.814 17.645 1.00 10.00 O
ATOM 5 CB THR C 1 11.474 11.584 17.284 1.00 10.00 C
ATOM 6 OG1 THR C 1 10.087 11.287 17.482 1.00 10.00 O
ATOM 7 CG2 THR C 1 11.619 12.563 16.129 1.00 10.00 C
TER 24 THR C 1
END
"""
pdb_str_poor2 = """\
CRYST1 26.628 30.419 28.493 90.00 90.00 90.00 P 1
ATOM 1 N THR A 1 15.886 19.796 13.070 1.00 10.00 N
ATOM 2 CA THR A 1 15.489 18.833 12.050 1.00 10.00 C
ATOM 3 C THR A 1 15.086 17.502 12.676 1.00 10.00 C
ATOM 4 O THR A 1 15.739 17.017 13.600 1.00 10.00 O
ATOM 5 CB THR A 1 16.619 18.590 11.033 1.00 10.00 C
ATOM 6 OG1 THR A 1 16.963 19.824 10.392 1.00 10.00 O
ATOM 7 CG2 THR A 1 16.182 17.583 9.980 1.00 10.00 C
TER 8 THR A 1
"""
def run(prefix="tst", d_min=1.0):
"""
NCS constraints: xyz, adp, and operators.
"""
pdb_file_name_answer = "%s_answer.pdb"%prefix
of=open(pdb_file_name_answer, "w")
print(pdb_str_answer, file=of)
of.close()
#
pdb_file_name_poor = "%s_poor.pdb"%prefix
of=open(pdb_file_name_poor, "w")
print(pdb_str_poor, file=of)
of.close()
#
pdb_inp_answer = iotbx.pdb.input(file_name=pdb_file_name_answer)
ph_answer = pdb_inp_answer.construct_hierarchy()
ph_answer.atoms().reset_i_seq()
xrs_answer = pdb_inp_answer.xray_structure_simple()
#
pdb_inp_poor = iotbx.pdb.input(file_name=pdb_file_name_poor)
ph_poor = pdb_inp_poor.construct_hierarchy()
ph_poor_obj = iotbx.pdb.input(source_info=None, lines=pdb_str_poor2)
ph_poor.atoms().reset_i_seq()
xrs_poor = pdb_inp_poor.xray_structure_simple()
#
ppf = mmtbx.utils.process_pdb_file_srv(log=False).process_pdb_files(
raw_records=pdb_str_poor.splitlines())[0]
mmtbx.utils.assert_xray_structures_equal(
x1=ppf.xray_structure(show_summary = False),
x2=xrs_poor)
restraints_manager = mmtbx.restraints.manager(
geometry = ppf.geometry_restraints_manager(show_energies = False),
normalization = True)
#
fc = xrs_answer.structure_factors(d_min=d_min).f_calc()
fft_map = fc.fft_map(resolution_factor = 0.25)
fft_map.apply_sigma_scaling()
map_data = fft_map.real_map_unpadded()
#
transforms_obj = ncs.input(hierarchy=ph_answer)
x = transforms_obj.get_ncs_restraints_group_list().concatenate_rot_tran()
x = nu.shake_transformations(
x = x,
shake_angles_sigma=1*math.pi/180,
shake_translation_sigma=0.1)
nrgl = transforms_obj.get_ncs_restraints_group_list()
nrgl.update_rot_tran(x=x)
ncs_restraints_group_list = transforms_obj.get_ncs_restraints_group_list()
refine_selection = flex.size_t(range(transforms_obj.truncated_hierarchy.atoms_size()))
for i in range(5):
data_weight = 1
tfg_obj = mmtbx.refinement.minimization_ncs_constraints.\
target_function_and_grads_real_space(
map_data = map_data,
xray_structure = xrs_poor,
ncs_restraints_group_list = ncs_restraints_group_list,
refine_selection = refine_selection,
real_space_gradients_delta = d_min/4,
restraints_manager = restraints_manager,
data_weight = data_weight,
refine_sites = True)
minimized = mmtbx.refinement.minimization_ncs_constraints.lbfgs(
target_and_grads_object = tfg_obj,
xray_structure = xrs_poor,
ncs_restraints_group_list = ncs_restraints_group_list,
refine_selection = refine_selection,
finite_grad_differences_test = False,
max_iterations = 60,
refine_sites = True)
xrs_poor = tfg_obj.xray_structure
ph_poor.adopt_xray_structure(tfg_obj.xray_structure)
ph_poor.write_pdb_file(file_name="refined.pdb")
if (__name__ == "__main__"):
t0=time.time()
run()
print("Time: %6.4f"%(time.time()-t0))
print("OK")
avg_line_length: 46.8 | max_line_length: 88 | alphanum_fraction: 0.572002
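How this regression test might be driven directly, assuming a working cctbx/mmtbx installation and that the file above is on the Python path (the prefix is arbitrary):

```python
# Sketch: run the real-space, NCS-constrained refinement test with a custom prefix.
# run() writes <prefix>_answer.pdb and <prefix>_poor.pdb, then refined.pdb.
import tst_minimization_ncs_constraints_real_space as tst

tst.run(prefix="demo", d_min=1.0)
```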
hexsha: 84472b5b650301ff17aa25f4832116cd33e0adac | size: 1,161 | ext: py | lang: Python
repo: UETAILab/uetai | path: tests/test_upload_download_artifact.py | head: ea065d51e7724cd07e215c9098bfcdddbe530481 | licenses: ["MIT"]
max_stars_count: 19 (2021-09-25T05:08:56.000Z – 2021-12-12T04:46:09.000Z) | max_issues_count: 19 (2021-08-17T08:43:30.000Z – 2021-11-10T07:34:14.000Z) | max_forks_count: null
content:
"""CLI unit-test"""
import os
import shutil
import time
from unittest import TestCase
import torch
from wandb.sdk.wandb_artifacts import Artifact
from uetai.logger.summary_writer import SummaryWriter
class TestSummaryWriterWandb(TestCase):
"""artifact upload/download tests"""
def __init__(self, *args, **kwargs):
super(TestSummaryWriterWandb, self).__init__(*args, **kwargs)
self.logger = SummaryWriter('uetai', log_tool='wandb')
def test_dataset_artifact_upload_download(self):
# create a (random) dataset
path = './tmp/dataset'
os.makedirs(path, exist_ok=True)
torch.save(torch.randn((1000, 1000)), os.path.join(path, "datapoint.tmp"))
# upload dataset as artifact
artifact = self.logger.log_dataset_artifact(path, dataset_name="dummy-set")
shutil.rmtree('./tmp') # del tmpdir
assert isinstance(artifact, Artifact)
time.sleep(5)
down_path, _ = self.logger.download_dataset_artifact(
dataset_name='dummy-set'
)
assert os.path.exists(down_path)
assert os.path.exists(os.path.join(down_path, "datapoint.tmp"))
avg_line_length: 33.171429 | max_line_length: 83 | alphanum_fraction: 0.683032
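The same upload/download flow outside of unittest might look like the sketch below; it assumes uetai is installed and wandb is logged in, and the method names are taken from the test above:

```python
# Sketch: log a local folder as a dataset artifact, then fetch it back by name.
import os
import torch
from uetai.logger.summary_writer import SummaryWriter

logger = SummaryWriter('uetai', log_tool='wandb')

os.makedirs('./tmp/dataset', exist_ok=True)
torch.save(torch.randn((10, 10)), './tmp/dataset/datapoint.tmp')

artifact = logger.log_dataset_artifact('./tmp/dataset', dataset_name='dummy-set')
down_path, _ = logger.download_dataset_artifact(dataset_name='dummy-set')
print('downloaded to', down_path)
```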
hexsha: b63f97f78a429f5a181f3b9a31e78d4e2ac53e8e | size: 53 | ext: py | lang: Python
repo: marcopaz/pyjo_mongo | path: pyjo_mongo/__init__.py | head: 3d30d7272f7a74eca55716816f4e0da8ba1cc2e8 | licenses: ["MIT"]
max_stars_count: 3 (2018-03-24T07:36:37.000Z – 2019-04-25T15:17:11.000Z) | max_issues_count: 1 (2018-01-31T13:25:20.000Z – 2018-01-31T13:25:20.000Z) | max_forks_count: 3 (2018-01-25T14:07:35.000Z – 2018-05-10T12:27:47.000Z)
content:
__version__ = '0.9.1'
from .document import Document
avg_line_length: 17.666667 | max_line_length: 30 | alphanum_fraction: 0.754717
hexsha: a784ba89bae1db295170663a7e7f316e2c05ffba | size: 2,623 | ext: py | lang: Python
repo: satroutr/poppy | path: poppy/cmd/akamai_update_papi_property_for_mod_san.py | head: 27417f86854d9e0a04726acc263ef0a2ce9f8f6e | licenses: ["Apache-2.0"]
max_stars_count: 3 (2017-07-05T20:09:59.000Z – 2018-11-27T22:02:57.000Z) | max_issues_count: 24 (2017-04-18T15:14:04.000Z – 2019-03-20T19:09:07.000Z) | max_forks_count: 8 (2017-04-03T13:24:27.000Z – 2021-11-08T20:28:10.000Z)
content:
# Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from oslo_config import cfg
from oslo_log import log
from poppy.common import cli
from poppy.provider.akamai.background_jobs.update_property import \
update_property_flow
LOG = log.getLogger(__name__)
CLI_OPT = [
cfg.StrOpt('domain_name',
required=True,
help='The domain you want to add in host name (cnameFrom)'),
cfg.StrOpt('san_cert_name',
required=True,
help='Cert type of this cert'),
cfg.StrOpt('update_type',
default="hostsnames",
help='Update type for this update, available types are:'
'hostsnames, secureEdgeHost, rules'),
cfg.StrOpt('action',
default="add",
help='What kind of action, do you want "add" or "remove" '
'hostnames'),
cfg.StrOpt('property_spec',
default='akamai_https_san_config_numbers',
help='Property spec of the property to be updated'),
cfg.StrOpt('san_cert_domain_suffix',
default='edgekey.net',
help='Property spec of the property to be updated'),
]
@cli.runnable
def run():
# TODO(kgriffs): For now, we have to use the global config
# to pick up common options from openstack.common.log, since
# that module uses the global CONF instance exclusively.
conf = cfg.ConfigOpts()
conf.register_cli_opts(CLI_OPT)
conf(prog='akamai-papi-update')
LOG.info("%s: %s to %s, on property: %s" % (
conf.action,
conf.domain_name,
conf.san_cert_name,
conf.property_spec
))
update_info_list = json.dumps([
(conf.action,
{
"cnameFrom": conf.domain_name,
"cnameTo": '.'.join([conf.san_cert_name,
conf.san_cert_domain_suffix]),
"cnameType": "EDGE_HOSTNAME"
})
])
update_property_flow.run_update_property_flow(
conf.property_spec, conf.update_type, update_info_list)
avg_line_length: 33.202532 | max_line_length: 75 | alphanum_fraction: 0.635151
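A worked example of the payload that run() assembles before handing it to the update-property flow; the domain and certificate names are made up:

```python
# For action="add", domain_name="www.example.com", san_cert_name="secure1.san"
# and the default san_cert_domain_suffix="edgekey.net", run() builds:
import json

update_info_list = json.dumps([
    ("add", {
        "cnameFrom": "www.example.com",
        "cnameTo": "secure1.san.edgekey.net",
        "cnameType": "EDGE_HOSTNAME",
    })
])
print(update_info_list)
```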
hexsha: f5fa4273252097512890b89745fdf70dcfb11d6c | size: 3,550 | ext: py | lang: Python
repo: anonymous-rev/review | path: run.py | head: 2cf8ef091ef4efc33752a184ab54ea95f1c921e5 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
import os
import subprocess
SEEDS = range(10)
ALGORITHMS = ["m2td3", "soft_m2td3"]
ENVIRONMENTS_1_1 = ["Antv2-1_3", "HalfCheetahv2-1_4", "Hopperv2-1_3"]
ENVIRONMENTS_1_2 = [
"HumanoidStandupv2-1_16",
"InvertedPendulumv2-1_31",
"Walker2dv2-1_4",
]
ENVIRONMENTS_2_1 = ["Antv2-2_3_3", "HalfCheetahv2-2_4_7", "Hopperv2-2_3_3"]
ENVIRONMENTS_2_2 = [
"HumanoidStandupv2-2_16_8",
"InvertedPendulumv2-2_31_11",
"Walker2dv2-2_4_5",
]
ENVIRONMENTS_3_1 = ["Antv2-3_3_3_3", "HalfCheetahv2-3_4_7_4", "Hopperv2-3_3_3_4"]
ENVIRONMENTS_3_2 = [
"HumanoidStandupv2-3_16_5_8",
"Walker2dv2-3_4_5_6",
]
ENVIRONMENTS_SMALL = ["HalfCheetahv2-1_3", "Hopperv2-1_2"]
MAX_STEPS_DICT = {1: 2000000, 2: 4000000, 3: 5000000}
EVALUATE_INTERVAL_DICT = {1: 100000, 2: 200000, 3: 250000}
def wait(procs):
for proc_i, proc in enumerate(procs):
_ = proc.communicate()
return []
def extract_dim(environment):
dim = int(environment[environment.index("-") + 1])
return dim
def make_train_proc(algorithm, environment, seed, max_steps, evaluate_interval):
train_cmd = [
"python",
"main.py",
f"algorithm={algorithm}",
f"environment={environment}",
f"system.seed={seed}",
f"experiment_name={seed}_{algorithm}_{environment}",
f"algorithm.max_steps={max_steps}",
f"evaluation.evaluate_interval={evaluate_interval}",
]
with open(f"logs/{seed}_{algorithm}_{environment}.log", "w") as f:
proc = subprocess.Popen(
train_cmd, stdout=f, stderr=subprocess.PIPE, env=os.environ
)
return proc
def make_eval_proc(algorithm, environment, seed, max_steps, evaluate_interval, dim):
eval_cmd = [
"python",
"evaluate_mujoco.py",
"--dir",
f"experiments/{seed}_{algorithm}_{environment}",
"--interval",
f"{evaluate_interval}",
"--max_iteration",
f"{max_steps}",
"--dim_evaluate_point",
f"{dim}",
]
with open(f"logs/{seed}_{algorithm}_{environment}.log", "a") as f:
proc = subprocess.Popen(
eval_cmd, stdout=f, stderr=subprocess.PIPE, env=os.environ
)
return proc
def run_training(environment, algorithms):
procs = []
dim = extract_dim(environment)
max_steps, evaluate_interval = MAX_STEPS_DICT[dim], EVALUATE_INTERVAL_DICT[dim]
for algorithm in algorithms:
for seed in SEEDS:
proc = make_train_proc(
algorithm, environment, seed, max_steps, evaluate_interval
)
procs.append(proc)
return procs
def run_evaluate(environment, algorithms):
procs = []
dim = extract_dim(environment)
max_steps, evaluate_interval = MAX_STEPS_DICT[dim], EVALUATE_INTERVAL_DICT[dim]
for algorithm in algorithms:
for seed in SEEDS:
proc = make_eval_proc(
algorithm, environment, seed, max_steps, evaluate_interval, dim
)
procs.append(proc)
return procs
def run_algorithms(environment, algorithms):
procs = run_training(environment, algorithms)
procs = wait(procs)
procs = run_evaluate(environment, algorithms)
procs = wait(procs)
def main():
for environment in (
ENVIRONMENTS_1_1
+ ENVIRONMENTS_1_2
+ ENVIRONMENTS_2_1
+ ENVIRONMENTS_2_2
+ ENVIRONMENTS_3_1
+ ENVIRONMENTS_3_2
+ ENVIRONMENTS_SMALL
):
run_algorithms(environment, ALGORITHMS)
if __name__ == "__main__":
main()
avg_line_length: 26.893939 | max_line_length: 84 | alphanum_fraction: 0.644789
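A minimal sketch of running a single environment/algorithm pair through the helpers above; it assumes main.py and evaluate_mujoco.py from the same repository are present and that the script is importable as run:

```python
# Sketch: train and then evaluate one algorithm on one small environment.
import os
import run  # the script above

os.makedirs("logs", exist_ok=True)   # the Popen calls write per-seed log files here
run.run_algorithms("HalfCheetahv2-1_3", ["m2td3"])
```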
hexsha: d303c609c59dc3b54a82868bf196e8ef84d6d7c7 | size: 27,727 | ext: py | lang: Python
repo: shiragolda/highfinesse_wavemeter_webapp | path: wlmData.py | head: 5f9dadd44c51efd229e2df6a0e8b7f378b85b245 | licenses: ["MIT"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
######################################################################################################
# @package wlmData
# @file wlmData.py
# @copyright HighFinesse GmbH.
# @author Lovas Szilard <lovas@highfinesse.de>
# @date 2018.09.14
# @version 0.2
#
# Homepage: http://www.highfinesse.com/
#
# @brief Python wrapper for wlmData.dll.
#
# Generator script: wlmData_py_wrapper.m
#
# Changelog:
# ----------
# 2018.09.12
# v0.1 - Initial release
# 2018.09.14
# v0.2 - Constant values added
# 2018.09.15
# v0.3 - Constant values separated to wlmConst.py, LoadDLL() added
# Updated for linux support Jan 2021 by Shira Jackson
#/
import ctypes
import sys
import time
def LoadDLL():
# windows server:
if sys.platform=='win32':
dll = ctypes.WinDLL("wlmData.dll")
print("Loaded windows server library")
# linux clients:
elif sys.platform=='linux':
try:
#print("here")
dll = ctypes.cdll.LoadLibrary("libwlmData.so")
time.sleep(2)
print("Loaded linux client library")
except Exception as e:
            print(e)
            return None
else:
print("Unknown OS")
return None
# LONG_PTR Instantiate(long RFC, long Mode, LONG_PTR P1, long P2)
dll.Instantiate.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.c_long ]
dll.Instantiate.restype = ctypes.POINTER(ctypes.c_long)
# long WaitForWLMEvent(lref Mode, lref IntVal, dref DblVal)
dll.WaitForWLMEvent.argtypes = [ ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double) ]
dll.WaitForWLMEvent.restype = ctypes.c_long
# long WaitForWLMEventEx(lref Ver, lref Mode, lref IntVal, dref DblVal, lref Res1)
dll.WaitForWLMEventEx.argtypes = [ ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double), ctypes.POINTER(ctypes.c_long) ]
dll.WaitForWLMEventEx.restype = ctypes.c_long
# long WaitForNextWLMEvent(lref Mode, lref IntVal, dref DblVal)
dll.WaitForNextWLMEvent.argtypes = [ ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double) ]
dll.WaitForNextWLMEvent.restype = ctypes.c_long
# long WaitForNextWLMEventEx(lref Ver, lref Mode, lref IntVal, dref DblVal, lref Res1)
dll.WaitForNextWLMEventEx.argtypes = [ ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double), ctypes.POINTER(ctypes.c_long) ]
dll.WaitForNextWLMEventEx.restype = ctypes.c_long
# void ClearWLMEvents(void)
dll.ClearWLMEvents.argtypes = [ ]
dll.ClearWLMEvents.restype = None
# long ControlWLM(long Action, LONG_PTR App, long Ver)
dll.ControlWLM.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.c_long ]
dll.ControlWLM.restype = ctypes.c_long
# long ControlWLMEx(long Action, LONG_PTR App, long Ver, long Delay, long Res)
dll.ControlWLMEx.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.ControlWLMEx.restype = ctypes.c_long
# __int64 SynchroniseWLM(long Mode, __int64 TS)
dll.SynchroniseWLM.argtypes = [ ctypes.c_long, ctypes.c_longlong ]
dll.SynchroniseWLM.restype = ctypes.c_longlong
# long SetMeasurementDelayMethod(long Mode, long Delay)
dll.SetMeasurementDelayMethod.argtypes = [ ctypes.c_long, ctypes.c_long ]
dll.SetMeasurementDelayMethod.restype = ctypes.c_long
# long SetWLMPriority(long PPC, long Res1, long Res2)
dll.SetWLMPriority.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.SetWLMPriority.restype = ctypes.c_long
# long PresetWLMIndex(long Ver)
dll.PresetWLMIndex.argtypes = [ ctypes.c_long ]
dll.PresetWLMIndex.restype = ctypes.c_long
# long GetWLMVersion(long Ver)
dll.GetWLMVersion.argtypes = [ ctypes.c_long ]
dll.GetWLMVersion.restype = ctypes.c_long
# long GetWLMIndex(long Ver)
dll.GetWLMIndex.argtypes = [ ctypes.c_long ]
dll.GetWLMIndex.restype = ctypes.c_long
# long GetWLMCount(long V)
dll.GetWLMCount.argtypes = [ ctypes.c_long ]
dll.GetWLMCount.restype = ctypes.c_long
# double GetWavelength(double WL)
dll.GetWavelength.argtypes = [ ctypes.c_double ]
dll.GetWavelength.restype = ctypes.c_double
# double GetWavelength2(double WL2)
dll.GetWavelength2.argtypes = [ ctypes.c_double ]
dll.GetWavelength2.restype = ctypes.c_double
# double GetWavelengthNum(long num, double WL)
dll.GetWavelengthNum.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetWavelengthNum.restype = ctypes.c_double
# double GetCalWavelength(long ba, double WL)
dll.GetCalWavelength.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetCalWavelength.restype = ctypes.c_double
# double GetCalibrationEffect(double CE)
dll.GetCalibrationEffect.argtypes = [ ctypes.c_double ]
dll.GetCalibrationEffect.restype = ctypes.c_double
# double GetFrequency(double F)
dll.GetFrequency.argtypes = [ ctypes.c_double ]
dll.GetFrequency.restype = ctypes.c_double
# double GetFrequency2(double F2)
dll.GetFrequency2.argtypes = [ ctypes.c_double ]
dll.GetFrequency2.restype = ctypes.c_double
# double GetFrequencyNum(long num, double F)
dll.GetFrequencyNum.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetFrequencyNum.restype = ctypes.c_double
# double GetLinewidth(long Index, double LW)
dll.GetLinewidth.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetLinewidth.restype = ctypes.c_double
# double GetLinewidthNum(long num, double LW)
dll.GetLinewidthNum.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetLinewidthNum.restype = ctypes.c_double
# double GetDistance(double D)
dll.GetDistance.argtypes = [ ctypes.c_double ]
dll.GetDistance.restype = ctypes.c_double
# double GetAnalogIn(double AI)
dll.GetAnalogIn.argtypes = [ ctypes.c_double ]
dll.GetAnalogIn.restype = ctypes.c_double
# double GetTemperature(double T)
dll.GetTemperature.argtypes = [ ctypes.c_double ]
dll.GetTemperature.restype = ctypes.c_double
# long SetTemperature(double T)
dll.SetTemperature.argtypes = [ ctypes.c_double ]
dll.SetTemperature.restype = ctypes.c_long
# double GetPressure(double P)
dll.GetPressure.argtypes = [ ctypes.c_double ]
dll.GetPressure.restype = ctypes.c_double
# long SetPressure(long Mode, double P)
dll.SetPressure.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.SetPressure.restype = ctypes.c_long
# double GetExternalInput(long Index, double I)
dll.GetExternalInput.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetExternalInput.restype = ctypes.c_double
# long SetExternalInput(long Index, double I)
dll.SetExternalInput.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.SetExternalInput.restype = ctypes.c_long
# long GetExtraSetting(long Index, lref lGet, dref dGet, sref sGet)
dll.GetExtraSetting.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double), ctypes.c_char_p ]
dll.GetExtraSetting.restype = ctypes.c_long
# long SetExtraSetting(long Index, long lSet, double dSet, sref sSet)
dll.SetExtraSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_double, ctypes.c_char_p ]
dll.SetExtraSetting.restype = ctypes.c_long
# unsigned short GetExposure(unsigned short E)
dll.GetExposure.argtypes = [ ctypes.c_ushort ]
dll.GetExposure.restype = ctypes.c_ushort
# long SetExposure(unsigned short E)
dll.SetExposure.argtypes = [ ctypes.c_ushort ]
dll.SetExposure.restype = ctypes.c_long
# unsigned short GetExposure2(unsigned short E2)
dll.GetExposure2.argtypes = [ ctypes.c_ushort ]
dll.GetExposure2.restype = ctypes.c_ushort
# long SetExposure2(unsigned short E2)
dll.SetExposure2.argtypes = [ ctypes.c_ushort ]
dll.SetExposure2.restype = ctypes.c_long
# long GetExposureNum(long num, long arr, long E)
dll.GetExposureNum.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.GetExposureNum.restype = ctypes.c_long
# long SetExposureNum(long num, long arr, long E)
dll.SetExposureNum.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.SetExposureNum.restype = ctypes.c_long
# double GetExposureNumEx(long num, long arr, double E)
dll.GetExposureNumEx.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_double ]
dll.GetExposureNumEx.restype = ctypes.c_double
# long SetExposureNumEx(long num, long arr, double E)
dll.SetExposureNumEx.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_double ]
dll.SetExposureNumEx.restype = ctypes.c_long
# bool GetExposureMode(bool EM)
dll.GetExposureMode.argtypes = [ ctypes.c_bool ]
dll.GetExposureMode.restype = ctypes.c_bool
# long SetExposureMode(bool EM)
dll.SetExposureMode.argtypes = [ ctypes.c_bool ]
dll.SetExposureMode.restype = ctypes.c_long
# long GetExposureModeNum(long num, bool EM)
dll.GetExposureModeNum.argtypes = [ ctypes.c_long, ctypes.c_bool ]
dll.GetExposureModeNum.restype = ctypes.c_long
# long SetExposureModeNum(long num, bool EM)
dll.SetExposureModeNum.argtypes = [ ctypes.c_long, ctypes.c_bool ]
dll.SetExposureModeNum.restype = ctypes.c_long
# long GetExposureRange(long ER)
dll.GetExposureRange.argtypes = [ ctypes.c_long ]
dll.GetExposureRange.restype = ctypes.c_long
# long GetAutoExposureSetting(long num, long AES, lref iVal, dref dVal)
dll.GetAutoExposureSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double) ]
dll.GetAutoExposureSetting.restype = ctypes.c_long
# long SetAutoExposureSetting(long num, long AES, long iVal, double dVal)
dll.SetAutoExposureSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long, ctypes.c_double ]
dll.SetAutoExposureSetting.restype = ctypes.c_long
# unsigned short GetResultMode(unsigned short RM)
dll.GetResultMode.argtypes = [ ctypes.c_ushort ]
dll.GetResultMode.restype = ctypes.c_ushort
# long SetResultMode(unsigned short RM)
dll.SetResultMode.argtypes = [ ctypes.c_ushort ]
dll.SetResultMode.restype = ctypes.c_long
# unsigned short GetRange(unsigned short R)
dll.GetRange.argtypes = [ ctypes.c_ushort ]
dll.GetRange.restype = ctypes.c_ushort
# long SetRange(unsigned short R)
dll.SetRange.argtypes = [ ctypes.c_ushort ]
dll.SetRange.restype = ctypes.c_long
# unsigned short GetPulseMode(unsigned short PM)
dll.GetPulseMode.argtypes = [ ctypes.c_ushort ]
dll.GetPulseMode.restype = ctypes.c_ushort
# long SetPulseMode(unsigned short PM)
dll.SetPulseMode.argtypes = [ ctypes.c_ushort ]
dll.SetPulseMode.restype = ctypes.c_long
# long GetPulseDelay(long PD)
dll.GetPulseDelay.argtypes = [ ctypes.c_long ]
dll.GetPulseDelay.restype = ctypes.c_long
# long SetPulseDelay(long PD)
dll.SetPulseDelay.argtypes = [ ctypes.c_long ]
dll.SetPulseDelay.restype = ctypes.c_long
# unsigned short GetWideMode(unsigned short WM)
dll.GetWideMode.argtypes = [ ctypes.c_ushort ]
dll.GetWideMode.restype = ctypes.c_ushort
# long SetWideMode(unsigned short WM)
dll.SetWideMode.argtypes = [ ctypes.c_ushort ]
dll.SetWideMode.restype = ctypes.c_long
# long GetDisplayMode(long DM)
dll.GetDisplayMode.argtypes = [ ctypes.c_long ]
dll.GetDisplayMode.restype = ctypes.c_long
# long SetDisplayMode(long DM)
dll.SetDisplayMode.argtypes = [ ctypes.c_long ]
dll.SetDisplayMode.restype = ctypes.c_long
# bool GetFastMode(bool FM)
dll.GetFastMode.argtypes = [ ctypes.c_bool ]
dll.GetFastMode.restype = ctypes.c_bool
# long SetFastMode(bool FM)
dll.SetFastMode.argtypes = [ ctypes.c_bool ]
dll.SetFastMode.restype = ctypes.c_long
# bool GetLinewidthMode(bool LM)
dll.GetLinewidthMode.argtypes = [ ctypes.c_bool ]
dll.GetLinewidthMode.restype = ctypes.c_bool
# long SetLinewidthMode(bool LM)
dll.SetLinewidthMode.argtypes = [ ctypes.c_bool ]
dll.SetLinewidthMode.restype = ctypes.c_long
# bool GetDistanceMode(bool DM)
dll.GetDistanceMode.argtypes = [ ctypes.c_bool ]
dll.GetDistanceMode.restype = ctypes.c_bool
# long SetDistanceMode(bool DM)
dll.SetDistanceMode.argtypes = [ ctypes.c_bool ]
dll.SetDistanceMode.restype = ctypes.c_long
# long GetSwitcherMode(long SM)
dll.GetSwitcherMode.argtypes = [ ctypes.c_long ]
dll.GetSwitcherMode.restype = ctypes.c_long
# long SetSwitcherMode(long SM)
dll.SetSwitcherMode.argtypes = [ ctypes.c_long ]
dll.SetSwitcherMode.restype = ctypes.c_long
# long GetSwitcherChannel(long CH)
dll.GetSwitcherChannel.argtypes = [ ctypes.c_long ]
dll.GetSwitcherChannel.restype = ctypes.c_long
# long SetSwitcherChannel(long CH)
dll.SetSwitcherChannel.argtypes = [ ctypes.c_long ]
dll.SetSwitcherChannel.restype = ctypes.c_long
# long GetSwitcherSignalStates(long Signal, lref Use, lref Show)
dll.GetSwitcherSignalStates.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_long) ]
dll.GetSwitcherSignalStates.restype = ctypes.c_long
# long SetSwitcherSignalStates(long Signal, long Use, long Show)
dll.SetSwitcherSignalStates.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.SetSwitcherSignalStates.restype = ctypes.c_long
# long SetSwitcherSignal(long Signal, long Use, long Show)
dll.SetSwitcherSignal.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.SetSwitcherSignal.restype = ctypes.c_long
# long GetAutoCalMode(long ACM)
dll.GetAutoCalMode.argtypes = [ ctypes.c_long ]
dll.GetAutoCalMode.restype = ctypes.c_long
# long SetAutoCalMode(long ACM)
dll.SetAutoCalMode.argtypes = [ ctypes.c_long ]
dll.SetAutoCalMode.restype = ctypes.c_long
# long GetAutoCalSetting(long ACS, lref val, long Res1, lref Res2)
dll.GetAutoCalSetting.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.c_long, ctypes.POINTER(ctypes.c_long) ]
dll.GetAutoCalSetting.restype = ctypes.c_long
# long SetAutoCalSetting(long ACS, long val, long Res1, long Res2)
dll.SetAutoCalSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.SetAutoCalSetting.restype = ctypes.c_long
# long GetActiveChannel(long Mode, lref Port, long Res1)
dll.GetActiveChannel.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.c_long ]
dll.GetActiveChannel.restype = ctypes.c_long
# long SetActiveChannel(long Mode, long Port, long CH, long Res1)
dll.SetActiveChannel.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.SetActiveChannel.restype = ctypes.c_long
# long GetChannelsCount(long C)
dll.GetChannelsCount.argtypes = [ ctypes.c_long ]
dll.GetChannelsCount.restype = ctypes.c_long
# unsigned short GetOperationState(unsigned short OS)
dll.GetOperationState.argtypes = [ ctypes.c_ushort ]
dll.GetOperationState.restype = ctypes.c_ushort
# long Operation(unsigned short Op)
dll.Operation.argtypes = [ ctypes.c_ushort ]
dll.Operation.restype = ctypes.c_long
# long SetOperationFile(sref lpFile)
dll.SetOperationFile.argtypes = [ ctypes.c_char_p ]
dll.SetOperationFile.restype = ctypes.c_long
# long Calibration(long Type, long Unit, double Value, long Channel)
dll.Calibration.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_double, ctypes.c_long ]
dll.Calibration.restype = ctypes.c_long
# long RaiseMeasurementEvent(long Mode)
dll.RaiseMeasurementEvent.argtypes = [ ctypes.c_long ]
dll.RaiseMeasurementEvent.restype = ctypes.c_long
# long TriggerMeasurement(long Action)
dll.TriggerMeasurement.argtypes = [ ctypes.c_long ]
dll.TriggerMeasurement.restype = ctypes.c_long
# long GetTriggerState(long TS)
dll.GetTriggerState.argtypes = [ ctypes.c_long ]
dll.GetTriggerState.restype = ctypes.c_long
# long GetInterval(long I)
dll.GetInterval.argtypes = [ ctypes.c_long ]
dll.GetInterval.restype = ctypes.c_long
# long SetInterval(long I)
dll.SetInterval.argtypes = [ ctypes.c_long ]
dll.SetInterval.restype = ctypes.c_long
# bool GetIntervalMode(bool IM)
dll.GetIntervalMode.argtypes = [ ctypes.c_bool ]
dll.GetIntervalMode.restype = ctypes.c_bool
# long SetIntervalMode(bool IM)
dll.SetIntervalMode.argtypes = [ ctypes.c_bool ]
dll.SetIntervalMode.restype = ctypes.c_long
# long GetBackground(long BG)
dll.GetBackground.argtypes = [ ctypes.c_long ]
dll.GetBackground.restype = ctypes.c_long
# long SetBackground(long BG)
dll.SetBackground.argtypes = [ ctypes.c_long ]
dll.SetBackground.restype = ctypes.c_long
# long GetAveragingSettingNum(long num, long AS, long Value)
dll.GetAveragingSettingNum.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.GetAveragingSettingNum.restype = ctypes.c_long
# long SetAveragingSettingNum(long num, long AS, long Value)
dll.SetAveragingSettingNum.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.SetAveragingSettingNum.restype = ctypes.c_long
# bool GetLinkState(bool LS)
dll.GetLinkState.argtypes = [ ctypes.c_bool ]
dll.GetLinkState.restype = ctypes.c_bool
# long SetLinkState(bool LS)
dll.SetLinkState.argtypes = [ ctypes.c_bool ]
dll.SetLinkState.restype = ctypes.c_long
# void LinkSettingsDlg(void)
dll.LinkSettingsDlg.argtypes = [ ]
dll.LinkSettingsDlg.restype = None
# long GetPatternItemSize(long Index)
dll.GetPatternItemSize.argtypes = [ ctypes.c_long ]
dll.GetPatternItemSize.restype = ctypes.c_long
# long GetPatternItemCount(long Index)
dll.GetPatternItemCount.argtypes = [ ctypes.c_long ]
dll.GetPatternItemCount.restype = ctypes.c_long
# ULONG_PTR GetPattern(long Index)
dll.GetPattern.argtypes = [ ctypes.c_long ]
dll.GetPattern.restype = ctypes.POINTER(ctypes.c_ulong)
# ULONG_PTR GetPatternNum(long Chn, long Index)
dll.GetPatternNum.argtypes = [ ctypes.c_long, ctypes.c_long ]
dll.GetPatternNum.restype = ctypes.POINTER(ctypes.c_ulong)
# long GetPatternData(long Index, ULONG_PTR PArray)
dll.GetPatternData.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_ulong) ]
dll.GetPatternData.restype = ctypes.c_long
# long GetPatternDataNum(long Chn, long Index, ULONG_PTR PArray)
dll.GetPatternDataNum.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.POINTER(ctypes.c_ulong) ]
dll.GetPatternDataNum.restype = ctypes.c_long
# long SetPattern(long Index, long iEnable)
dll.SetPattern.argtypes = [ ctypes.c_long, ctypes.c_long ]
dll.SetPattern.restype = ctypes.c_long
# long SetPatternData(long Index, ULONG_PTR PArray)
dll.SetPatternData.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_ulong) ]
dll.SetPatternData.restype = ctypes.c_long
# bool GetAnalysisMode(bool AM)
dll.GetAnalysisMode.argtypes = [ ctypes.c_bool ]
dll.GetAnalysisMode.restype = ctypes.c_bool
# long SetAnalysisMode(bool AM)
dll.SetAnalysisMode.argtypes = [ ctypes.c_bool ]
dll.SetAnalysisMode.restype = ctypes.c_long
# long GetAnalysisItemSize(long Index)
dll.GetAnalysisItemSize.argtypes = [ ctypes.c_long ]
dll.GetAnalysisItemSize.restype = ctypes.c_long
# long GetAnalysisItemCount(long Index)
dll.GetAnalysisItemCount.argtypes = [ ctypes.c_long ]
dll.GetAnalysisItemCount.restype = ctypes.c_long
# ULONG_PTR GetAnalysis(long Index)
dll.GetAnalysis.argtypes = [ ctypes.c_long ]
dll.GetAnalysis.restype = ctypes.POINTER(ctypes.c_ulong)
# long GetAnalysisData(long Index, ULONG_PTR PArray)
dll.GetAnalysisData.argtypes = [ ctypes.c_long, ctypes.POINTER(ctypes.c_ulong) ]
dll.GetAnalysisData.restype = ctypes.c_long
# long SetAnalysis(long Index, long iEnable)
dll.SetAnalysis.argtypes = [ ctypes.c_long, ctypes.c_long ]
dll.SetAnalysis.restype = ctypes.c_long
# long GetMinPeak(long M1)
dll.GetMinPeak.argtypes = [ ctypes.c_long ]
dll.GetMinPeak.restype = ctypes.c_long
# long GetMinPeak2(long M2)
dll.GetMinPeak2.argtypes = [ ctypes.c_long ]
dll.GetMinPeak2.restype = ctypes.c_long
# long GetMaxPeak(long X1)
dll.GetMaxPeak.argtypes = [ ctypes.c_long ]
dll.GetMaxPeak.restype = ctypes.c_long
# long GetMaxPeak2(long X2)
dll.GetMaxPeak2.argtypes = [ ctypes.c_long ]
dll.GetMaxPeak2.restype = ctypes.c_long
# long GetAvgPeak(long A1)
dll.GetAvgPeak.argtypes = [ ctypes.c_long ]
dll.GetAvgPeak.restype = ctypes.c_long
# long GetAvgPeak2(long A2)
dll.GetAvgPeak2.argtypes = [ ctypes.c_long ]
dll.GetAvgPeak2.restype = ctypes.c_long
# long SetAvgPeak(long PA)
dll.SetAvgPeak.argtypes = [ ctypes.c_long ]
dll.SetAvgPeak.restype = ctypes.c_long
# long GetAmplitudeNum(long num, long Index, long A)
dll.GetAmplitudeNum.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.GetAmplitudeNum.restype = ctypes.c_long
# double GetIntensityNum(long num, double I)
dll.GetIntensityNum.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetIntensityNum.restype = ctypes.c_double
# double GetPowerNum(long num, double P)
dll.GetPowerNum.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetPowerNum.restype = ctypes.c_double
# unsigned short GetDelay(unsigned short D)
dll.GetDelay.argtypes = [ ctypes.c_ushort ]
dll.GetDelay.restype = ctypes.c_ushort
# long SetDelay(unsigned short D)
dll.SetDelay.argtypes = [ ctypes.c_ushort ]
dll.SetDelay.restype = ctypes.c_long
# unsigned short GetShift(unsigned short S)
dll.GetShift.argtypes = [ ctypes.c_ushort ]
dll.GetShift.restype = ctypes.c_ushort
# long SetShift(unsigned short S)
dll.SetShift.argtypes = [ ctypes.c_ushort ]
dll.SetShift.restype = ctypes.c_long
# unsigned short GetShift2(unsigned short S2)
dll.GetShift2.argtypes = [ ctypes.c_ushort ]
dll.GetShift2.restype = ctypes.c_ushort
# long SetShift2(unsigned short S2)
dll.SetShift2.argtypes = [ ctypes.c_ushort ]
dll.SetShift2.restype = ctypes.c_long
# bool GetDeviationMode(bool DM)
dll.GetDeviationMode.argtypes = [ ctypes.c_bool ]
dll.GetDeviationMode.restype = ctypes.c_bool
# long SetDeviationMode(bool DM)
dll.SetDeviationMode.argtypes = [ ctypes.c_bool ]
dll.SetDeviationMode.restype = ctypes.c_long
# double GetDeviationReference(double DR)
dll.GetDeviationReference.argtypes = [ ctypes.c_double ]
dll.GetDeviationReference.restype = ctypes.c_double
# long SetDeviationReference(double DR)
dll.SetDeviationReference.argtypes = [ ctypes.c_double ]
dll.SetDeviationReference.restype = ctypes.c_long
# long GetDeviationSensitivity(long DS)
dll.GetDeviationSensitivity.argtypes = [ ctypes.c_long ]
dll.GetDeviationSensitivity.restype = ctypes.c_long
# long SetDeviationSensitivity(long DS)
dll.SetDeviationSensitivity.argtypes = [ ctypes.c_long ]
dll.SetDeviationSensitivity.restype = ctypes.c_long
# double GetDeviationSignal(double DS)
dll.GetDeviationSignal.argtypes = [ ctypes.c_double ]
dll.GetDeviationSignal.restype = ctypes.c_double
# double GetDeviationSignalNum(long Port, double DS)
dll.GetDeviationSignalNum.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.GetDeviationSignalNum.restype = ctypes.c_double
# long SetDeviationSignal(double DS)
dll.SetDeviationSignal.argtypes = [ ctypes.c_double ]
dll.SetDeviationSignal.restype = ctypes.c_long
# long SetDeviationSignalNum(long Port, double DS)
dll.SetDeviationSignalNum.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.SetDeviationSignalNum.restype = ctypes.c_long
# double RaiseDeviationSignal(long iType, double dSignal)
dll.RaiseDeviationSignal.argtypes = [ ctypes.c_long, ctypes.c_double ]
dll.RaiseDeviationSignal.restype = ctypes.c_double
# long GetPIDCourse(sref PIDC)
dll.GetPIDCourse.argtypes = [ ctypes.c_char_p ]
dll.GetPIDCourse.restype = ctypes.c_long
# long SetPIDCourse(sref PIDC)
dll.SetPIDCourse.argtypes = [ ctypes.c_char_p ]
dll.SetPIDCourse.restype = ctypes.c_long
# long GetPIDCourseNum(long Port, sref PIDC)
dll.GetPIDCourseNum.argtypes = [ ctypes.c_long, ctypes.c_char_p ]
dll.GetPIDCourseNum.restype = ctypes.c_long
# long SetPIDCourseNum(long Port, sref PIDC)
dll.SetPIDCourseNum.argtypes = [ ctypes.c_long, ctypes.c_char_p ]
dll.SetPIDCourseNum.restype = ctypes.c_long
# long GetPIDSetting(long PS, long Port, lref iSet, dref dSet)
dll.GetPIDSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double) ]
dll.GetPIDSetting.restype = ctypes.c_long
# long SetPIDSetting(long PS, long Port, long iSet, double dSet)
dll.SetPIDSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long, ctypes.c_double ]
dll.SetPIDSetting.restype = ctypes.c_long
# long GetLaserControlSetting(long PS, long Port, lref iSet, dref dSet, sref sSet)
dll.GetLaserControlSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.POINTER(ctypes.c_long), ctypes.POINTER(ctypes.c_double), ctypes.c_char_p ]
dll.GetLaserControlSetting.restype = ctypes.c_long
# long SetLaserControlSetting(long PS, long Port, long iSet, double dSet, sref sSet)
dll.SetLaserControlSetting.argtypes = [ ctypes.c_long, ctypes.c_long, ctypes.c_long, ctypes.c_double, ctypes.c_char_p ]
dll.SetLaserControlSetting.restype = ctypes.c_long
# long ClearPIDHistory(long Port)
dll.ClearPIDHistory.argtypes = [ ctypes.c_long ]
dll.ClearPIDHistory.restype = ctypes.c_long
# double ConvertUnit(double Val, long uFrom, long uTo)
dll.ConvertUnit.argtypes = [ ctypes.c_double, ctypes.c_long, ctypes.c_long ]
dll.ConvertUnit.restype = ctypes.c_double
# double ConvertDeltaUnit(double Base, double Delta, long uBase, long uFrom, long uTo)
dll.ConvertDeltaUnit.argtypes = [ ctypes.c_double, ctypes.c_double, ctypes.c_long, ctypes.c_long, ctypes.c_long ]
dll.ConvertDeltaUnit.restype = ctypes.c_double
# bool GetReduced(bool R)
dll.GetReduced.argtypes = [ ctypes.c_bool ]
dll.GetReduced.restype = ctypes.c_bool
# long SetReduced(bool R)
dll.SetReduced.argtypes = [ ctypes.c_bool ]
dll.SetReduced.restype = ctypes.c_long
# unsigned short GetScale(unsigned short S)
dll.GetScale.argtypes = [ ctypes.c_ushort ]
dll.GetScale.restype = ctypes.c_ushort
# long SetScale(unsigned short S)
dll.SetScale.argtypes = [ ctypes.c_ushort ]
dll.SetScale.restype = ctypes.c_long
return dll
avg_line_length: 40.715125 | max_line_length: 201 | alphanum_fraction: 0.711364
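A minimal usage sketch for the wrapper above; it assumes the HighFinesse library (wlmData.dll or libwlmData.so) is installed and a wavemeter application is running. The 0.0 arguments follow the common convention for these getters, where the parameter is effectively a dummy value:

```python
# Sketch only: load the vendor library and read current values.
import wlmData

dll = wlmData.LoadDLL()
if dll is not None:
    wavelength_nm = dll.GetWavelength(0.0)   # argument is a dummy value in this API
    frequency_thz = dll.GetFrequency(0.0)
    temperature_c = dll.GetTemperature(0.0)
    print("lambda = %.6f nm, f = %.6f THz, T = %.2f C" % (
        wavelength_nm, frequency_thz, temperature_c))
```

Error conditions are typically signalled through special return values defined in wlmConst.py (mentioned in the header above), so real code should check the result before using it.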
hexsha: 4792c99c5eb3c7ec94067cf58fe32deb1be04462 | size: 5,787 | ext: py | lang: Python
repo: DavidMetzIMT/pyEIT | path: pyeit/mesh/utils.py | head: a3c64f7b869e7a00a102fc93feea4999c8bed6d1 | licenses: ["BSD-3-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
# coding: utf-8
# pylint: disable=invalid-name, no-member, no-name-in-module
""" post process for distmesh 2d and 3d """
# Copyright (c) Benyuan Liu. All rights reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
from __future__ import division, absolute_import, print_function
import numpy as np
import scipy.linalg as la
def dist(p):
"""distances to origin of nodes. '3D', 'ND' compatible
Parameters
----------
p : array_like
points in 2D, 3D. i.e., in 3D
[[x, y, z],
[2, 3, 3],
...
[1, 2, 1]]
Returns
-------
array_like
distances of points to origin
"""
if p.ndim == 1:
d = np.sqrt(np.sum(p**2))
else:
d = np.sqrt(np.sum(p**2, axis=1))
return d
def edge_project(pts, fd, h0=1.0):
"""project points back on edge"""
g_vec = edge_grad(pts, fd, h0)
return pts - g_vec
def edge_grad(p, fd, h0=1.0):
"""
project points back on the boundary (where fd=0) using numerical gradient
3D, ND compatible
Parameters
----------
p : array_like
points on 2D, 3D
fd : str
function handler of distances
h0 : float
minimal distance
Returns
-------
array_like
gradients of points on the boundary
Note
----
numerical gradient:
f'_x = (f(p+delta_x) - f(p)) / delta_x
f'_y = (f(p+delta_y) - f(p)) / delta_y
f'_z = (f(p+delta_z) - f(p)) / delta_z
you should specify h0 according to your actual mesh size
"""
# d_eps = np.sqrt(np.finfo(float).eps)*h0
# d_eps = np.sqrt(np.finfo(float).eps)
d_eps = 1e-8 * h0
# get dimensions
if np.ndim(p) == 1:
p = p[:, np.newaxis]
# distance
d = fd(p)
# calculate the gradient of each axis
ndim = p.shape[1]
pts_xyz = np.repeat(p, ndim, axis=0)
delta_xyz = np.repeat([np.eye(ndim)], p.shape[0], axis=0).reshape(-1, ndim)
deps_xyz = d_eps * delta_xyz
g_xyz = (fd(pts_xyz + deps_xyz) - np.repeat(d, ndim, axis=0)) / d_eps
# normalize gradient, avoid divide by zero
g = g_xyz.reshape(-1, ndim)
g2 = np.sum(g**2, axis=1)
# move unit
g_num = g / g2[:, np.newaxis] * d[:, np.newaxis]
return g_num
def edge_list(tri):
"""
edge of delaunay triangles are unique bars, O(n^2)
besides this duplication test, you may also use fd to test edge bars,
where the two nodes of any edge both satisfy fd=0 (or with a geps gap)
Parameters
----------
tri : array_like
triangles list
"""
bars = tri[:, [[0, 1], [1, 2], [2, 0]]].reshape((-1, 2))
bars = np.sort(bars, axis=1)
bars = bars.view("i, i")
n = bars.shape[0]
ix = [True] * n
for i in range(n - 1):
# skip if already marked as duplicated
if ix[i] is not True:
continue
# mark duplicate entries, at most 2-duplicate bars, if found, break
for j in range(i + 1, n):
if bars[i] == bars[j]:
ix[i], ix[j] = False, False
break
return bars[np.array(ix)].view("i")
def check_ccw(no2xy, el2no):
"""
check whether the simplices are CCW ordered, triangles only
"""
xys = no2xy[el2no]
a = [tri_area(xy) > 0 for xy in xys]
return np.all(a)
def check_order(no2xy, el2no):
"""
loop over all elements, calculate the Area of Elements (aoe)
if AOE > 0, then the order of element is correct
if AOE < 0, reorder the element
Parameters
----------
no2xy : NDArray
Nx2 ndarray, (x,y) locations for points
el2no : NDArray
Mx3 ndarray, elements (triangles) connectivity
Returns
-------
NDArray
ae, area of each element
Notes
-----
tetrahedron should be parsed that the sign of volume is [1, -1, 1, -1]
"""
el_num, n_vertices = np.shape(el2no)
# select ae function
if n_vertices == 3:
_fn = tri_area
elif n_vertices == 4:
_fn = tet_volume
# calculate ae and re-order tri if necessary
for ei in range(el_num):
no = el2no[ei, :]
xy = no2xy[no, :]
v = _fn(xy)
# if CCW, area should be positive, otherwise,
if v < 0:
el2no[ei, [1, 2]] = el2no[ei, [2, 1]]
return el2no
def tri_area(xy):
"""
return area of a triangle, given its tri-coordinates xy
Parameters
----------
xy : NDArray
(x,y) of nodes 1,2,3 given in counterclockwise manner
Returns
-------
float
area of this element
"""
s = xy[[2, 0]] - xy[[1, 2]]
a_tot = 0.50 * la.det(s)
# (should be positive if tri-points are counter-clockwise)
return a_tot
def tet_volume(xyz):
"""calculate the volume of tetrahedron"""
s = xyz[[2, 3, 0]] - xyz[[1, 2, 3]]
v_tot = (1.0 / 6.0) * la.det(s)
return v_tot
def to_polar(xy, shift=True, sort=True):
vec = xy
if shift:
pc = np.median(xy, axis=0)
print(pc)
vec = vec - pc
dist = np.sqrt(np.sum(vec**2, axis=1))
deg = np.rad2deg(np.arctan2(vec[:, 1], vec[:, 0]))
deg = deg % 360
if sort:
ind = np.argsort(deg)
dist, deg = dist[ind], deg[ind]
return dist, deg
def to_xy(dist, deg):
x = dist * np.cos(np.deg2rad(deg))
y = dist * np.sin(np.deg2rad(deg))
return x, y
if __name__ == "__main__":
# test 'edge_project'
def fd_test(p):
"""unit circle/ball"""
if len(p.shape) == 1:
d = np.sqrt(np.sum(p**2)) - 1.0
else:
d = np.sqrt(np.sum(p**2, axis=1)) - 1.0
return d
    p_test = np.array([[1, 2, 3], [2, 2, 2], [1, 3, 3], [1, 1, 1]])
a = edge_project(p_test, fd_test)
# test 'edge_list'
avg_line_length: 23.913223 | max_line_length: 79 | alphanum_fraction: 0.551236
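A quick illustration of edge_project on the unit circle, mirroring the __main__ block above but in 2D; it assumes pyeit is importable (otherwise run it inside the same module) and that points are passed as a NumPy array:

```python
# Points off the unit circle are pulled back onto the fd == 0 boundary.
import numpy as np
from pyeit.mesh.utils import edge_project  # assumption: pyeit is installed

def fd_circle(p):
    """signed distance to the unit circle; works for (N, 2) and (2,) inputs"""
    return np.sqrt(np.sum(np.asarray(p) ** 2, axis=-1)) - 1.0

pts = np.array([[2.0, 0.0], [0.0, 3.0], [0.1, 0.1]])
print(edge_project(pts, fd_circle))  # rows land (approximately) on the unit circle
```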
hexsha: bb2da422850392562a6cfc6d13599d1b52ab7c33 | size: 2,315 | ext: py | lang: Python
repo: kevin0120/airflow | path: airflow/upgrade/rules/airflow_macro_plugin_removed.py | head: fa263cbf0ac002bdb26239ce36d5dc2a1b6251fd | licenses: ["Apache-2.0"]
max_stars_count: 15 (2017-04-06T09:01:50.000Z – 2021-10-02T13:54:31.000Z) | max_issues_count: 36 (2021-11-26T00:08:49.000Z – 2021-11-26T00:09:33.000Z) | max_forks_count: 21 (2017-08-20T03:01:05.000Z – 2021-09-07T06:47:51.000Z)
content:
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from airflow import conf
from airflow.upgrade.rules.base_rule import BaseRule
from airflow.utils.dag_processing import list_py_file_paths
class AirflowMacroPluginRemovedRule(BaseRule):
title = "Remove airflow.AirflowMacroPlugin class"
description = "The airflow.AirflowMacroPlugin class has been removed."
MACRO_PLUGIN_CLASS = "airflow.AirflowMacroPlugin"
def _change_info(self, file_path, line_number):
return "{} will be removed. Affected file: {} (line {})".format(
self.MACRO_PLUGIN_CLASS, file_path, line_number
)
def _check_file(self, file_path):
problems = []
class_name_to_check = self.MACRO_PLUGIN_CLASS.split(".")[-1]
with open(file_path, "r") as file_pointer:
try:
for line_number, line in enumerate(file_pointer, 1):
if class_name_to_check in line:
problems.append(self._change_info(file_path, line_number))
except UnicodeDecodeError:
problems.append("Unable to read python file {}".format(file_path))
return problems
def check(self):
dag_folder = conf.get("core", "dags_folder")
file_paths = list_py_file_paths(directory=dag_folder, include_examples=False)
problems = []
for file_path in file_paths:
if not file_path.endswith(".py"):
continue
problems.extend(self._check_file(file_path))
return problems
avg_line_length: 39.237288 | max_line_length: 85 | alphanum_fraction: 0.700648
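A small sketch of invoking the rule by hand, assuming an Airflow 1.10.x environment where the upgrade-check package above is importable and core/dags_folder is configured:

```python
# Sketch: run the check and print each offending file/line it reports.
from airflow.upgrade.rules.airflow_macro_plugin_removed import (
    AirflowMacroPluginRemovedRule,
)

rule = AirflowMacroPluginRemovedRule()
for problem in rule.check():
    print(problem)
```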
hexsha: c73facaa61f713c6e6a4f9a286d22a3b1e67d527 | size: 7,671 | ext: py | lang: Python
repo: methane/python-doc-ja.sphinx-ja | path: setup.py | head: b911f63318ac715733b5c10dfbc197549c85044f | licenses: ["BSD-2-Clause"]
max_stars_count: null | max_issues_count: null | max_forks_count: null
content:
# -*- coding: utf-8 -*-
try:
from setuptools import setup, find_packages
except ImportError:
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup, find_packages
import os
import sys
from distutils import log
import sphinx
long_desc = '''
Sphinx is a tool that makes it easy to create intelligent and beautiful
documentation for Python projects (or other documents consisting of multiple
reStructuredText sources), written by Georg Brandl. It was originally created
for the new Python documentation, and has excellent facilities for Python
project documentation, but C/C++ is supported as well, and more languages are
planned.
Sphinx uses reStructuredText as its markup language, and many of its strengths
come from the power and straightforwardness of reStructuredText and its parsing
and translating suite, the Docutils.
Among its features are the following:
* Output formats: HTML (including derivative formats such as HTML Help, Epub
and Qt Help), plain text, manual pages and LaTeX or direct PDF output
using rst2pdf
* Extensive cross-references: semantic markup and automatic links
for functions, classes, glossary terms and similar pieces of information
* Hierarchical structure: easy definition of a document tree, with automatic
links to siblings, parents and children
* Automatic indices: general index as well as a module index
* Code handling: automatic highlighting using the Pygments highlighter
* Flexible HTML output using the Jinja 2 templating engine
* Various extensions are available, e.g. for automatic testing of snippets
and inclusion of appropriately formatted docstrings
* Setuptools integration
A development egg can be found `here
<http://bitbucket.org/birkenfeld/sphinx/get/tip.gz#egg=Sphinx-dev>`_.
'''
requires = ['Pygments>=0.8', 'Jinja2>=2.2', 'docutils>=0.5']
if sys.version_info < (2, 4):
print('ERROR: Sphinx requires at least Python 2.4 to run.')
sys.exit(1)
if sys.version_info < (2, 5):
# Python 2.4's distutils doesn't automatically install an egg-info,
# so an existing docutils install won't be detected -- in that case,
# remove the dependency from setup.py
try:
import docutils
if int(docutils.__version__[2]) < 4:
raise ValueError('docutils not recent enough')
except:
pass
else:
del requires[-1]
# The uuid module is new in the stdlib in 2.5
requires.append('uuid>=1.30')
# Provide a "compile_catalog" command that also creates the translated
# JavaScript files if Babel is available.
cmdclass = {}
try:
from babel.messages.pofile import read_po
from babel.messages.frontend import compile_catalog
try:
from simplejson import dump
except ImportError:
from json import dump
except ImportError:
pass
else:
class compile_catalog_plusjs(compile_catalog):
"""
An extended command that writes all message strings that occur in
JavaScript files to a JavaScript file along with the .mo file.
Unfortunately, babel's setup command isn't built very extensible, so
most of the run() code is duplicated here.
"""
def run(self):
compile_catalog.run(self)
po_files = []
js_files = []
if not self.input_file:
if self.locale:
po_files.append((self.locale,
os.path.join(self.directory, self.locale,
'LC_MESSAGES',
self.domain + '.po')))
js_files.append(os.path.join(self.directory, self.locale,
'LC_MESSAGES',
self.domain + '.js'))
else:
for locale in os.listdir(self.directory):
po_file = os.path.join(self.directory, locale,
'LC_MESSAGES',
self.domain + '.po')
if os.path.exists(po_file):
po_files.append((locale, po_file))
js_files.append(os.path.join(self.directory, locale,
'LC_MESSAGES',
self.domain + '.js'))
else:
po_files.append((self.locale, self.input_file))
if self.output_file:
js_files.append(self.output_file)
else:
js_files.append(os.path.join(self.directory, self.locale,
'LC_MESSAGES',
self.domain + '.js'))
for js_file, (locale, po_file) in zip(js_files, po_files):
infile = open(po_file, 'r')
try:
catalog = read_po(infile, locale)
finally:
infile.close()
if catalog.fuzzy and not self.use_fuzzy:
continue
log.info('writing JavaScript strings in catalog %r to %r',
po_file, js_file)
jscatalog = {}
for message in catalog:
if any(x[0].endswith('.js') for x in message.locations):
msgid = message.id
if isinstance(msgid, (list, tuple)):
msgid = msgid[0]
jscatalog[msgid] = message.string
outfile = open(js_file, 'wb')
try:
outfile.write('Documentation.addTranslations(');
dump(dict(
messages=jscatalog,
plural_expr=catalog.plural_expr,
locale=str(catalog.locale)
), outfile)
outfile.write(');')
finally:
outfile.close()
cmdclass['compile_catalog'] = compile_catalog_plusjs
setup(
name='Sphinx',
version=sphinx.__version__,
url='http://sphinx.pocoo.org/',
download_url='http://pypi.python.org/pypi/Sphinx',
license='BSD',
author='Georg Brandl',
author_email='georg@python.org',
description='Python documentation generator',
long_description=long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: Education',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Topic :: Documentation',
'Topic :: Text Processing',
'Topic :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'sphinx-build = sphinx:main',
'sphinx-quickstart = sphinx.quickstart:main',
'sphinx-apidoc = sphinx.apidoc:main',
'sphinx-autogen = sphinx.ext.autosummary.generate:main',
],
'distutils.commands': [
'build_sphinx = sphinx.setup_command:BuildDoc',
],
},
install_requires=requires,
cmdclass=cmdclass,
use_2to3=True,
use_2to3_fixers=['custom_fixers'],
)
| 36.879808
| 80
| 0.575805
|
db4830eab6f19d2bed26fa392b50e3d68e0f8b44
| 4,853
|
py
|
Python
|
benchmark/startQiskit_noisy1786.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_noisy1786.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startQiskit_noisy1786.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=5
# total number=70
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
    # NOTE: use U1 gate (P gate) with \lambda = pi (a 180-degree phase) ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
oracle = QuantumCircuit(controls, name="Zf")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.h(controls[n])
if n >= 2:
oracle.mcu1(pi, controls[1:], controls[0])
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
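# Illustrative sketch, added for clarity and not part of the original benchmark: build_oracle
# expects f to map an n-bit string such as "101" to the character "1" exactly on the marked
# inputs. The qubit count and the lambda below are arbitrary choices used only to show the
# calling convention.
_example_oracle = build_oracle(3, lambda rep: str(int(rep == "101")))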
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[0]) # number=3
prog.h(input_qubit[1]) # number=4
prog.h(input_qubit[2]) # number=5
prog.h(input_qubit[1]) # number=29
prog.cz(input_qubit[3],input_qubit[1]) # number=30
prog.h(input_qubit[1]) # number=31
prog.h(input_qubit[3]) # number=6
prog.h(input_qubit[4]) # number=21
Zf = build_oracle(n, f)
repeat = floor(sqrt(2 ** n) * pi / 4)
for i in range(repeat):
prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[0]) # number=38
prog.cz(input_qubit[1],input_qubit[0]) # number=39
prog.h(input_qubit[0]) # number=40
prog.h(input_qubit[0]) # number=51
prog.cz(input_qubit[1],input_qubit[0]) # number=52
prog.h(input_qubit[0]) # number=53
prog.h(input_qubit[0]) # number=64
prog.cz(input_qubit[1],input_qubit[0]) # number=65
prog.h(input_qubit[0]) # number=66
prog.x(input_qubit[0]) # number=49
prog.h(input_qubit[0]) # number=57
prog.cz(input_qubit[1],input_qubit[0]) # number=58
prog.h(input_qubit[0]) # number=59
prog.h(input_qubit[0]) # number=54
prog.cz(input_qubit[1],input_qubit[0]) # number=55
prog.h(input_qubit[0]) # number=56
prog.h(input_qubit[4]) # number=41
prog.h(input_qubit[0]) # number=61
prog.cz(input_qubit[1],input_qubit[0]) # number=62
prog.h(input_qubit[0]) # number=63
prog.cx(input_qubit[0],input_qubit[1]) # number=67
prog.x(input_qubit[1]) # number=68
prog.cx(input_qubit[0],input_qubit[1]) # number=69
prog.h(input_qubit[2]) # number=25
prog.cz(input_qubit[0],input_qubit[2]) # number=26
prog.h(input_qubit[2]) # number=27
prog.x(input_qubit[2]) # number=23
prog.cx(input_qubit[0],input_qubit[2]) # number=24
prog.cx(input_qubit[0],input_qubit[3]) # number=32
prog.x(input_qubit[3]) # number=33
prog.h(input_qubit[3]) # number=42
prog.cz(input_qubit[0],input_qubit[3]) # number=43
prog.h(input_qubit[3]) # number=44
if n>=2:
prog.mcu1(pi,input_qubit[1:],input_qubit[0])
prog.x(input_qubit[0]) # number=13
prog.rx(0.6157521601035993,input_qubit[1]) # number=60
prog.x(input_qubit[1]) # number=14
prog.x(input_qubit[2]) # number=15
prog.x(input_qubit[3]) # number=16
prog.h(input_qubit[0]) # number=17
prog.h(input_qubit[1]) # number=18
prog.h(input_qubit[2]) # number=19
prog.h(input_qubit[3]) # number=20
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
key = "00000"
f = lambda rep: str(int(rep == key))
prog = make_circuit(5,f)
backend = FakeVigo()
sample_shot =7924
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit_noisy1786.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| 33.239726
| 82
| 0.60952
|
3e679b5e9375139c1a637104c57e68ab8ec01244
| 1,796
|
py
|
Python
|
dao/ToLakeExerciseBrkdn.py
|
mbromberek/mdb_site
|
89cb0ed87071287a947aee8f1022668c78a3f59b
|
[
"BSD-3-Clause"
] | null | null | null |
dao/ToLakeExerciseBrkdn.py
|
mbromberek/mdb_site
|
89cb0ed87071287a947aee8f1022668c78a3f59b
|
[
"BSD-3-Clause"
] | null | null | null |
dao/ToLakeExerciseBrkdn.py
|
mbromberek/mdb_site
|
89cb0ed87071287a947aee8f1022668c78a3f59b
|
[
"BSD-3-Clause"
] | null | null | null |
#! /Users/mikeyb/Applications/python3
# -*- coding: utf-8 -*-
'''
BSD 3-Clause License
Copyright (c) 2020, Mike Bromberek
All rights reserved.
'''
'''
Used for Insert, Update, Delete for the LAKE.EXERCISE_BRKDN table
'''
# First party classes
import os, sys
import logging
import logging.config
import copy
# Third party classes
import psycopg2
# Custom classes
import dao.WrktCmnDAO as cmnDAO
import dao.ReadLakeExerciseBrkdn as readEx
logger = logging.getLogger(__name__)
def writeExercises(dbConfig, exLst):
cur = ''
conn = ''
try:
conn, cur = cmnDAO.getConnection(dbConfig)
insrtCt, updtCt = writeExerciseAll(cur, copy.deepcopy(exLst))
conn.commit();
return insrtCt
except (Exception, psycopg2.DatabaseError) as error:
logger.error(error)
return -1
finally:
cmnDAO.closeConnection(cur, conn)
def writeExerciseAll(cur, exLst):
insrtCt = 0
updtCt = 0
for ex in exLst:
columns = ex.keys()
values = [ex[column] for column in columns]
recordExist = readEx.exerciseExists(cur, ex['wrkt_dt'], ex['wrkt_typ'])
if recordExist:
killExercise(cur, ex['wrkt_dt'], ex['wrkt_typ'])
isrtStmt = 'insert into lake.exercise_brkdn (%s) values %s'
logger.debug(cur.mogrify(isrtStmt, (psycopg2.extensions.AsIs(','.join(columns)), tuple(values))))
cur.execute(isrtStmt, (psycopg2.extensions.AsIs(','.join(columns)), tuple(values)))
        if recordExist:
            updtCt = updtCt +1
        else:
            insrtCt = insrtCt +1
return insrtCt, updtCt
def killExercise(cur, wrkt_dt, wrkt_typ):
deleteQry = 'delete from lake.exercise_brkdn where wrkt_dt = %s and wrkt_typ = %s'
cur.execute(deleteQry, (wrkt_dt,wrkt_typ,))
rowsDeleted = cur.rowcount
return rowsDeleted
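# Hedged usage sketch, added for illustration only. The connection-config keys and the exercise
# fields below are assumptions (dao.WrktCmnDAO.getConnection's expected shape is not shown here),
# so the calls are left commented rather than executable:
# db_config = {'host': 'localhost', 'dbname': 'fitness', 'user': 'etl', 'password': 'secret'}
# exercises = [{'wrkt_dt': '2020-06-01 06:30', 'wrkt_typ': 'Running', 'dist_mi': 5.2}]
# rows_written = writeExercises(db_config, exercises)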
| 28.0625
| 105
| 0.66147
|
1b8a1bfb5739e22ddc06770568245b1e946d7bbe
| 3,086
|
py
|
Python
|
LEDtester/LEDsimulator.py
|
crotty-d/led-tester
|
f6e91bd7a5512896d434ad586e32b6ac01ff7f3f
|
[
"MIT"
] | null | null | null |
LEDtester/LEDsimulator.py
|
crotty-d/led-tester
|
f6e91bd7a5512896d434ad586e32b6ac01ff7f3f
|
[
"MIT"
] | null | null | null |
LEDtester/LEDsimulator.py
|
crotty-d/led-tester
|
f6e91bd7a5512896d434ad586e32b6ac01ff7f3f
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import numpy as np
import re
class LEDgrid:
"""
    Class to represent a grid of LEDs that can be turned on and off in response to instructions
"""
# Class variables
lights = None
# Constructor
def __init__(self, L):
"""Creates an instance of the LED light grid"""
self.lights = np.zeros((L,L), np.int8)
# Methods
def apply(self, instruction):
"""Apply an instruction to the grid, turning on/off lights as specified"""
# Parse instruction via regular expressions
pattern = re.compile(".*(turn on|turn off|switch)\s*([+-]?\d+)\s*,\s*([+-]?\d+)\s*through\s*([+-]?\d+)\s*,\s*([+-]?\d+).*")
parsed = pattern.match(instruction)
# Apply instruction if parsing produced a valid command and coordinates
if parsed != None:
# Assign the command to apply and the coordinates of the effected lights
parts = parsed.groups()
cmd = parts[0]
coords = []
for p in parts[1:]:
p = int(p)
if p >= 0:
coords.append(min(p, self.lights.shape[0] - 1)) # min used in case instruction out of grid bounds
else:
coords.append(0) # negative always outside bounds
x1, y1, x2, y2 = min(coords[0], coords[2]), min(coords[1], coords[3]), max(coords[0], coords[2]), max(coords[1], coords[3])
# Apply command to grid of lights
if cmd == 'turn on':
self.lights[y1:y2+1, x1:x2+1] = 1 # ranges are inclusive, hence +1
return 0
elif cmd == 'turn off':
self.lights[y1:y2+1, x1:x2+1] = 0
return 0
elif cmd == 'switch':
# Get indices of lights that are off (0) and then those that are on (1)
idx_zeros = np.where(self.lights[y1:y2+1, x1:x2+1] == 0)
idx_ones = np.where(self.lights[y1:y2+1, x1:x2+1] == 1)
idx0_offset = (idx_zeros[0] + y1, idx_zeros[1] + x1)
idx1_offset = (idx_ones[0] + y1, idx_ones[1] + x1)
# Switch them to opposite value
self.lights[idx0_offset] = 1
self.lights[idx1_offset] = 0
# Alternative simple iterative method (much slower)
                # for y in range(y1, y2+1):
                #     for x in range(x1, x2+1):
                #         if self.lights[y, x] == 0:
                #             self.lights[y, x] = 1
                #         else:
                #             self.lights[y, x] = 0
return 0
else:
# There should be no other possibility here, but just in case invalid cmd slips through
return 1
else:
return 1
def count(self):
"""Returns the number of lights currently turned on (1)"""
return self.lights.sum()
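# Illustrative usage, added for clarity; the grid size and the two instructions below are
# arbitrary examples rather than anything taken from the original project.
if __name__ == '__main__':
    demo_grid = LEDgrid(10)
    demo_grid.apply('turn on 0,0 through 4,4')   # light a 5x5 block
    demo_grid.apply('switch 3,3 through 6,6')    # toggle an overlapping block
    print(demo_grid.count())                     # number of lights currently on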
| 38.098765
| 135
| 0.488983
|
cae1d2159607b39b7ba471be4bfd0a9a9f027171
| 115
|
py
|
Python
|
Cal_tool/web/cal_tool/calendar/tests/test_datetime.py
|
Ale763/PSOPV_GROEP3_CALTOOL
|
d79d7fa5e680aafa05564b0d21c9a5e079170214
|
[
"MIT"
] | null | null | null |
Cal_tool/web/cal_tool/calendar/tests/test_datetime.py
|
Ale763/PSOPV_GROEP3_CALTOOL
|
d79d7fa5e680aafa05564b0d21c9a5e079170214
|
[
"MIT"
] | null | null | null |
Cal_tool/web/cal_tool/calendar/tests/test_datetime.py
|
Ale763/PSOPV_GROEP3_CALTOOL
|
d79d7fa5e680aafa05564b0d21c9a5e079170214
|
[
"MIT"
] | null | null | null |
import datetime
now = datetime.datetime.now()
print(now)
future = now + datetime.timedelta(weeks=1)
print(future)
| 16.428571
| 42
| 0.756522
|
08cbdf539e479d164a7a18fe653258bb8b617c6e
| 9,323
|
py
|
Python
|
python/how-tos/05-issue-credential/issue_credential.py
|
Decker-Russell/Indy-dev
|
8e08dd4f538d59d41e6ca5dcdf5b577c687c48fd
|
[
"Apache-2.0"
] | null | null | null |
python/how-tos/05-issue-credential/issue_credential.py
|
Decker-Russell/Indy-dev
|
8e08dd4f538d59d41e6ca5dcdf5b577c687c48fd
|
[
"Apache-2.0"
] | null | null | null |
python/how-tos/05-issue-credential/issue_credential.py
|
Decker-Russell/Indy-dev
|
8e08dd4f538d59d41e6ca5dcdf5b577c687c48fd
|
[
"Apache-2.0"
] | null | null | null |
"""
This sample is extensions of "write_schema_and_cred_def.py"
Shows how to issue a credential as a Trust Anchor which has created a Cred Definition
for an existing Schema.
After Trust Anchor has successfully created and stored a Cred Definition using Anonymous Credentials,
Prover's wallet is created and opened, and used to generate Prover's Master Secret.
After that, Trust Anchor generates Credential Offer for given Cred Definition, using Prover's DID
Prover uses Credential Offer to create Credential Request
Trust Anchor then uses Prover's Credential Request to issue a Credential.
Finally, Prover stores Credential in its wallet.
"""
import asyncio
import json
import pprint
from indy import pool, ledger, wallet, did, anoncreds
from indy.error import IndyError
from src.utils import get_pool_genesis_txn_path, run_coroutine, PROTOCOL_VERSION
pool_name = 'pool1'
print("Open Pool Ledger: {}".format(pool_name))
pool_genesis_txn_path = get_pool_genesis_txn_path(pool_name)
wallet_config = json.dumps({"id": "wallet"})
wallet_credentials = json.dumps({"key": "wallet_key"})
PROTOCOL_VERSION=2
def print_log(value_color="", value_noncolor=""):
"""set the colors for text."""
HEADER = '\033[92m'
ENDC = '\033[0m'
print(HEADER + value_color + ENDC + str(value_noncolor))
async def issue_credential():
try:
await pool.set_protocol_version(2)
# 1.
print_log('\n1. Creates a new local pool ledger configuration that is used '
'later when connecting to ledger.\n')
        pool_config = json.dumps({'genesis_txn': str(pool_genesis_txn_path)})
try:
await pool.create_pool_ledger_config(pool_name, pool_config)
except IndyError:
await pool.delete_pool_ledger_config(config_name=pool_name)
await pool.create_pool_ledger_config(pool_name, pool_config)
# 2.
print_log('\n2. Open pool ledger and get handle from libindy\n')
pool_handle = await pool.open_pool_ledger(config_name=pool_name, config=None)
# 3.
print_log('\n3. Creating new secure wallet\n')
try:
await wallet.create_wallet(wallet_config, wallet_credentials)
except IndyError:
await wallet.delete_wallet(wallet_config, wallet_credentials)
await wallet.create_wallet(wallet_config, wallet_credentials)
# 4.
print_log('\n4. Open wallet and get handle from libindy\n')
wallet_handle = await wallet.open_wallet(wallet_config, wallet_credentials)
# 5.
print_log('\n5. Generating and storing steward DID and verkey\n')
steward_seed = '000000000000000000000000Steward1'
did_json = json.dumps({'seed': steward_seed})
steward_did, steward_verkey = await did.create_and_store_my_did(wallet_handle, did_json)
print_log('Steward DID: ', steward_did)
print_log('Steward Verkey: ', steward_verkey)
# 6.
print_log('\n6. Generating and storing trust anchor DID and verkey\n')
trust_anchor_did, trust_anchor_verkey = await did.create_and_store_my_did(wallet_handle, "{}")
print_log('Trust anchor DID: ', trust_anchor_did)
print_log('Trust anchor Verkey: ', trust_anchor_verkey)
# 7.
print_log('\n7. Building NYM request to add Trust Anchor to the ledger\n')
nym_transaction_request = await ledger.build_nym_request(submitter_did=steward_did,
target_did=trust_anchor_did,
ver_key=trust_anchor_verkey,
alias=None,
role='TRUST_ANCHOR')
print_log('NYM transaction request: ')
pprint.pprint(json.loads(nym_transaction_request))
# 8.
print_log('\n8. Sending NYM request to the ledger\n')
nym_transaction_response = await ledger.sign_and_submit_request(pool_handle=pool_handle,
wallet_handle=wallet_handle,
submitter_did=steward_did,
request_json=nym_transaction_request)
print_log('NYM transaction response: ')
pprint.pprint(json.loads(nym_transaction_response))
# 9.
print_log('\n9. Build the SCHEMA request to add new schema to the ledger as a Steward\n')
seq_no = 1
schema = {
'seqNo': seq_no,
'dest': steward_did,
'data': {
'id': '1',
'name': 'gvt',
'version': '1.0',
'ver': '1.0',
'attrNames': ['age', 'sex', 'height', 'name']
}
}
schema_data = schema['data']
print_log('Schema data: ')
pprint.pprint(schema_data)
print_log('Schema: ')
pprint.pprint(schema)
schema_request = await ledger.build_schema_request(steward_did, json.dumps(schema_data))
print_log('Schema request: ')
pprint.pprint(json.loads(schema_request))
# 10.
print_log('\n10. Sending the SCHEMA request to the ledger\n')
schema_response = await ledger.sign_and_submit_request(pool_handle, wallet_handle, steward_did, schema_request)
print_log('Schema response:')
pprint.pprint(json.loads(schema_response))
# 11.
print_log('\n11. Creating and storing CRED DEFINITION using anoncreds as Trust Anchor, for the given Schema\n')
cred_def_tag = 'cred_def_tag'
cred_def_type = 'CL'
cred_def_config = json.dumps({"support_revocation": False})
(cred_def_id, cred_def_json) = await anoncreds.issuer_create_and_store_credential_def(wallet_handle, trust_anchor_did, json.dumps(schema_data),
cred_def_tag, cred_def_type, cred_def_config)
print_log('Credential definition: ')
pprint.pprint(json.loads(cred_def_json))
# 12.
print_log('\n12. Creating Prover wallet and opening it to get the handle\n')
prover_did = 'VsKV7grR1BUE29mG2Fm2kX'
prover_wallet_config = json.dumps({"id": "prover_wallet"})
prover_wallet_credentials = json.dumps({"key": "prover_wallet_key"})
await wallet.create_wallet(prover_wallet_config, prover_wallet_credentials)
prover_wallet_handle = await wallet.open_wallet(prover_wallet_config, prover_wallet_credentials)
# 13.
print_log('\n13. Prover is creating Master Secret\n')
master_secret_name = 'master_secret'
master_secret_id = await anoncreds.prover_create_master_secret(prover_wallet_handle, master_secret_name)
# 14.
print_log('\n14. Issuer (Trust Anchor) is creating a Credential Offer for Prover\n')
schema_json = json.dumps(schema)
cred_offer_json = await anoncreds.issuer_create_credential_offer(wallet_handle, cred_def_id)
print_log('Credential Offer: ')
pprint.pprint(json.loads(cred_offer_json))
# 15.
print_log('\n15. Prover creates Credential Request for the given credential offer\n')
(cred_req_json, cred_req_metadata_json) = await anoncreds.prover_create_credential_req(prover_wallet_handle, prover_did, cred_offer_json, cred_def_json, master_secret_id)
print_log('Credential Request: ')
pprint.pprint(json.loads(cred_req_json))
# 16.
print_log('\n16. Issuer (Trust Anchor) creates Credential for Credential Request\n')
cred_values_json = json.dumps({
'sex': ['male', '5944657099558967239210949258394887428692050081607692519917050011144233115103'],
'name': ['Alex', '1139481716457488690172217916278103335'],
'height': ['175', '175'],
'age': ['28', '28']
})
(cred_json, _, _) = await anoncreds.issuer_create_credential(wallet_handle, cred_offer_json, cred_req_json, cred_values_json, None, None)
print_log('Credential: ')
pprint.pprint(json.loads(cred_json))
# 17.
print_log('\n17. Prover processes and stores Credential\n')
await anoncreds.prover_store_credential(prover_wallet_handle, None, cred_req_metadata_json, cred_json, cred_def_json, None)
# 18.
print_log('\n18. Closing both wallet_handles and pool\n')
await wallet.close_wallet(wallet_handle)
await wallet.close_wallet(prover_wallet_handle)
await pool.close_pool_ledger(pool_handle)
# 19.
print_log('\n19. Deleting created wallet_handles\n')
await wallet.delete_wallet(wallet_config, wallet_credentials)
await wallet.delete_wallet(prover_wallet_config, prover_wallet_credentials)
# 20.
print_log('\n20. Deleting pool ledger config\n')
await pool.delete_pool_ledger_config(pool_name)
except IndyError as e:
print('Error occurred: %s' % e)
def main():
loop = asyncio.get_event_loop()
loop.run_until_complete(issue_credential())
loop.close()
if __name__ == '__main__':
main()
| 44.184834
| 178
| 0.648396
|
71f5ecfbb0a3b96527c082c26dbe0d17a75cfa68
| 17,054
|
py
|
Python
|
front-end/testsuite-python-lib/Python-3.1/Lib/test/test_zlib.py
|
MalloyPower/parsing-python
|
b2bca5eed07ea2af7a2001cd4f63becdfb0570be
|
[
"MIT"
] | 1
|
2020-11-26T18:53:46.000Z
|
2020-11-26T18:53:46.000Z
|
front-end/testsuite-python-lib/Python-3.1/Lib/test/test_zlib.py
|
MalloyPower/parsing-python
|
b2bca5eed07ea2af7a2001cd4f63becdfb0570be
|
[
"MIT"
] | null | null | null |
front-end/testsuite-python-lib/Python-3.1/Lib/test/test_zlib.py
|
MalloyPower/parsing-python
|
b2bca5eed07ea2af7a2001cd4f63becdfb0570be
|
[
"MIT"
] | 1
|
2019-04-11T11:27:01.000Z
|
2019-04-11T11:27:01.000Z
|
import unittest
from test import support
import binascii
import random
zlib = support.import_module('zlib')
class ChecksumTestCase(unittest.TestCase):
# checksum test cases
def test_crc32start(self):
self.assertEqual(zlib.crc32(b""), zlib.crc32(b"", 0))
self.assert_(zlib.crc32(b"abc", 0xffffffff))
def test_crc32empty(self):
self.assertEqual(zlib.crc32(b"", 0), 0)
self.assertEqual(zlib.crc32(b"", 1), 1)
self.assertEqual(zlib.crc32(b"", 432), 432)
def test_adler32start(self):
self.assertEqual(zlib.adler32(b""), zlib.adler32(b"", 1))
self.assert_(zlib.adler32(b"abc", 0xffffffff))
def test_adler32empty(self):
self.assertEqual(zlib.adler32(b"", 0), 0)
self.assertEqual(zlib.adler32(b"", 1), 1)
self.assertEqual(zlib.adler32(b"", 432), 432)
def assertEqual32(self, seen, expected):
# 32-bit values masked -- checksums on 32- vs 64- bit machines
        # This is important if bit 31 (0x80000000) is set.
self.assertEqual(seen & 0x0FFFFFFFF, expected & 0x0FFFFFFFF)
def test_penguins(self):
self.assertEqual32(zlib.crc32(b"penguin", 0), 0x0e5c1a120)
self.assertEqual32(zlib.crc32(b"penguin", 1), 0x43b6aa94)
self.assertEqual32(zlib.adler32(b"penguin", 0), 0x0bcf02f6)
self.assertEqual32(zlib.adler32(b"penguin", 1), 0x0bd602f7)
self.assertEqual(zlib.crc32(b"penguin"), zlib.crc32(b"penguin", 0))
self.assertEqual(zlib.adler32(b"penguin"),zlib.adler32(b"penguin",1))
def test_crc32_adler32_unsigned(self):
foo = 'abcdefghijklmnop'
# explicitly test signed behavior
self.assertEqual(zlib.crc32(foo), 2486878355)
self.assertEqual(zlib.crc32('spam'), 1138425661)
self.assertEqual(zlib.adler32(foo+foo), 3573550353)
self.assertEqual(zlib.adler32('spam'), 72286642)
def test_same_as_binascii_crc32(self):
foo = b'abcdefghijklmnop'
crc = 2486878355
self.assertEqual(binascii.crc32(foo), crc)
self.assertEqual(zlib.crc32(foo), crc)
self.assertEqual(binascii.crc32(b'spam'), zlib.crc32(b'spam'))
class ExceptionTestCase(unittest.TestCase):
# make sure we generate some expected errors
def test_badlevel(self):
# specifying compression level out of range causes an error
# (but -1 is Z_DEFAULT_COMPRESSION and apparently the zlib
# accepts 0 too)
self.assertRaises(zlib.error, zlib.compress, 'ERROR', 10)
def test_badcompressobj(self):
# verify failure on building compress object with bad params
self.assertRaises(ValueError, zlib.compressobj, 1, zlib.DEFLATED, 0)
# specifying total bits too large causes an error
self.assertRaises(ValueError,
zlib.compressobj, 1, zlib.DEFLATED, zlib.MAX_WBITS + 1)
def test_baddecompressobj(self):
# verify failure on building decompress object with bad params
self.assertRaises(ValueError, zlib.decompressobj, 0)
def test_decompressobj_badflush(self):
# verify failure on calling decompressobj.flush with bad params
self.assertRaises(ValueError, zlib.decompressobj().flush, 0)
self.assertRaises(ValueError, zlib.decompressobj().flush, -1)
class CompressTestCase(unittest.TestCase):
# Test compression in one go (whole message compression)
def test_speech(self):
x = zlib.compress(HAMLET_SCENE)
self.assertEqual(zlib.decompress(x), HAMLET_SCENE)
def test_speech128(self):
# compress more data
data = HAMLET_SCENE * 128
x = zlib.compress(data)
self.assertEqual(zlib.decompress(x), data)
class CompressObjectTestCase(unittest.TestCase):
# Test compression object
def test_pair(self):
# straightforward compress/decompress objects
data = HAMLET_SCENE * 128
co = zlib.compressobj()
x1 = co.compress(data)
x2 = co.flush()
self.assertRaises(zlib.error, co.flush) # second flush should not work
dco = zlib.decompressobj()
y1 = dco.decompress(x1 + x2)
y2 = dco.flush()
self.assertEqual(data, y1 + y2)
self.assert_(isinstance(dco.unconsumed_tail, bytes))
self.assert_(isinstance(dco.unused_data, bytes))
def test_compressoptions(self):
# specify lots of options to compressobj()
level = 2
method = zlib.DEFLATED
wbits = -12
memlevel = 9
strategy = zlib.Z_FILTERED
co = zlib.compressobj(level, method, wbits, memlevel, strategy)
x1 = co.compress(HAMLET_SCENE)
x2 = co.flush()
dco = zlib.decompressobj(wbits)
y1 = dco.decompress(x1 + x2)
y2 = dco.flush()
self.assertEqual(HAMLET_SCENE, y1 + y2)
def test_compressincremental(self):
# compress object in steps, decompress object as one-shot
data = HAMLET_SCENE * 128
co = zlib.compressobj()
bufs = []
for i in range(0, len(data), 256):
bufs.append(co.compress(data[i:i+256]))
bufs.append(co.flush())
combuf = b''.join(bufs)
dco = zlib.decompressobj()
y1 = dco.decompress(b''.join(bufs))
y2 = dco.flush()
self.assertEqual(data, y1 + y2)
def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):
# compress object in steps, decompress object in steps
source = source or HAMLET_SCENE
data = source * 128
co = zlib.compressobj()
bufs = []
for i in range(0, len(data), cx):
bufs.append(co.compress(data[i:i+cx]))
bufs.append(co.flush())
combuf = b''.join(bufs)
decombuf = zlib.decompress(combuf)
# Test type of return value
self.assert_(isinstance(decombuf, bytes))
self.assertEqual(data, decombuf)
dco = zlib.decompressobj()
bufs = []
for i in range(0, len(combuf), dcx):
bufs.append(dco.decompress(combuf[i:i+dcx]))
self.assertEqual(b'', dco.unconsumed_tail, ########
"(A) uct should be b'': not %d long" %
len(dco.unconsumed_tail))
self.assertEqual(b'', dco.unused_data)
if flush:
bufs.append(dco.flush())
else:
while True:
chunk = dco.decompress('')
if chunk:
bufs.append(chunk)
else:
break
self.assertEqual(b'', dco.unconsumed_tail, ########
"(B) uct should be b'': not %d long" %
len(dco.unconsumed_tail))
self.assertEqual(b'', dco.unused_data)
self.assertEqual(data, b''.join(bufs))
# Failure means: "decompressobj with init options failed"
def test_decompincflush(self):
self.test_decompinc(flush=True)
def test_decompimax(self, source=None, cx=256, dcx=64):
# compress in steps, decompress in length-restricted steps
source = source or HAMLET_SCENE
# Check a decompression object with max_length specified
data = source * 128
co = zlib.compressobj()
bufs = []
for i in range(0, len(data), cx):
bufs.append(co.compress(data[i:i+cx]))
bufs.append(co.flush())
combuf = b''.join(bufs)
self.assertEqual(data, zlib.decompress(combuf),
'compressed data failure')
dco = zlib.decompressobj()
bufs = []
cb = combuf
while cb:
#max_length = 1 + len(cb)//10
chunk = dco.decompress(cb, dcx)
self.failIf(len(chunk) > dcx,
'chunk too big (%d>%d)' % (len(chunk), dcx))
bufs.append(chunk)
cb = dco.unconsumed_tail
bufs.append(dco.flush())
self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')
def test_decompressmaxlen(self, flush=False):
# Check a decompression object with max_length specified
data = HAMLET_SCENE * 128
co = zlib.compressobj()
bufs = []
for i in range(0, len(data), 256):
bufs.append(co.compress(data[i:i+256]))
bufs.append(co.flush())
combuf = b''.join(bufs)
self.assertEqual(data, zlib.decompress(combuf),
'compressed data failure')
dco = zlib.decompressobj()
bufs = []
cb = combuf
while cb:
max_length = 1 + len(cb)//10
chunk = dco.decompress(cb, max_length)
self.failIf(len(chunk) > max_length,
'chunk too big (%d>%d)' % (len(chunk),max_length))
bufs.append(chunk)
cb = dco.unconsumed_tail
if flush:
bufs.append(dco.flush())
else:
while chunk:
chunk = dco.decompress('', max_length)
self.failIf(len(chunk) > max_length,
'chunk too big (%d>%d)' % (len(chunk),max_length))
bufs.append(chunk)
self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')
def test_decompressmaxlenflush(self):
self.test_decompressmaxlen(flush=True)
def test_maxlenmisc(self):
# Misc tests of max_length
dco = zlib.decompressobj()
self.assertRaises(ValueError, dco.decompress, "", -1)
self.assertEqual(b'', dco.unconsumed_tail)
def test_flushes(self):
# Test flush() with the various options, using all the
# different levels in order to provide more variations.
sync_opt = ['Z_NO_FLUSH', 'Z_SYNC_FLUSH', 'Z_FULL_FLUSH']
sync_opt = [getattr(zlib, opt) for opt in sync_opt
if hasattr(zlib, opt)]
data = HAMLET_SCENE * 8
for sync in sync_opt:
for level in range(10):
obj = zlib.compressobj( level )
a = obj.compress( data[:3000] )
b = obj.flush( sync )
c = obj.compress( data[3000:] )
d = obj.flush()
self.assertEqual(zlib.decompress(b''.join([a,b,c,d])),
data, ("Decompress failed: flush "
"mode=%i, level=%i") % (sync, level))
del obj
def test_odd_flush(self):
# Test for odd flushing bugs noted in 2.0, and hopefully fixed in 2.1
import random
if hasattr(zlib, 'Z_SYNC_FLUSH'):
# Testing on 17K of "random" data
# Create compressor and decompressor objects
co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
dco = zlib.decompressobj()
# Try 17K of data
# generate random data stream
try:
# In 2.3 and later, WichmannHill is the RNG of the bug report
gen = random.WichmannHill()
except AttributeError:
try:
# 2.2 called it Random
gen = random.Random()
except AttributeError:
# others might simply have a single RNG
gen = random
gen.seed(1)
data = genblock(1, 17 * 1024, generator=gen)
# compress, sync-flush, and decompress
first = co.compress(data)
second = co.flush(zlib.Z_SYNC_FLUSH)
expanded = dco.decompress(first + second)
# if decompressed data is different from the input data, choke.
self.assertEqual(expanded, data, "17K random source doesn't match")
def test_empty_flush(self):
# Test that calling .flush() on unused objects works.
# (Bug #1083110 -- calling .flush() on decompress objects
# caused a core dump.)
co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
self.failUnless(co.flush()) # Returns a zlib header
dco = zlib.decompressobj()
self.assertEqual(dco.flush(), b"") # Returns nothing
if hasattr(zlib.compressobj(), "copy"):
def test_compresscopy(self):
# Test copying a compression object
data0 = HAMLET_SCENE
data1 = bytes(str(HAMLET_SCENE, "ascii").swapcase(), "ascii")
c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
bufs0 = []
bufs0.append(c0.compress(data0))
c1 = c0.copy()
bufs1 = bufs0[:]
bufs0.append(c0.compress(data0))
bufs0.append(c0.flush())
s0 = b''.join(bufs0)
bufs1.append(c1.compress(data1))
bufs1.append(c1.flush())
s1 = b''.join(bufs1)
self.assertEqual(zlib.decompress(s0),data0+data0)
self.assertEqual(zlib.decompress(s1),data0+data1)
def test_badcompresscopy(self):
# Test copying a compression object in an inconsistent state
c = zlib.compressobj()
c.compress(HAMLET_SCENE)
c.flush()
self.assertRaises(ValueError, c.copy)
if hasattr(zlib.decompressobj(), "copy"):
def test_decompresscopy(self):
# Test copying a decompression object
data = HAMLET_SCENE
comp = zlib.compress(data)
# Test type of return value
self.assert_(isinstance(comp, bytes))
d0 = zlib.decompressobj()
bufs0 = []
bufs0.append(d0.decompress(comp[:32]))
d1 = d0.copy()
bufs1 = bufs0[:]
bufs0.append(d0.decompress(comp[32:]))
s0 = b''.join(bufs0)
bufs1.append(d1.decompress(comp[32:]))
s1 = b''.join(bufs1)
self.assertEqual(s0,s1)
self.assertEqual(s0,data)
def test_baddecompresscopy(self):
# Test copying a compression object in an inconsistent state
data = zlib.compress(HAMLET_SCENE)
d = zlib.decompressobj()
d.decompress(data)
d.flush()
self.assertRaises(ValueError, d.copy)
def genblock(seed, length, step=1024, generator=random):
"""length-byte stream of random data from a seed (in step-byte blocks)."""
if seed is not None:
generator.seed(seed)
randint = generator.randint
if length < step or step < 2:
step = length
blocks = bytes()
for i in range(0, length, step):
blocks += bytes(randint(0, 255) for x in range(step))
return blocks
def choose_lines(source, number, seed=None, generator=random):
"""Return a list of number lines randomly chosen from the source"""
if seed is not None:
generator.seed(seed)
sources = source.split('\n')
return [generator.choice(sources) for n in range(number)]
HAMLET_SCENE = b"""
LAERTES
O, fear me not.
I stay too long: but here my father comes.
Enter POLONIUS
A double blessing is a double grace,
Occasion smiles upon a second leave.
LORD POLONIUS
Yet here, Laertes! aboard, aboard, for shame!
The wind sits in the shoulder of your sail,
And you are stay'd for. There; my blessing with thee!
And these few precepts in thy memory
See thou character. Give thy thoughts no tongue,
Nor any unproportioned thought his act.
Be thou familiar, but by no means vulgar.
Those friends thou hast, and their adoption tried,
Grapple them to thy soul with hoops of steel;
But do not dull thy palm with entertainment
Of each new-hatch'd, unfledged comrade. Beware
Of entrance to a quarrel, but being in,
Bear't that the opposed may beware of thee.
Give every man thy ear, but few thy voice;
Take each man's censure, but reserve thy judgment.
Costly thy habit as thy purse can buy,
But not express'd in fancy; rich, not gaudy;
For the apparel oft proclaims the man,
And they in France of the best rank and station
Are of a most select and generous chief in that.
Neither a borrower nor a lender be;
For loan oft loses both itself and friend,
And borrowing dulls the edge of husbandry.
This above all: to thine ownself be true,
And it must follow, as the night the day,
Thou canst not then be false to any man.
Farewell: my blessing season this in thee!
LAERTES
Most humbly do I take my leave, my lord.
LORD POLONIUS
The time invites you; go; your servants tend.
LAERTES
Farewell, Ophelia; and remember well
What I have said to you.
OPHELIA
'Tis in my memory lock'd,
And you yourself shall keep the key of it.
LAERTES
Farewell.
"""
def test_main():
support.run_unittest(
ChecksumTestCase,
ExceptionTestCase,
CompressTestCase,
CompressObjectTestCase
)
if __name__ == "__main__":
unittest.main() # XXX
###test_main()
| 35.235537
| 79
| 0.593116
|
30a5a6d2f9ce2f833528ef42af425818236ab850
| 5,319
|
py
|
Python
|
docs/source/conf.py
|
robertovillarejo/DADS-Knowledge-Base
|
a7a2e18d5a2d416b2ea321ffdf020ad63ad6e450
|
[
"MIT"
] | null | null | null |
docs/source/conf.py
|
robertovillarejo/DADS-Knowledge-Base
|
a7a2e18d5a2d416b2ea321ffdf020ad63ad6e450
|
[
"MIT"
] | null | null | null |
docs/source/conf.py
|
robertovillarejo/DADS-Knowledge-Base
|
a7a2e18d5a2d416b2ea321ffdf020ad63ad6e450
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Base de conocimientos documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 21 17:58:30 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# Markdown support
from recommonmark.parser import CommonMarkParser
source_parsers = {
'.md': CommonMarkParser,
}
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Base de conocimientos'
copyright = u'2017, DADS Infotec'
author = u'DADS Infotec'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'es'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_theme_path = ["_themes", ]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Basedeconocimientosdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Basedeconocimientos.tex', u'Base de conocimientos Documentation',
u'DADS Infotec', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'basedeconocimientos', u'Base de conocimientos Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Basedeconocimientos', u'Base de conocimientos Documentation',
author, 'Basedeconocimientos', 'One line description of project.',
'Miscellaneous'),
]
| 30.050847
| 83
| 0.688475
|
294fd2ff2c76ea6a2e7624aa973c6f534b23b22a
| 1,382
|
py
|
Python
|
qtip/loader/qpi.py
|
hashnfv/hashnfv-qtip
|
2c79d3361fdb1fcbe67682f8a205011b3ccf5e72
|
[
"Apache-2.0"
] | 4
|
2016-11-27T19:21:48.000Z
|
2018-01-08T21:49:03.000Z
|
qtip/loader/qpi.py
|
hashnfv/hashnfv-qtip
|
2c79d3361fdb1fcbe67682f8a205011b3ccf5e72
|
[
"Apache-2.0"
] | null | null | null |
qtip/loader/qpi.py
|
hashnfv/hashnfv-qtip
|
2c79d3361fdb1fcbe67682f8a205011b3ccf5e72
|
[
"Apache-2.0"
] | 10
|
2017-03-19T07:38:40.000Z
|
2018-01-08T21:49:09.000Z
|
##############################################################################
# Copyright (c) 2016 ZTE Corp and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
from yaml_file import YamlFileLoader
from qtip.base.constant import FormulaName
from qtip.base.constant import SpecProp
from qtip.util.formula import Formula
class QPISpec(YamlFileLoader):
"""
a QPI specification defines how to calculate a performance index from
collected metrics.
"""
RELATIVE_PATH = 'QPI'
def __init__(self, name, paths=None):
super(QPISpec, self).__init__(name, paths=paths)
content = self.content
self.formula = Formula(content.get(SpecProp.FORMULA, FormulaName.ARITHMETIC_MEAN))
self.sections = [Section(record, paths=paths)
for record in content[SpecProp.SECTIONS]]
class Section(object):
def __init__(self, content, paths=None):
self.name = content[SpecProp.NAME]
self.weight = content.get(SpecProp.WEIGHT, None)
self.formula = Formula(content.get(SpecProp.FORMULA, FormulaName.ARITHMETIC_MEAN))
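# Hedged sketch, added for illustration: judging only from the attributes read above, a QPI
# spec file found under the QPI/ search path would look roughly like the YAML below. The key
# names and values are guesses based on SpecProp/FormulaName, not taken from the project docs.
#
#   formula: arithmetic mean
#   sections:
#     - name: compute
#       weight: 0.5
#     - name: storage
#       weight: 0.5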
| 37.351351
| 90
| 0.636758
|
5111f883330f1a04b0e67a9815c366827a930ced
| 634
|
py
|
Python
|
manage.py
|
prasenjitaich/HotelBookingApp
|
a53c54fdd70f2c00c1787af681587e752248ee7c
|
[
"MIT"
] | null | null | null |
manage.py
|
prasenjitaich/HotelBookingApp
|
a53c54fdd70f2c00c1787af681587e752248ee7c
|
[
"MIT"
] | null | null | null |
manage.py
|
prasenjitaich/HotelBookingApp
|
a53c54fdd70f2c00c1787af681587e752248ee7c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'booking_engine.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.818182
| 78
| 0.68612
|
e74dec0739b4a853542fc1a2f2afeb2ded1c4be9
| 814
|
py
|
Python
|
src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py
|
mpminardi/grpc
|
ce9e6eeded3b9d4d7f17a5432bc79008929cb8b7
|
[
"Apache-2.0"
] | 36,552
|
2015-02-26T17:30:13.000Z
|
2022-03-31T22:41:33.000Z
|
src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py
|
SanjanaSingh897/grpc
|
2d858866eb95ce5de8ccc8c35189a12733d8ca79
|
[
"Apache-2.0"
] | 23,536
|
2015-02-26T17:50:56.000Z
|
2022-03-31T23:39:42.000Z
|
src/python/grpcio_tests/tests_aio/_sanity/_sanity_test.py
|
SanjanaSingh897/grpc
|
2d858866eb95ce5de8ccc8c35189a12733d8ca79
|
[
"Apache-2.0"
] | 11,050
|
2015-02-26T17:22:10.000Z
|
2022-03-31T10:12:35.000Z
|
# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from tests._sanity import _sanity_test
class AioSanityTest(_sanity_test.SanityTest):
TEST_PKG_MODULE_NAME = 'tests_aio'
TEST_PKG_PATH = 'tests_aio'
if __name__ == '__main__':
unittest.main(verbosity=2)
| 29.071429
| 74
| 0.760442
|
289a223720e73741d95ff77a7faca61021f290b3
| 6,196
|
py
|
Python
|
bertrpc/client.py
|
mjrusso/python-bertrpc
|
312aba536925206c8e50a1da9f3e6c80d4580246
|
[
"MIT"
] | 15
|
2015-04-05T07:21:46.000Z
|
2021-01-25T01:46:25.000Z
|
bertrpc/client.py
|
mjrusso/python-bertrpc
|
312aba536925206c8e50a1da9f3e6c80d4580246
|
[
"MIT"
] | 1
|
2017-01-27T10:50:10.000Z
|
2017-01-27T10:50:10.000Z
|
bertrpc/client.py
|
mjrusso/python-bertrpc
|
312aba536925206c8e50a1da9f3e6c80d4580246
|
[
"MIT"
] | 5
|
2015-10-08T09:23:50.000Z
|
2021-01-25T01:46:09.000Z
|
import bert
import error
import socket
import struct
class Service(object):
def __init__(self, host, port, timeout = None):
self.host = host
self.port = port
self.timeout = timeout
def request(self, kind, options=None):
if kind in ['call', 'cast']:
self._verify_options(options)
return Request(self, bert.Atom(kind), options)
else:
raise error.InvalidRequest('unsupported request of kind: "%s"' % kind)
def _verify_options(self, options):
if options is not None:
cache = options.get('cache', None)
if cache is not None:
if len(cache) >= 2 and cache[0] == 'validation' and type(cache[1]) == type(str()):
pass
else:
raise error.InvalidOption('Valid cache args are [validation, String]')
else:
raise error.InvalidOption('Valid options are: cache')
class Request(object):
def __init__(self, service, kind, options):
self.service = service
self.kind = kind
self.options = options
def __getattr__(self, attr):
return Module(self.service, self, bert.Atom(attr))
class Module(object):
def __init__(self, service, request, module):
self.service = service
self.request = request
self.module = module
def __getattr__(self, attr):
def callable(*args, **kwargs):
return self.method_missing(attr, *args, **kwargs)
return callable
def method_missing(self, *args, **kwargs):
return Action(self.service,
self.request,
self.module,
bert.Atom(args[0]),
list(args[1:])).execute()
class Action(object):
def __init__(self, service, request, module, function, arguments):
self.service = service
self.request = request
self.module = module
self.function = function
self.arguments = arguments
def execute(self):
python_request = (self.request.kind,
self.module,
self.function,
self.arguments)
bert_request = Encoder().encode(python_request)
bert_response = self._transaction(bert_request)
python_response = Decoder().decode(bert_response)
return python_response
def _transaction(self, bert_request):
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if self.service.timeout is not None: sock.settimeout(self.service.timeout)
sock.connect((self.service.host, self.service.port))
if self.request.options is not None:
if self.request.options.get('cache', None) is not None:
if self.request.options['cache'][0] == 'validation':
token = self.request.options['cache'][1]
info_bert = Encoder().encode(
(bert.Atom('info'), bert.Atom('cache'), [bert.Atom('validation'), bert.Atom(token)]))
info_header = struct.pack(">l", len(info_bert))
sock.sendall(info_header)
sock.sendall(info_bert)
header = struct.pack(">l", len(bert_request))
sock.sendall(header)
sock.sendall(bert_request)
lenheader = sock.recv(4)
if lenheader is None: raise error.ProtocolError(error.ProtocolError.NO_HEADER)
length = struct.unpack(">l",lenheader)[0]
bert_response = ''
while len(bert_response) < length:
response_part = sock.recv(length - len(bert_response))
if response_part is None or len(response_part) == 0:
raise error.ProtocolError(error.ProtocolError.NO_DATA)
bert_response += response_part
sock.close()
return bert_response
except socket.timeout, e:
raise error.ReadTimeoutError('No response from %s:%s in %ss' %
(self.service.host, self.service.port, self.service.timeout))
except socket.error, e:
raise error.ConnectionError('Unable to connect to %s:%s' % (self.service.host, self.service.port))
class Encoder(object):
def encode(self, python_request):
return bert.encode(python_request)
class Decoder(object):
def decode(self, bert_response):
python_response = bert.decode(bert_response)
if python_response[0] == bert.Atom('reply'):
return python_response[1]
elif python_response[0] == bert.Atom('noreply'):
return None
elif python_response[0] == bert.Atom('error'):
return self._error(python_response[1])
else:
raise error.BERTRPCError('invalid response received from server')
def _error(self, err):
level, code, klass, message, backtrace = err
exception_map = {
bert.Atom('protocol'): error.ProtocolError,
bert.Atom('server'): error.ServerError,
bert.Atom('user'): error.UserError,
bert.Atom('proxy'): error.ProxyError
}
exception = exception_map.get(level, None)
if level is not None:
raise exception([code, message], klass, backtrace)
else:
raise error.BERTRPCError('invalid error code received from server')
if __name__ == '__main__':
print 'initializing service now'
service = Service('localhost', 9999)
print 'RPC call now'
response = service.request('call').calc.add(1, 2)
print 'response is: %s' % repr(response)
print 'RPC call now, with options'
options = {'cache': ['validation','myToken']}
response = service.request('call', options).calc.add(5, 6)
print 'response is: %s' % repr(response)
print 'RPC cast now'
response = service.request('cast').stats.incr()
print 'response is: %s' % repr(response)
| 37.551515
| 113
| 0.580213
|
f15a7ff96a74084d72074d6c16d68803044f0dff
| 2,601
|
py
|
Python
|
meltingpot/python/utils/scenarios/wrappers/base.py
|
jagapiou/meltingpot
|
b3c07f2de74e4f2577749cfd3b3173cfe5cfdc30
|
[
"Apache-2.0"
] | null | null | null |
meltingpot/python/utils/scenarios/wrappers/base.py
|
jagapiou/meltingpot
|
b3c07f2de74e4f2577749cfd3b3173cfe5cfdc30
|
[
"Apache-2.0"
] | null | null | null |
meltingpot/python/utils/scenarios/wrappers/base.py
|
jagapiou/meltingpot
|
b3c07f2de74e4f2577749cfd3b3173cfe5cfdc30
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 DeepMind Technologies Limited.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for wrappers."""
from meltingpot.python import substrate
Substrate = substrate.Substrate
class Wrapper(substrate.Substrate):
"""Base class for Substrate wrappers."""
def __init__(self, env: substrate.Substrate):
"""Initializer.
Args:
env: An environment to wrap. This environment will be closed with this
wrapper.
"""
self._env = env
def reset(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.reset(*args, **kwargs)
def step(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.step(*args, **kwargs)
def reward_spec(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.reward_spec(*args, **kwargs)
def discount_spec(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.discount_spec(*args, **kwargs)
def observation_spec(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.observation_spec(*args, **kwargs)
def action_spec(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.action_spec(*args, **kwargs)
def close(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.close(*args, **kwargs)
def observation(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.observation(*args, **kwargs)
def events(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.events(*args, **kwargs)
def list_property(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.list_property(*args, **kwargs)
def write_property(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.write_property(*args, **kwargs)
def read_property(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.read_property(*args, **kwargs)
def observables(self, *args, **kwargs) -> ...:
"""See base class."""
return self._env.observables(*args, **kwargs)
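# Minimal sketch, added for illustration and not part of Melting Pot itself: a concrete wrapper
# overrides only what it needs and relies on the delegation above for everything else. The
# reward scaling, and the assumption that timesteps are dm_env-style namedtuples carrying one
# reward per player, are illustrative guesses.
class ExampleScaledRewardWrapper(Wrapper):
  """Example wrapper that multiplies every player's reward by a constant factor."""
  def __init__(self, env: substrate.Substrate, scale: float = 2.0):
    super().__init__(env)
    self._scale = scale
  def step(self, *args, **kwargs) -> ...:
    # Assumes a dm_env-style TimeStep namedtuple with a per-player reward sequence.
    timestep = self._env.step(*args, **kwargs)
    return timestep._replace(
        reward=[reward * self._scale for reward in timestep.reward])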
| 30.964286
| 76
| 0.645521
|
5a6d2766eb9930a37b526a7a3e8d73e3fe87c567
| 6,790
|
py
|
Python
|
src/repositories/room_messages_repository.py
|
WebisD/chat-irc-protocol
|
6720d1789a366bfd7943b81c7c84cb0941c66e80
|
[
"MIT"
] | null | null | null |
src/repositories/room_messages_repository.py
|
WebisD/chat-irc-protocol
|
6720d1789a366bfd7943b81c7c84cb0941c66e80
|
[
"MIT"
] | null | null | null |
src/repositories/room_messages_repository.py
|
WebisD/chat-irc-protocol
|
6720d1789a366bfd7943b81c7c84cb0941c66e80
|
[
"MIT"
] | 3
|
2021-06-03T12:27:27.000Z
|
2021-06-14T22:48:36.000Z
|
from typing import List, Tuple
from repositories import *
from dtos.dto_room_messages import RoomMessages
__all__ = ['RoomMessagesRepository']
class RoomMessagesRepository(RepositoryInterface):
def __init__(self, db_name: str = "concord.db") -> None:
super().__init__(db_name)
self.table_name: str = 'room_messages'
def find_all_by_user_id(self, message_id: str) -> Tuple[List[RoomMessages], bool]:
try:
self.controller_database.run_query_with_args(
f'''
SELECT * from {self.table_name} where message_id = :id
''',
{"id": message_id}
)
result = self.controller_database.fetch_all_results_from_last_query()
message_list: List[RoomMessages] = []
for rows in result:
message_list.append(RoomMessages(*rows))
return message_list, True
except Exception as exp:
print(f"Could not find room_messages with message_id {message_id}")
print(repr(exp))
return [], False
def find_all_by_room_id(self, room_id: str) -> Tuple[List[RoomMessages], bool]:
try:
self.controller_database.run_query_with_args(
f'''
SELECT * from {self.table_name} where room_id = :id
''',
{"id": room_id}
)
result = self.controller_database.fetch_all_results_from_last_query()
message_list: List[RoomMessages] = []
for rows in result:
message_list.append(RoomMessages(*rows))
return message_list, True
except Exception as exp:
print(f"Could not find room_messages with room_id {room_id}")
print(repr(exp))
return [], False
def find_one_by_user_id(self, message_id: str) -> Tuple[RoomMessages or None, bool]:
try:
self.controller_database.run_query_with_args(
f'''
SELECT * from {self.table_name} where message_id = :id
''',
{"id": message_id}
)
result = self.controller_database.fetch_one_result_from_last_query()
if result:
message = RoomMessages(*result)
return message, True
except Exception as exp:
print(f"Could not find participant with room_id {message_id}")
print(repr(exp))
return None, False
def find_one_by_room_id(self, room_id: str) -> Tuple[RoomMessages or None, bool]:
try:
self.controller_database.run_query_with_args(
f'''
SELECT * from {self.table_name} where room_id = :id
''',
{"id": room_id}
)
result = self.controller_database.fetch_one_result_from_last_query()
if result:
message = RoomMessages(*result)
return message, True
except Exception as exp:
print(f"Could not find room_message with room_id {room_id}")
print(repr(exp))
return None, False
def update_by_user_id(self, message_id: str, new_data: RoomMessages) -> bool:
try:
self.controller_database.run_query_with_args(
query=f'''
UPDATE {self.table_name}
SET
message_id = :message_id,
room_id = :room_id
WHERE message_id = :search_user_id;
''',
args={
"search_user_id": message_id,
"message_id": new_data.message_id,
"room_id": new_data.room_id
}
)
self.controller_database.save_changes()
except Exception as exp:
print(f"Could not update room_messages with room_id {message_id}")
print(repr(exp))
return False
return True
def update_by_room_id(self, room_id: str, new_data: RoomMessages) -> bool:
try:
self.controller_database.run_query_with_args(
query=f'''
UPDATE {self.table_name}
SET
                        message_id = :message_id,
room_id = :room_id
WHERE room_id = :search_room_id;
''',
args={
"search_room_id": room_id,
"message_id": new_data.message_id,
"room_id": new_data.room_id
}
)
self.controller_database.save_changes()
except Exception as exp:
print(f"Could not update room_messages with room_id {room_id}")
print(repr(exp))
return False
return True
def delete_by_user_id(self, message_id: str) -> bool:
try:
self.controller_database.run_query_with_args(
query=f'''
DELETE FROM {self.table_name}
                    WHERE message_id = :message_id
''',
args={"message_id": message_id}
)
self.controller_database.save_changes()
except Exception as exp:
print(f"Could not delete room_messages with message_id {message_id}")
print(repr(exp))
return False
return True
def delete_by_room_id(self, room_id: str) -> bool:
try:
self.controller_database.run_query_with_args(
query=f'''
DELETE FROM {self.table_name}
WHERE room_id = :room_id
''',
args={"room_id": room_id}
)
self.controller_database.save_changes()
except Exception as exp:
print(f"Could not delete room_messages with room_id {room_id}")
print(repr(exp))
return False
return True
def put(self, room_messages: RoomMessages) -> bool:
try:
self.controller_database.run_query_with_args(
query=f'''
INSERT INTO {self.table_name}(message_id, room_id)
VALUES (:message_id,:room_id);
''',
args={
"message_id": room_messages.message_id,
"room_id": room_messages.room_id,
}
)
self.controller_database.save_changes()
except Exception as exp:
print(f"Could not create room_messages {room_messages.__str__()}")
print(repr(exp))
return False
return True
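# Hedged usage sketch, added for illustration only. RoomMessages is assumed to take
# (message_id, room_id), matching the columns bound in put() above, and the ids are invented;
# the calls stay commented because they need the concord.db schema to already exist:
# repo = RoomMessagesRepository("concord.db")
# repo.put(RoomMessages("msg-123", "room-42"))
# messages, ok = repo.find_all_by_room_id("room-42")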
| 31.004566
| 88
| 0.529161
|
651ffe53868ff5b7fc01fd3b1b7a395b1f57fae5
| 2,280
|
py
|
Python
|
observatory/dashboard/models/Contributor.py
|
natestedman/Observatory
|
6e810b22d844416b2a3057e99ef23baa0d122ab4
|
[
"0BSD"
] | 1
|
2015-01-16T04:17:54.000Z
|
2015-01-16T04:17:54.000Z
|
observatory/dashboard/models/Contributor.py
|
natestedman/Observatory
|
6e810b22d844416b2a3057e99ef23baa0d122ab4
|
[
"0BSD"
] | null | null | null |
observatory/dashboard/models/Contributor.py
|
natestedman/Observatory
|
6e810b22d844416b2a3057e99ef23baa0d122ab4
|
[
"0BSD"
] | null | null | null |
# Copyright (c) 2010, individual contributors (see AUTHORS file)
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from django.db import models
from django.contrib.auth.models import User
from Project import Project
# Contributors are automatically added to a project when a commit by them is
# found. They can either be associated with a User or simply have a string
# name. When a User is added to a project's authors, it will no longer be
# displayed as a contributor, but it will remain stored as one in case the
# author status is removed.
class Contributor(models.Model):
class Meta:
app_label = 'dashboard'
# the project the person contributed to
projects = models.ManyToManyField(Project)
# the person's user model or name/email
user = models.ForeignKey(User, blank = True, null = True)
name = models.CharField(max_length = 200, blank = True, null = True)
email = models.CharField(max_length = 200, blank = True, null = True)
def save(self, *args, **kwargs):
# check field lengths
if self.name is not None and len(self.name) > 200:
      self.name = self.name[0:197] + "..."
if self.email is not None and len(self.email) > 200:
self.email = self.email[0:197] + "..."
super(Contributor, self).save(*args, **kwargs)
def __unicode__(self):
if self.user:
return self.user.get_full_name()
if self.name and self.email:
return "{0} <{1}>".format(self.name, self.email)
if self.name and not self.email:
return self.name
if self.email and not self.name:
return self.email
return str(self.id)
| 40
| 76
| 0.719298
|
d39f6776a4751d875bd81a29ed7a924ba68a64bf
| 3,500
|
py
|
Python
|
mailase/tests/functional/test_mail.py
|
greghaynes/Mailase
|
23915b8aff6ac2c5b0cbc969441fd02a86048c08
|
[
"Apache-2.0"
] | 1
|
2015-02-23T20:13:01.000Z
|
2015-02-23T20:13:01.000Z
|
mailase/tests/functional/test_mail.py
|
greghaynes/Mailase
|
23915b8aff6ac2c5b0cbc969441fd02a86048c08
|
[
"Apache-2.0"
] | null | null | null |
mailase/tests/functional/test_mail.py
|
greghaynes/Mailase
|
23915b8aff6ac2c5b0cbc969441fd02a86048c08
|
[
"Apache-2.0"
] | null | null | null |
from testtools.matchers import Contains, Equals
from webtest.app import AppError
from mailase.tests.functional import base
class TestRootController(base.FunctionalTest):
def test_get_not_found(self):
response = self.app.get('/a/bogus/url', expect_errors=True)
assert response.status_int == 404
class TestMailboxController(base.FunctionalTest):
def test_index_without_trailing_slash(self):
response = self.app.get('/mailboxes')
self.assertThat(response.status_int, Equals(302))
self.assertThat(response.location,
Equals('http://localhost/mailboxes/'))
def test_index_no_mailboxes(self):
response = self.app.get('/mailboxes/')
self.assertThat(response.status_int, Equals(200))
self.assertThat(response.json, Equals([]))
def test_index_one_mailbox(self):
self.useMailbox('INBOX')
response = self.app.get('/mailboxes/')
self.assertThat(response.status_int, Equals(200))
self.assertThat(response.json, Equals([{'id': 'INBOX',
'mail_briefs': []}]))
def test_index_multi_mailbox(self):
self.useMailbox('INBOX')
self.useMailbox('SPAM')
response = self.app.get('/mailboxes/')
self.assertThat(response.status_int, Equals(200))
self.assertThat(response.json,
Contains({'id': 'INBOX',
'mail_briefs': []}))
self.assertThat(response.json,
Contains({'id': 'SPAM',
'mail_briefs': []}))
self.assertThat(len(response.json), Equals(2))
def test_mailbox_invalid_index(self):
self.assertRaises(AppError, self.app.get, '/mailboxes/invalid')
class TestMailcontroller(base.FunctionalTest):
def test_get_message_cur_hello_world(self):
self.useMessage('helloworld', 'INBOX', 'cur')
response = self.app.get('/mail/INBOX/helloworld:')
msg = {'brief': {'id': 'helloworld:',
'mailbox_id': 'INBOX',
'modified_on': 123456,
'receiver': '"Mailase Receiver" <receiver@mailase.test>',
'sender': '"Mailase Sender" <sender@mailase.test>',
'subdir': 'cur',
'subject': 'Hello World!'},
'text_payloads': ['Hello, World!\n']}
self.assertThat(response.json, Equals(msg))
def test_get_message_new_hello_world(self):
self.useMessage('helloworld', 'INBOX', 'new')
response = self.app.get('/mail/INBOX/helloworld:')
msg = {'brief': {'id': 'helloworld:',
'mailbox_id': 'INBOX',
'modified_on': 123456,
'receiver': '"Mailase Receiver" <receiver@mailase.test>',
'sender': '"Mailase Sender" <sender@mailase.test>',
'subdir': 'new',
'subject': 'Hello World!'},
'text_payloads': ['Hello, World!\n']}
self.assertThat(response.json, Equals(msg))
def test_get_invalid_message(self):
# Make sure dirs are made
self.useMessage('helloworld', 'INBOX', 'cur')
self.assertRaises(AppError, self.app.get, '/mail/INBOX/missing')
def test_get_message_invalid_mailbox(self):
self.assertRaises(AppError, self.app.get, '/mail/INBOX/missing')
| 42.168675
| 82
| 0.575429
|
097d82d07be26b66b27252d4d95e5d82a9c0d463
| 2,700
|
py
|
Python
|
footing/setup.py
|
Opus10/footing
|
358b644a5f80d2c6a49092b719050ccd9bcfaec1
|
[
"BSD-3-Clause"
] | 3
|
2022-01-31T13:56:31.000Z
|
2022-03-30T01:11:08.000Z
|
footing/setup.py
|
Opus10/footing
|
358b644a5f80d2c6a49092b719050ccd9bcfaec1
|
[
"BSD-3-Clause"
] | null | null | null |
footing/setup.py
|
Opus10/footing
|
358b644a5f80d2c6a49092b719050ccd9bcfaec1
|
[
"BSD-3-Clause"
] | null | null | null |
"""
footing.setup
~~~~~~~~~~~~~
Creates and initializes a project from a template
"""
import subprocess
import unittest.mock
import cookiecutter.generate as cc_generate
import cookiecutter.hooks as cc_hooks
import footing.check
import footing.constants
import footing.utils
def _patched_run_hook(hook_name, project_dir, context):
"""Used to patch cookiecutter's ``run_hook`` function.
This patched version ensures that the footing.yaml file is created before
any cookiecutter hooks are executed
"""
if hook_name == 'post_gen_project':
with footing.utils.cd(project_dir):
footing.utils.write_footing_config(
context['cookiecutter'],
context['template'],
context['version'],
)
return cc_hooks.run_hook(hook_name, project_dir, context)
def _generate_files(repo_dir, config, template, version):
"""Uses cookiecutter to generate files for the project.
Monkeypatches cookiecutter's "run_hook" to ensure that the footing.yaml file is
generated before any hooks run. This is important to ensure that hooks can also
perform any actions involving footing.yaml
"""
with unittest.mock.patch('cookiecutter.generate.run_hook', side_effect=_patched_run_hook):
cc_generate.generate_files(
repo_dir=repo_dir,
context={
'cookiecutter': config,
'template': template,
'version': version,
},
overwrite_if_exists=False,
output_dir='.',
)
@footing.utils.set_cmd_env_var('setup')
def setup(template, version=None):
"""Sets up a new project from a template
Note that the `footing.constants.FOOTING_ENV_VAR` is set to 'setup' during the duration
of this function.
Args:
template (str): The git path to a template
version (str, optional): The version of the template to use when updating. Defaults
to the latest version
"""
footing.check.not_in_git_repo()
repo_path = footing.utils.get_repo_path(template)
msg = (
'You will be prompted for the parameters of your new project.'
' Please read the docs at https://github.com/{} before entering parameters.'
).format(repo_path)
print(msg)
cc_repo_dir, config = footing.utils.get_cookiecutter_config(template, version=version)
if not version:
with footing.utils.cd(cc_repo_dir):
ret = footing.utils.shell('git rev-parse HEAD', stdout=subprocess.PIPE)
version = ret.stdout.decode('utf-8').strip()
_generate_files(repo_dir=cc_repo_dir, config=config, template=template, version=version)
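# Illustrative call (the template path below is hypothetical; any git path to a footing
# template works the same way):
#   setup('github.com/your-org/your-template')
# With version unset, the template's latest commit hash is resolved above and recorded
# in the generated footing.yaml.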
| 32.53012
| 94
| 0.674444
|
d599b463f6c2b587fb24cd096131a95c9434ecff
| 3,135
|
py
|
Python
|
autogluon/utils/tabular/ml/models/tabular_nn/hyperparameters/searchspaces.py
|
zhanghang1989/autogluon
|
8bfe6b0da8915020eeb9895fd18d7688c0d604c1
|
[
"Apache-2.0"
] | 2
|
2021-09-14T21:28:54.000Z
|
2021-11-17T09:52:41.000Z
|
autogluon/utils/tabular/ml/models/tabular_nn/hyperparameters/searchspaces.py
|
zhanghang1989/autogluon
|
8bfe6b0da8915020eeb9895fd18d7688c0d604c1
|
[
"Apache-2.0"
] | null | null | null |
autogluon/utils/tabular/ml/models/tabular_nn/hyperparameters/searchspaces.py
|
zhanghang1989/autogluon
|
8bfe6b0da8915020eeb9895fd18d7688c0d604c1
|
[
"Apache-2.0"
] | 1
|
2021-02-04T23:29:47.000Z
|
2021-02-04T23:29:47.000Z
|
""" Default hyperparameter search spaces used in Neural network model """
from .......core import Categorical, Real
from ....constants import BINARY, MULTICLASS, REGRESSION
def get_default_searchspace(problem_type, num_classes=None):
if problem_type == BINARY:
return get_searchspace_binary().copy()
elif problem_type == MULTICLASS:
return get_searchspace_multiclass(num_classes=num_classes)
elif problem_type == REGRESSION:
return get_searchspace_regression().copy()
else:
return get_searchspace_binary().copy()
def get_searchspace_multiclass(num_classes):
# Search space we use by default (only specify non-fixed hyperparameters here): # TODO: move to separate file
params = {
'learning_rate': Real(1e-4, 3e-2, default=3e-4, log=True),
'weight_decay': Real(1e-12, 0.1, default=1e-6, log=True),
'dropout_prob': Real(0.0, 0.5, default=0.1),
# 'layers': Categorical(None, [200, 100], [256], [2056], [1024, 512, 128], [1024, 1024, 1024]),
'layers': Categorical(None, [200, 100], [256], [100, 50], [200, 100, 50], [50, 25], [300, 150]),
'embedding_size_factor': Real(0.5, 1.5, default=1.0),
'network_type': Categorical('widedeep','feedforward'),
'use_batchnorm': Categorical(True, False),
'activation': Categorical('relu', 'softrelu'),
# 'batch_size': Categorical(512, 1024, 2056, 128), # this is used in preprocessing so cannot search atm
}
return params
def get_searchspace_binary():
params = {
'learning_rate': Real(1e-4, 3e-2, default=3e-4, log=True),
'weight_decay': Real(1e-12, 0.1, default=1e-6, log=True),
'dropout_prob': Real(0.0, 0.5, default=0.1),
# 'layers': Categorical(None, [200, 100], [256], [2056], [1024, 512, 128], [1024, 1024, 1024]),
'layers': Categorical(None, [200, 100], [256], [100, 50], [200, 100, 50], [50, 25], [300, 150]),
'embedding_size_factor': Real(0.5, 1.5, default=1.0),
'network_type': Categorical('widedeep','feedforward'),
'use_batchnorm': Categorical(True, False),
'activation': Categorical('relu', 'softrelu'),
# 'batch_size': Categorical(512, 1024, 2056, 128), # this is used in preprocessing so cannot search atm
}
return params
def get_searchspace_regression():
params = {
'learning_rate': Real(1e-4, 3e-2, default=3e-4, log=True),
'weight_decay': Real(1e-12, 0.1, default=1e-6, log=True),
'dropout_prob': Real(0.0, 0.5, default=0.1),
# 'layers': Categorical(None, [200, 100], [256], [2056], [1024, 512, 128], [1024, 1024, 1024]),
'layers': Categorical(None, [200, 100], [256], [100, 50], [200, 100, 50], [50, 25], [300, 150]),
'embedding_size_factor': Real(0.5, 1.5, default=1.0),
'network_type': Categorical('widedeep','feedforward'),
'use_batchnorm': Categorical(True, False),
'activation': Categorical('relu', 'softrelu', 'tanh'),
# 'batch_size': Categorical(512, 1024, 2056, 128), # this is used in preprocessing so cannot search atm
}
return params
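# Illustrative lookup (the problem-type constants are imported at the top of this module):
#   space = get_default_searchspace(MULTICLASS, num_classes=10)
#   space['learning_rate']  # -> Real(1e-4, 3e-2, default=3e-4, log=True)
# Unrecognized problem types fall back to the binary search space, as coded in
# get_default_searchspace above.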
| 48.984375
| 114
| 0.628708
|
7ebe8b55937fa9a2f0d8887aa13b77d46b58be98
| 449
|
py
|
Python
|
window/__init__.py
|
fredi-68/OWWvisualScripting
|
0123222ef9eb2a69504eb046ce250bc667922ae0
|
[
"MIT"
] | 2
|
2019-07-13T23:01:05.000Z
|
2021-04-28T22:11:18.000Z
|
window/__init__.py
|
fredi-68/OWWvisualScripting
|
0123222ef9eb2a69504eb046ce250bc667922ae0
|
[
"MIT"
] | 1
|
2019-10-06T09:34:58.000Z
|
2019-10-06T09:34:58.000Z
|
window/__init__.py
|
fredi-68/OWWvisualScripting
|
0123222ef9eb2a69504eb046ce250bc667922ae0
|
[
"MIT"
] | null | null | null |
"""
Window Framework 3.0
"""
__title__ = "window"
__author__ = "fredi_68"
__version__ = "3.0.0"
from .window import Window, ApplicationHandle, EventHandle
from .enums import SDLFlags, SDLDrivers
from .components import Background, Image, Text, HorizontalGradient, VerticalGradient, Surface
from .style import Style, DefaultStyle, StylePackageError, StylePackage
from .errors import WindowError
from . import window, enums, style, ui, components
| 26.411765
| 94
| 0.786192
|
2c2fe66a998afeed6902018ce0aff57ab5ee81db
| 3,875
|
py
|
Python
|
src/test_emb.py
|
anhnt170489/FunMOT
|
6eb794bd485be42270eaee3804e13d38a897a945
|
[
"MIT"
] | null | null | null |
src/test_emb.py
|
anhnt170489/FunMOT
|
6eb794bd485be42270eaee3804e13d38a897a945
|
[
"MIT"
] | null | null | null |
src/test_emb.py
|
anhnt170489/FunMOT
|
6eb794bd485be42270eaee3804e13d38a897a945
|
[
"MIT"
] | 1
|
2021-11-09T02:50:19.000Z
|
2021-11-09T02:50:19.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import _init_paths
import argparse
import torch
import json
import time
import os
import cv2
import math
from sklearn import metrics
from scipy import interpolate
import numpy as np
from torchvision.transforms import transforms as T
import torch.nn.functional as F
from models.model import create_model, load_model
from datasets.dataset.jde import JointDataset, collate_fn
from models.utils import _tranpose_and_gather_feat
from utils.utils import xywh2xyxy, ap_per_class, bbox_iou
from opts import opts
from models.decode import mot_decode
from utils.post_process import ctdet_post_process
def test_emb(
opt,
batch_size=16,
img_size=(1088, 608),
print_interval=40,
):
data_cfg = opt.data_cfg
f = open(data_cfg)
data_cfg_dict = json.load(f)
f.close()
nC = 1
test_paths = data_cfg_dict['test_emb']
dataset_root = data_cfg_dict['root']
if opt.gpus[0] >= 0:
opt.device = torch.device('cuda')
else:
opt.device = torch.device('cpu')
print('Creating model...')
model = create_model(opt.arch, opt.heads, opt.head_conv)
model = load_model(model, opt.load_model)
# model = torch.nn.DataParallel(model)
model = model.to(opt.device)
model.eval()
# Get dataloader
transforms = T.Compose([T.ToTensor()])
    dataset = JointDataset(opt, dataset_root, test_paths, img_size, augment=False, transforms=transforms)
dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=False,
num_workers=8, drop_last=False)
embedding, id_labels = [], []
print('Extracting pedestrain features...')
for batch_i, batch in enumerate(dataloader):
t = time.time()
output = model(batch['input'].cuda())[-1]
id_head = _tranpose_and_gather_feat(output['id'], batch['ind'].cuda())
id_head = id_head[batch['reg_mask'].cuda() > 0].contiguous()
emb_scale = math.sqrt(2) * math.log(opt.nID - 1)
id_head = emb_scale * F.normalize(id_head)
id_target = batch['ids'].cuda()[batch['reg_mask'].cuda() > 0]
for i in range(0, id_head.shape[0]):
if len(id_head.shape) == 0:
continue
else:
feat, label = id_head[i], id_target[i].long()
if label != -1:
embedding.append(feat)
id_labels.append(label)
if batch_i % print_interval == 0:
print(
'Extracting {}/{}, # of instances {}, time {:.2f} sec.'.format(batch_i, len(dataloader), len(id_labels),
time.time() - t))
print('Computing pairwise similairity...')
if len(embedding) < 1:
return None
embedding = torch.stack(embedding, dim=0).cuda()
id_labels = torch.LongTensor(id_labels)
n = len(id_labels)
print(n, len(embedding))
assert len(embedding) == n
embedding = F.normalize(embedding, dim=1)
pdist = torch.mm(embedding, embedding.t()).cpu().numpy()
gt = id_labels.expand(n, n).eq(id_labels.expand(n, n).t()).numpy()
up_triangle = np.where(np.triu(pdist) - np.eye(n) * pdist != 0)
pdist = pdist[up_triangle]
gt = gt[up_triangle]
far_levels = [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1]
far, tar, threshold = metrics.roc_curve(gt, pdist)
interp = interpolate.interp1d(far, tar)
tar_at_far = [interp(x) for x in far_levels]
for f, fa in enumerate(far_levels):
print('TPR@FAR={:.7f}: {:.4f}'.format(fa, tar_at_far[f]))
return tar_at_far
if __name__ == '__main__':
os.environ['CUDA_VISIBLE_DEVICES'] = '1'
opt = opts().init()
with torch.no_grad():
tpr = test_emb(opt, batch_size=4)
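# TPR@FAR reported above is the true-positive rate of the appearance embedding at fixed
# false-accept rates (1e-6 .. 1e-1), obtained by interpolating the ROC curve of pairwise
# cosine similarities between normalized id features against ground-truth identity matches.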
| 34.598214
| 120
| 0.63329
|
34059d6df06fc1cfa0e27638c93a0a0077eafc99
| 4,606
|
py
|
Python
|
DSKM.py
|
ehsansherkat/DSKM
|
aacca3d7f8a5a4f352f8104aea6237e296c176e6
|
[
"CC-BY-3.0"
] | null | null | null |
DSKM.py
|
ehsansherkat/DSKM
|
aacca3d7f8a5a4f352f8104aea6237e296c176e6
|
[
"CC-BY-3.0"
] | null | null | null |
DSKM.py
|
ehsansherkat/DSKM
|
aacca3d7f8a5a4f352f8104aea6237e296c176e6
|
[
"CC-BY-3.0"
] | null | null | null |
"""
Author: Ehsan Sherkat
Last modification date: August 30, 2018
"""
import numpy as np
import sys
import util
def DSKM(data, k):
"""
Deterministic Seeding KMeans (DSKM)
:param data
:param k: number of clusters
"""
initialCenters = np.zeros((k, data.shape[1]))
initialCentersIndex = []
initialCentersIndexOriginal = [0] * k
data_sorted = []
rank_documents = util.rankDocuments(data, reverse=True)
similarities = np.zeros((data.shape[0], data.shape[0]))
for doc in rank_documents:
data_sorted.append(data[doc[0]])
pairWiseSimilarityMatrix = util.pairWiseSimilarity(data_sorted)
initialCenters[0] = data_sorted[0]
initialCentersIndexOriginal[0] = rank_documents[0][0]
initialCentersIndex.append(0)
util.getDsim(similarities, pairWiseSimilarityMatrix, 0)
averageHash = {}
averageHash[0] = np.average(similarities[0])
counter = 0
while counter < k:
max_different = 0
for index in range(0, pairWiseSimilarityMatrix.shape[0]):
if index not in initialCentersIndex:
found = True
different = 0
for centerIndex in initialCentersIndex:
maxSimilarity = averageHash[centerIndex]
if similarities[centerIndex][index] >= maxSimilarity:
found = False
break
else:
different += 1
if different > max_different:
max_different = different
if found:
initialCentersIndexOriginal[counter] = rank_documents[index][0]
util.getDsim(similarities, pairWiseSimilarityMatrix, index)
averageHash[index] = np.average(similarities[index])
if counter == 0:
initialCentersIndex[0] = index
else:
initialCentersIndex.append(index)
initialCenters[counter] = data_sorted[index]
counter += 1
break
if not found:
different = [0] * similarities.shape[0]
for index in range(0, pairWiseSimilarityMatrix.shape[0]):
if index not in initialCentersIndex:
for initialCenterIndex in initialCentersIndex:
different[index] += similarities[initialCenterIndex][index]
else:
                    different[index] = sys.maxsize
index = np.argmin(different)
initialCentersIndexOriginal[counter] = rank_documents[index][0]
util.getDsim(similarities, pairWiseSimilarityMatrix, index)
averageHash[index] = np.average(similarities[index])
if counter == 0:
initialCentersIndex[0] = index
else:
initialCentersIndex.append(index)
initialCenters[counter] = data_sorted[index]
counter += 1
extend = 15
for index, centerIndex in enumerate(initialCentersIndex):
initialCenters[index] = util.getCenter(similarities[centerIndex].argsort()[-extend:][::-1], data_sorted)
return initialCentersIndexOriginal, initialCentersIndex, initialCenters
def KMeans(data, n_clusters, seed="DSKM", conv_test=1e-6):
"""
The original KMeans implementation optimized for text document clustering
:param data: document-term matrix
:param n_clusters: number of clusters
:param seed: seeding method
:param conv_test: conversion threshold
:return:
"""
inertiaMax = 0.0
bestClustersCenter = np.zeros((n_clusters, data.shape[1]))
bestLabels = [0] * data.shape[0]
stepsBest = 0
if seed == "random":
centers = util.randomInit(data, n_clusters)
elif seed == "DSKM":
_, _, centers = DSKM(data, n_clusters)
else:
raise ValueError('Invalid seeding method. Select random or DSKM.')
inertia_new = 1.0
inertia = 0.0
steps = 0
while abs(inertia_new - inertia) > conv_test:
steps += 1
inertia = inertia_new
clusters, labels, inertia_new, similarityMatrix, inertia2 = util.assignDataPoints(data, centers, n_clusters) #assignment step
centers = util.calculateCenters(clusters, data) #update step
if inertia > inertiaMax:
stepsBest = steps
inertiaMax = inertia
bestClustersCenter = np.copy(centers)
bestLabels = np.copy(labels)
return bestLabels, bestClustersCenter, inertiaMax, stepsBest
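# Illustrative usage (a dense document-term matrix `data` is the assumed input):
#   labels, centers, inertia, steps = KMeans(data, n_clusters=5, seed="DSKM")
# seed="DSKM" picks the initial centers deterministically via DSKM() above, while
# seed="random" delegates to util.randomInit(), so only the latter varies between runs.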
| 34.118519
| 133
| 0.6066
|
992d3f1fed7e9bdcbc8a56f3c409224eddd16616
| 1,058
|
py
|
Python
|
tests_without_pytest/lesson2.2_step7.py
|
adilgereev/selenium_course
|
7b7ca68bb7d915c3e973292ec18d8dbaf4dc363e
|
[
"Apache-2.0"
] | null | null | null |
tests_without_pytest/lesson2.2_step7.py
|
adilgereev/selenium_course
|
7b7ca68bb7d915c3e973292ec18d8dbaf4dc363e
|
[
"Apache-2.0"
] | null | null | null |
tests_without_pytest/lesson2.2_step7.py
|
adilgereev/selenium_course
|
7b7ca68bb7d915c3e973292ec18d8dbaf4dc363e
|
[
"Apache-2.0"
] | null | null | null |
from selenium import webdriver
import time
import os
link = "http://suninjuly.github.io/file_input.html"
browser = webdriver.Chrome()
browser.get(link)
# Fill in the text fields
browser.find_element_by_css_selector("body > div > form > div > input:nth-child(2)").send_keys("Шарап")
browser.find_element_by_css_selector("body > div > form > div > input:nth-child(4)").send_keys("Адильгереев")
browser.find_element_by_css_selector("body > div > form > div > input:nth-child(6)").send_keys("sh@mail.ru")
# Upload the file
current_dir = os.path.abspath(r"C:\Users\admin\selenium_course")
# The line above can be replaced with current_dir = os.path.abspath(os.path.dirname(__file__))
file_path = os.path.join(current_dir, 'file.txt')
element = browser.find_element_by_css_selector("#file")
element.send_keys(file_path)
# Click the Submit button
browser.find_element_by_css_selector("body > div > form > button").click()
# wait so the script results can be checked visually
time.sleep(5)
# close the browser after all the interactions
browser.quit()
| 36.482759
| 109
| 0.775047
|
8b7624bc16f3e7166c768300d6bfb3f9c9645b69
| 6,170
|
py
|
Python
|
hf/load/routines/settings.py
|
HashFast/hashfast-tools
|
9617691ac997f12085b688c3ecc6746e8510976d
|
[
"BSD-3-Clause"
] | 1
|
2020-12-15T02:49:36.000Z
|
2020-12-15T02:49:36.000Z
|
hf/load/routines/settings.py
|
HashFast/hashfast-tools
|
9617691ac997f12085b688c3ecc6746e8510976d
|
[
"BSD-3-Clause"
] | null | null | null |
hf/load/routines/settings.py
|
HashFast/hashfast-tools
|
9617691ac997f12085b688c3ecc6746e8510976d
|
[
"BSD-3-Clause"
] | 3
|
2015-09-02T00:31:06.000Z
|
2020-12-15T02:52:06.000Z
|
# Copyright (c) 2014, HashFast Technologies LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of HashFast Technologies LLC nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL HASHFAST TECHNOLOGIES LLC BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import random
import sys
import time
from abc import ABCMeta, abstractmethod
from collections import deque
from usb.core import USBError
from ..hf import Send, Receive
from ..hf import HF_Parse, Garbage
from ..hf import SHUTDOWN
from ..hf import rand_job, det_job
from ..hf import check_nonce_work, sequence_a_leq_b
from ...errors import HF_Error, HF_Thermal, HF_InternalError, HF_NotConnectedError
from ...util import with_metaclass, int_to_lebytes, lebytes_to_int
from ...protocol.frame import HF_Frame, opcodes, opnames
from ...protocol.op_settings import HF_OP_SETTINGS, hf_settings, hf_die_settings
from ...protocol.op_power import HF_OP_POWER
from ...protocol.op_usb_init import HF_OP_USB_INIT, decode_op_status_job_map, list_available_cores
from ...protocol.op_usb_shutdown import HF_OP_USB_SHUTDOWN
from ...protocol.op_hash import HF_OP_HASH
from ...protocol.op_nonce import HF_OP_NONCE
from ...protocol.op_status import HF_OP_STATUS
from ...protocol.op_usb_notice import HF_OP_USB_NOTICE
from ...protocol.op_fan import HF_OP_FAN
from .base import BaseRoutine
class SettingsRoutine(BaseRoutine):
def initialize(self):
self.global_state = 'setup'
self.op_settings = HF_OP_SETTINGS.forValues(settings=hf_settings())
def set_reference(self, ref):
self.op_settings.settings.ref_frequency = ref
self.global_state = 'setup'
def setup(self, die, freq, volt):
assert die < 4
self.op_settings.settings.die[die] = hf_die_settings.forValues(frequency=freq, voltage=volt)
self.op_settings.settings.generate_frame_data()
self.op_settings.construct_framebytes()
self.global_state = 'setup'
def one_cycle(self):
try:
# Fix: Every time we send, we want also to receive (to make sure nothing
# deadlocks), so the send and receive objects should be combined.
# Fix: Do we want to have a delay in here or some sort of select() like thing?
self.receiver.receive()
self.transmitter.send([])
traffic = self.receiver.read()
if traffic:
self.parser.input(traffic)
####################
# READ
####################
            if self.global_state == 'read':
token = self.parser.next_token()
if token:
if isinstance(token, HF_OP_SETTINGS):
self.process_op_settings(token)
self.op_settings = token
self.global_state = 'wait'
return False
op = HF_OP_SETTINGS()
self.transmitter.send(op.framebytes)
self.printer("Sent OP_SETTINGS request")
####################
# WAIT
####################
            elif self.global_state == 'wait':
time.sleep(1)
####################
# SETUP
####################
            elif self.global_state == 'setup':
self.op_settings.settings.generate_frame_data()
self.op_settings.construct_framebytes()
self.transmitter.send(self.op_settings.framebytes)
self.printer("Sent OP_SETTINGS write")
#self.printer(self.op_settings.framebytes)
self.printer(self.op_settings)
self.global_state = 'confirm'
####################
# CONFIRM
####################
            elif self.global_state == 'confirm':
token = self.parser.next_token()
if token:
if isinstance(token, HF_OP_SETTINGS):
self.process_op_settings(token)
time.sleep(1)
op_power = HF_OP_POWER(power=0x1)
self.transmitter.send(op_power.framebytes)
self.printer("Sent OP_POWER")
time.sleep(1)
op_power = HF_OP_POWER(power=0x2)
self.transmitter.send(op_power.framebytes)
self.printer("Sent OP_POWER")
time.sleep(1)
self.global_state = 'bleh'
return False
op = HF_OP_SETTINGS()
self.transmitter.send(op.framebytes)
self.printer("Sent OP_SETTINGS request")
else:
# Unknown state
raise HF_Error("Unknown global_state: %s" % (self.global_state))
return True
except KeyboardInterrupt:
self.end()
return False
except USBError as e:
#e.errno
self.printer("USB Error: (%s, %s, %s)" % (sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]))
self.end()
return False
except:
self.printer("Generic exception handler: (%s, %s, %s)" % (sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2]))
self.end()
return False
| 38.5625
| 121
| 0.666126
|
9e6521a63ae439e33cdb80c08ec1a9e119763c8b
| 1,092
|
py
|
Python
|
examples/twisted/wamp/work/newapi/test_newapi_multiple_connections.py
|
meejah/AutobahnPython
|
54da8882eea3f4b1da62a6d3481556ab77720d41
|
[
"MIT"
] | null | null | null |
examples/twisted/wamp/work/newapi/test_newapi_multiple_connections.py
|
meejah/AutobahnPython
|
54da8882eea3f4b1da62a6d3481556ab77720d41
|
[
"MIT"
] | null | null | null |
examples/twisted/wamp/work/newapi/test_newapi_multiple_connections.py
|
meejah/AutobahnPython
|
54da8882eea3f4b1da62a6d3481556ab77720d41
|
[
"MIT"
] | null | null | null |
from twisted.internet import reactor
import txaio
from autobahn.twisted.wamp import Connection
def main1(connection):
print('main1 created', connection)
def on_join(session):
print('main1 joined', session)
session.leave()
connection.on_join(on_join)
def main2(connection):
print('main2 created', connection)
def on_join(session):
print('main2 joined', session)
session.leave()
connection.on_join(on_join)
def run(entry_points):
transports = [
{
"type": "websocket",
"url": "ws://127.0.0.1:8080/ws"
}
]
done = []
for main in entry_points:
connection = Connection(main, realm=u'public',
transports=transports, reactor=reactor)
done.append(connection.connect())
# deferred that fires when all connections are done
done = txaio.gather(done)
def finish(res):
print("all connections done", res)
reactor.stop()
done.addBoth(finish)
reactor.run()
if __name__ == '__main__':
    run([main1, main2])
| 19.5
| 55
| 0.620879
|
0d6435b01a9a2d9f5b1aec4525e81de834503045
| 104
|
py
|
Python
|
autodepgraph/node_functions/check_functions.py
|
jorgemfm27/AutoDepGraph
|
2a49613dbd01a9a49a274246d79ba84387be127b
|
[
"MIT"
] | 2
|
2019-10-14T17:55:03.000Z
|
2021-08-30T15:16:09.000Z
|
autodepgraph/node_functions/check_functions.py
|
jorgemfm27/AutoDepGraph
|
2a49613dbd01a9a49a274246d79ba84387be127b
|
[
"MIT"
] | 51
|
2017-04-26T14:35:59.000Z
|
2021-03-30T02:49:35.000Z
|
autodepgraph/node_functions/check_functions.py
|
jorgemfm27/AutoDepGraph
|
2a49613dbd01a9a49a274246d79ba84387be127b
|
[
"MIT"
] | 9
|
2017-09-18T18:41:20.000Z
|
2022-02-26T03:31:28.000Z
|
def return_fixed_value():
'''
Always return 1.0,
useful as a default
'''
return 1.0
| 14.857143
| 25
| 0.567308
|
659cb2cd696a8a37e4e8f9e8f6fc5a0aa05f0c63
| 1,789
|
py
|
Python
|
src/sagemaker_inference/utils.py
|
ericangelokim/sagemaker-inference-toolkit
|
24db871b1b193ac1a924c21be8c3ec48853b3263
|
[
"Apache-2.0"
] | null | null | null |
src/sagemaker_inference/utils.py
|
ericangelokim/sagemaker-inference-toolkit
|
24db871b1b193ac1a924c21be8c3ec48853b3263
|
[
"Apache-2.0"
] | null | null | null |
src/sagemaker_inference/utils.py
|
ericangelokim/sagemaker-inference-toolkit
|
24db871b1b193ac1a924c21be8c3ec48853b3263
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import
import re
CONTENT_TYPE_REGEX = re.compile('^[Cc]ontent-?[Tt]ype')
def read_file(path, mode='r'):
"""Read data from a file.
Args:
path (str): path to the file.
mode (str): mode which the file will be open.
Returns:
(str): contents of the file.
"""
with open(path, mode) as f:
return f.read()
def write_file(path, data, mode='w'): # type: (str, str, str) -> None
"""Write data to a file.
Args:
path (str): path to the file.
data (str): data to be written to the file.
mode (str): mode which the file will be open.
"""
with open(path, mode) as f:
f.write(data)
def retrieve_content_type_header(request_property):
"""Retrieve Content-Type header from incoming request.
This function handles multiple spellings of Content-Type based on the presence of
the dash and initial capitalization in each respective word.
Args:
request_property (dict): incoming request metadata
Returns:
(str): the request content type.
"""
for key in request_property:
if CONTENT_TYPE_REGEX.match(key):
return request_property[key]
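# Note (illustrative, not part of the original module): CONTENT_TYPE_REGEX matches
# 'Content-Type', 'content-type', 'Content-type' and 'ContentType', so for a header dict
# like {'content-type': 'text/csv'} this function returns 'text/csv'; when no
# Content-Type variant is present it falls through and implicitly returns None.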
| 27.953125
| 85
| 0.671884
|
ad0bd96d35d3f9e698ca4c1a71aa3e20ca6d2491
| 6,977
|
py
|
Python
|
lambda/python/project_start_trip.py
|
ankushjain2001/SecuroDrive
|
16ba110436d91ec657c409484797cbac7a392002
|
[
"MIT"
] | null | null | null |
lambda/python/project_start_trip.py
|
ankushjain2001/SecuroDrive
|
16ba110436d91ec657c409484797cbac7a392002
|
[
"MIT"
] | 1
|
2020-12-28T06:12:27.000Z
|
2020-12-28T06:12:27.000Z
|
lambda/python/project_start_trip.py
|
ankushjain2001/SecuroDrive
|
16ba110436d91ec657c409484797cbac7a392002
|
[
"MIT"
] | 2
|
2020-12-28T06:11:23.000Z
|
2021-01-10T07:04:17.000Z
|
import json
import paramiko
import boto3
import uuid
from random import randint
import os
import sys
import random, string
import time
import datetime
import math
import cv2
import base64
sns_client = boto3.client('sns')
ec_client = boto3.client('ec2')
db_client = boto3.client('dynamodb')
s3_client = boto3.client('s3')
rek_client = boto3.client('rekognition')
# Rekognition
def person_search(email):
vid = '/tmp/stream.mp4'
end_pt = 'https://s-a7395519.kinesisvideo.us-east-1.amazonaws.com'
kvm = boto3.client('kinesis-video-media',endpoint_url=end_pt)
person_rekognized = False
bucket = 'project-frontend-web'
key = 'img'
for i in range(1):
response = kvm.get_media(
StreamARN='arn:aws:kinesisvideo:us-east-1:841622902378:stream/AmazonRekognition_user_live_feed/1608478703022',
StartSelector={
'StartSelectorType': 'NOW'
}
)
with open(vid, 'wb') as f:
body = response['Payload'].read(1024*30)
f.write(body)
# Capture video
vidcap = cv2.VideoCapture(vid)
while vidcap.isOpened():
success, image = vidcap.read()
if success or image:
img_name = str(time.time())+'.jpeg'
cv2.imwrite('/tmp/'+img_name,image)
s3_client.upload_file('/tmp/'+img_name, bucket, f'{key}/unauth_users/{img_name}')
response = rek_client.compare_faces(
SourceImage={
'S3Object': {
'Bucket': bucket,
'Name': f'{key}/unauth_users/{img_name}'
}
},
TargetImage={
'S3Object': {
'Bucket': bucket,
'Name': f'{key}/users/{email}.jpeg'
}
},
SimilarityThreshold=85,
QualityFilter='AUTO'
)
s3_client.delete_object(Bucket=bucket, Key=f'{key}/unauth_users/{img_name}')
if len(response['FaceMatches'])>0:
if int(response['FaceMatches'][0]['Similarity'])>90:
print('Accuracy: ', int(response['FaceMatches'][0]['Similarity']))
person_rekognized =True
break
else:
break
vidcap.release()
if person_rekognized==True:
break
else:
s3_client.upload_file('/tmp/'+img_name, bucket, f'{key}/unauth_users/{img_name}')
# Success - BUT WRONG PERSON
return [person_rekognized, f'http://project-frontend-web.s3-website-us-east-1.amazonaws.com/img/unauth_users/{img_name}']
# Success - RIGHT PERSON
return [person_rekognized, None]
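# Return contract relied on by lambda_handler below: (True, None) when the live frame
# matches the registered user image, and (False, <unauthorized-frame URL>) when a face is
# seen but does not match; exceptions are mapped to (False, <message>) by the caller.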
def lambda_handler(event, context):
hist_table = 'project-trip-history'
user_table = 'project-users'
s3_client.download_file('project-ecpem','linux_ami1.pem', '/tmp/file.pem')
# Data: trip data from frontend
email_id = event['email']
start = event['start']
end = event['end']
start_loc = str(start['loc'])
start_lat = str(start['lat'])
start_lon = str(start['lon'])
end_loc = str(end['loc'])
end_lat = str(end['lat'])
end_lon = str(end['lon'])
# Data: timestamp
start_time = str(time.time())
# --------- REKOGNITION ----------------------------------------------------
    # Add face recognition here
# ... if person is rekognized then change the following flag to true...
# ... else make it false
# person_rekognized = True
try:
# PERSON IS ALWAYS THERE - CAN BE RIGHT OR WRONG
person_rekognized, img_url = person_search(event['email'])
except:
person_rekognized, img_url = False, "No person was detected in the camera."
# --------------------------------------------------------------------------
if person_rekognized:
# Data: unique trip id
trip_id = str(uuid.uuid4())
try:
response = db_client.get_item(TableName=hist_table,Key={'trip_id':{'S':trip_id}})
while 'Item' in response:
try:
trip_id = str(uuid.uuid4())
response = db_client.get_item(TableName=hist_table,Key={'trip_id':{'S':trip_id}})
except:
pass
except:
pass
# --------- EC2 --------------------------------------------------------
# reading pem file and creating key object
host = 'ec2-54-160-156-213.compute-1.amazonaws.com'
user = 'ec2-user'
key = paramiko.RSAKey.from_private_key_file("/tmp/file.pem")
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname=host,username=user, pkey = key)
print("Connected to :" + host)
#command = execfile( "makefile.py")
command = 'echo $$; exec ' + 'python3 stream_processor.py ' + email_id +' '+ trip_id
stdin, stdout, stderr = ssh.exec_command(command)
# Process id
process_id = str(stdout.readline())[:-1]
#-----------------------------------------------------------------------
# Save data to history DB
response = db_client.put_item(TableName=hist_table, Item={
"trip_id": {"S": trip_id},
"email_id": {"S": email_id},
"start_time": {"S": start_time},
"start_loc": {"S": start_loc},
"start_lat": {"S": start_lat},
"start_lon": {"S": start_lon},
"end_loc": {"S": end_loc},
"end_lat": {"S": end_lat},
"end_lon": {"S": end_lon},
"process_id": {"S": str(process_id)}
})
# Response data for frontend
response_data = {
"trip_id": trip_id
}
return {
'statusCode': 200,
'body': 'Success from Lambda!',
'response': response_data
}
else:
response_data = {"error": "Unrecognized personnel using the service. The owner has been notified."}
db_res = db_client.get_item(TableName=user_table, Key={'email':{'S':email_id}})
if 'Item' in db_res:
# Send SMS for unauthorized access
sns_client.publish(
PhoneNumber = str(db_res['Item']['phone_number']['S']),
Message = "An unrecognized personnel is using your Securodrive vehicle. Please check your vehicle for any unauthorized access. " +img_url
)
return {
'statusCode': 400,
'body': json.dumps('Hello from Lambda!'),
'response': response_data
}
| 34.711443
| 153
| 0.517988
|
d7b51163be7e7a93e15f27a342d2bed11b83da6a
| 1,300
|
py
|
Python
|
docs/example_erd.py
|
wmvanvliet/psychic
|
4ab75fb655795df0272c1bb0eb0dfeb232ffe143
|
[
"BSD-3-Clause"
] | 3
|
2015-06-13T16:51:36.000Z
|
2017-07-02T21:06:06.000Z
|
docs/example_erd.py
|
breuderink/psychic
|
a89dd821b801e315df785f633e88689f4f5a93bf
|
[
"BSD-3-Clause"
] | null | null | null |
docs/example_erd.py
|
breuderink/psychic
|
a89dd821b801e315df785f633e88689f4f5a93bf
|
[
"BSD-3-Clause"
] | null | null | null |
import logging, glob
import golem, psychic
import matplotlib.pyplot as plt
import numpy as np
from scipy import signal
def logvar_feat(x):
return np.log(np.var(x, axis=0))
def window(x):
return x * np.hanning(x.shape[0]).reshape(-1, 1)
# Define preprocessing pipeline
preprocessing = golem.nodes.Chain([
# Clip extreme values
psychic.nodes.Winsorize(),
# Filter to beta range (8--30 Hz)
psychic.nodes.Filter(lambda s : signal.iirfilter(6, [8./(s/2), 30./(s/2)])),
# Extract 2 second window centered on key-press
psychic.nodes.Slice({1:'left', 2:'right'}, [-.7, .7]),
])
pipeline = golem.nodes.Chain([
golem.nodes.FeatMap(window),
psychic.nodes.CSP(m=6),
golem.nodes.FeatMap(logvar_feat),
golem.nodes.SVM(C=1e5)])
# Setup logging levels
logging.basicConfig(level=logging.WARNING)
logging.getLogger('golem.nodes.ModelSelect').setLevel(logging.INFO)
# Load dataset (see also psychic.helpers.bdf_dataset for .bdf files)
d = golem.DataSet.load('S9.dat')
d0 = d
# Preprocess
preprocessing.train(d) # Required to calculate sampling rate
d = preprocessing.apply(d)
NTRAIN = 500
d, dtest = d[:NTRAIN], d[NTRAIN:]
# Build classifier
pipeline.train(d)
# Do predictions
pred = pipeline.apply(dtest)
print 'Acc:', golem.perf.accuracy(pred)
print 'AUC:', golem.perf.auc(pred)
| 24.528302
| 78
| 0.716923
|
b423014243456089ecd5aad37973fde04f1c6b99
| 14,334
|
py
|
Python
|
databench/analysis.py
|
springcoil/databench
|
dba2cb412da099a3eb970e4b0cf257a62b012958
|
[
"MIT"
] | null | null | null |
databench/analysis.py
|
springcoil/databench
|
dba2cb412da099a3eb970e4b0cf257a62b012958
|
[
"MIT"
] | null | null | null |
databench/analysis.py
|
springcoil/databench
|
dba2cb412da099a3eb970e4b0cf257a62b012958
|
[
"MIT"
] | null | null | null |
"""Analysis module for Databench."""
import os
import json
import time
import gevent
import logging
import zipstream
import subprocess
import geventwebsocket
import zmq.green as zmq
from flask import Blueprint, render_template, Response
class Analysis(object):
"""Databench's analysis class.
This contains the analysis code. Every browser connection corresponds to
and instance of this class.
**Incoming messages** are captured by specifying a class method starting
with ``on_`` followed by the signal name. To capture the frontend signal
``run`` that is emitted with the JavaScript code
.. code-block:: js
// on the JavaScript frontend
databench.emit('run', {my_param: 'helloworld'});
use
.. code-block:: python
# here in Python
def on_run(self, my_param):
here. The entries of a dictionary will be used as keyword arguments in the
function call; as in this example. If the emitted message is an array,
the entries will be used as positional arguments in the function call.
If the message is neither of type ``list`` nor ``dict`` (for example a
plain ``string`` or ``float``), the function will be called with that
as its first parameter.
**Outgoing messages** are sent using ``emit(signal_name, message)``.
For example, use
.. code-block:: python
self.emit('result', {'msg': 'done'})
to send the signal ``result`` with the message ``{'msg': 'done'}`` to
the frontend.
"""
def __init__(self):
pass
def set_emit_fn(self, emit_fn):
"""Sets what the emit function for this analysis will be."""
self.emit = emit_fn
"""Events."""
def onall(self, message_data):
logging.debug('onall called.')
def on_connect(self):
logging.debug('on_connect called.')
def on_disconnect(self):
logging.debug('on_disconnect called.')
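# Minimal illustrative subclass (not part of the library; the names are examples only).
# A frontend call databench.emit('run', {'my_param': 'helloworld'}) is routed to on_run()
# below, and self.emit() pushes the 'result' signal back, as described in the docstring:
#
#   class HelloWorld(Analysis):
#       def on_run(self, my_param):
#           self.emit('result', {'msg': 'done ' + my_param})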
class Meta(object):
"""
Args:
name (str): Name of this analysis. If ``signals`` is not specified,
this also becomes the namespace for the WebSocket connection and
has to match the frontend's :js:class:`Databench` ``name``.
import_name (str): Usually the file name ``__name__`` where this
analysis is instantiated.
description (str): Usually the ``__doc__`` string of the analysis.
analysis_class (:class:`databench.Analysis`): Object
that should be instantiated for every new websocket connection.
For standard use cases, you don't have to modify this class. However,
If you want to serve more than the ``index.html`` page, say a
``details.html`` page, you can derive from this class and add this
to the constructor
.. code-block:: python
self.blueprint.add_url_rule('/details.html', 'render_details',
self.render_details)
and add a new method to the class
.. code-block:: python
def render_details(self):
return render_template(
self.name+'/details.html',
analysis_description=self.description
)
and create the file ``details.html`` similar to ``index.html``.
"""
all_instances = []
def __init__(
self,
name,
import_name,
description,
analysis_class,
):
Meta.all_instances.append(self)
self.show_in_index = True
self.name = name
self.import_name = import_name
self.header = {'logo': '/static/logo.svg', 'title': 'Databench'}
self.description = description
self.analysis_class = analysis_class
analyses_path = os.getcwd()+'/'+'analyses'
if not os.path.exists(analyses_path):
analyses_path = os.getcwd()+'/'+'databench/analyses_packaged'
if not os.path.exists(analyses_path):
logging.info('Folder for '+self.name+' not found.')
self.analyses_path = analyses_path
# detect whether thumbnail.png is present
if os.path.isfile(analyses_path+'/'+self.name+'/thumbnail.png'):
self.thumbnail = 'thumbnail.png'
self.blueprint = Blueprint(
name,
import_name,
template_folder=analyses_path,
static_folder=analyses_path+'/'+self.name,
static_url_path='/static',
)
self.blueprint.add_url_rule('/', 'render_template',
self.render_template)
self.blueprint.add_url_rule('/<templatename>', 'render_template',
self.render_template)
self.blueprint.add_url_rule('/'+name+'.zip', 'zip_analysis',
self.zip_analysis, methods=['GET'])
self.sockets = None
def render_template(self, templatename='index.html'):
"""Renders the main analysis frontend template."""
logging.debug('Rendering '+templatename)
return render_template(
self.name+'/'+templatename,
header=self.header,
analysis_name=self.name,
analysis_description=self.description,
)
def zip_analysis(self):
def generator():
z = zipstream.ZipFile(mode='w',
compression=zipstream.ZIP_DEFLATED)
# find all analysis files
folder = self.analyses_path+'/'+self.name
for root, dirnames, filenames in os.walk(folder):
invisible_dirs = [d for d in dirnames if d[0] == '.']
for d in invisible_dirs:
dirnames.remove(d)
for filename in filenames:
if filename[0] == '.':
continue
if filename[-4:] == '.pyc':
continue
# add the file to zipstream
fullname = os.path.join(root, filename)
arcname = fullname.replace(self.analyses_path+'/', '')
z.write(fullname, arcname=arcname)
# add requirements.txt if present
if os.path.isfile(self.analyses_path+'/requirements.txt'):
z.write(self.analyses_path+'/requirements.txt')
for chunk in z:
yield chunk
response = Response(generator(), mimetype='application/zip')
response.headers['Content-Disposition'] = \
'attachment; filename='+self.name+'.zip'
return response
def wire_sockets(self, sockets, url_prefix=''):
self.sockets = sockets
self.sockets.add_url_rule(url_prefix+'/ws', 'ws_serve', self.ws_serve)
def instantiate_analysis_class(self):
return self.analysis_class()
@staticmethod
def run_action(analysis, fn_name, message):
"""Executes an action in the analysis with the given message. It
also handles the start and stop signals in case an action_id
is given."""
# detect action_id
action_id = None
if isinstance(message, dict) and '__action_id' in message:
action_id = message['__action_id']
del message['__action_id']
if action_id:
analysis.emit('__action', {'id': action_id, 'status': 'start'})
fn = getattr(analysis, fn_name)
# Check whether this is a list (positional arguments)
# or a dictionary (keyword arguments).
if isinstance(message, list):
fn(*message)
elif isinstance(message, dict):
fn(**message)
else:
fn(message)
if action_id:
analysis.emit('__action', {'id': action_id, 'status': 'end'})
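    # Example of the payload handled above (illustrative): a frontend frame
    #   {'signal': 'run', 'load': {'__action_id': 3, 'my_param': 'x'}}
    # leads ws_serve() below to call run_action with fn_name='on_run' and the load dict,
    # which emits {'id': 3, 'status': 'start'}, calls on_run(my_param='x'), then emits
    # {'id': 3, 'status': 'end'} on the '__action' signal.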
def ws_serve(self, ws):
"""Handle a new websocket connection."""
logging.debug('ws_serve()')
def emit(signal, message):
try:
ws.send(json.dumps({'signal': signal, 'load': message}))
except geventwebsocket.WebSocketError:
logging.info('websocket closed. could not send: '+signal +
' -- '+str(message))
analysis_instance = self.instantiate_analysis_class()
logging.debug("analysis instantiated")
analysis_instance.set_emit_fn(emit)
greenlets = []
greenlets.append(gevent.Greenlet.spawn(
analysis_instance.on_connect
))
def process_message(message):
if message is None:
logging.debug('empty message received.')
return
message_data = json.loads(message)
analysis_instance.onall(message_data)
if 'signal' not in message_data or 'load' not in message_data:
logging.info('message not processed: '+message)
return
fn_name = 'on_'+message_data['signal']
if not hasattr(self.analysis_class, fn_name):
logging.warning('frontend wants to call '+fn_name +
' which is not in the Analysis class.')
return
logging.debug('calling '+fn_name)
# every 'on_' is processed in a separate greenlet
greenlets.append(gevent.Greenlet.spawn(
Meta.run_action, analysis_instance,
fn_name, message_data['load']
))
while True:
try:
message = ws.receive()
logging.debug('received message: '+str(message))
process_message(message)
except geventwebsocket.WebSocketError:
break
# disconnected
logging.debug("disconnecting analysis instance")
gevent.killall(greenlets)
analysis_instance.on_disconnect()
class AnalysisZMQ(Analysis):
def __init__(self, namespace, instance_id, zmq_publish):
self.namespace = namespace
self.instance_id = instance_id
self.zmq_publish = zmq_publish
def onall(self, message_data):
msg = {
'analysis': self.namespace,
'instance_id': self.instance_id,
'frame': message_data,
}
self.zmq_publish.send_json(msg)
logging.debug('onall called with: '+str(msg))
def on_connect(self):
msg = {
'analysis': self.namespace,
'instance_id': self.instance_id,
'frame': {'signal': 'connect', 'load': {}},
}
self.zmq_publish.send_json(msg)
logging.debug('on_connect called')
def on_disconnect(self):
msg = {
'analysis': self.namespace,
'instance_id': self.instance_id,
'frame': {'signal': 'disconnect', 'load': {}},
}
self.zmq_publish.send_json(msg)
logging.debug('on_disconnect called')
class MetaZMQ(Meta):
"""A Meta class that pipes all messages to ZMQ and back.
The entire ZMQ interface of Databench is defined here and in
:class`AnalysisZMQ`.
"""
def __init__(
self,
name,
import_name,
description,
executable,
zmq_publish,
port_subscribe=None,
):
Meta.__init__(self, name, import_name, description, AnalysisZMQ)
self.zmq_publish = zmq_publish
self.zmq_analysis_id = 0
self.zmq_analyses = {}
self.zmq_confirmed = False
# check whether we have to determine port_subscribe ourselves first
if port_subscribe is None:
context = zmq.Context()
socket = context.socket(zmq.PUB)
port_subscribe = socket.bind_to_random_port(
'tcp://127.0.0.1',
min_port=3000, max_port=9000,
)
context.destroy()
logging.debug('determined: port_subscribe='+str(port_subscribe))
# zmq subscription to listen for messages from backend
logging.debug('main listening on port: '+str(port_subscribe))
self.zmq_sub = zmq.Context().socket(zmq.SUB)
self.zmq_sub.connect('tcp://127.0.0.1:'+str(port_subscribe))
self.zmq_sub.setsockopt(zmq.SUBSCRIBE, '')
# @copy_current_request_context
def zmq_listener():
while True:
msg = self.zmq_sub.recv_json()
self.zmq_confirmed = True
                logging.debug('main received msg: ' + str(msg))
if 'description' in msg:
self.description = msg['description']
if 'instance_id' in msg and \
msg['instance_id'] in self.zmq_analyses:
analysis = self.zmq_analyses[msg['instance_id']]
del msg['instance_id']
if 'frame' in msg and \
'signal' in msg['frame'] and \
'load' in msg['frame']:
analysis.emit(msg['frame']['signal'],
msg['frame']['load'])
else:
logging.debug('dont understand this message: ' +
str(msg))
else:
logging.debug('instance_id not in message or '
'AnalysisZMQ with that id not found.')
self.zmq_listener = gevent.Greenlet.spawn(zmq_listener)
# launch the language kernel process
self.kernel_process = subprocess.Popen(executable, shell=False)
# init language kernel
def sending_init():
while not self.zmq_confirmed:
logging.debug('init kernel '+self.name+' to publish on '
'port '+str(port_subscribe))
self.zmq_publish.send_json({
'analysis': self.name,
'publish_on_port': port_subscribe,
})
time.sleep(0.1)
gevent.Greenlet.spawn(sending_init)
def instantiate_analysis_class(self):
self.zmq_analysis_id += 1
i = self.analysis_class(self.name,
self.zmq_analysis_id,
self.zmq_publish)
self.zmq_analyses[self.zmq_analysis_id] = i
return i
| 33.966825
| 78
| 0.570183
|
99747e04a3c7b93269c6c152c52fd212cabea800
| 1,492
|
py
|
Python
|
tests/python/testing.py
|
xhuang4/xgboost
|
37ba0c8c2ecc569c747456ac6f802397e6ace65d
|
[
"Apache-2.0"
] | 18
|
2020-08-11T08:52:29.000Z
|
2021-11-14T03:30:03.000Z
|
tests/python/testing.py
|
xhuang4/xgboost
|
37ba0c8c2ecc569c747456ac6f802397e6ace65d
|
[
"Apache-2.0"
] | null | null | null |
tests/python/testing.py
|
xhuang4/xgboost
|
37ba0c8c2ecc569c747456ac6f802397e6ace65d
|
[
"Apache-2.0"
] | 5
|
2020-09-14T10:58:09.000Z
|
2021-08-17T13:12:59.000Z
|
# coding: utf-8
from xgboost.compat import SKLEARN_INSTALLED, PANDAS_INSTALLED, DT_INSTALLED
from xgboost.compat import CUDF_INSTALLED, DASK_INSTALLED
def no_sklearn():
return {'condition': not SKLEARN_INSTALLED,
'reason': 'Scikit-Learn is not installed'}
def no_dask():
return {'condition': not DASK_INSTALLED,
'reason': 'Dask is not installed'}
def no_pandas():
return {'condition': not PANDAS_INSTALLED,
'reason': 'Pandas is not installed.'}
def no_dt():
return {'condition': not DT_INSTALLED,
'reason': 'Datatable is not installed.'}
def no_matplotlib():
reason = 'Matplotlib is not installed.'
try:
import matplotlib.pyplot as _ # noqa
return {'condition': False,
'reason': reason}
except ImportError:
return {'condition': True,
'reason': reason}
def no_dask_cuda():
reason = 'dask_cuda is not installed.'
try:
import dask_cuda as _ # noqa
return {'condition': False, 'reason': reason}
except ImportError:
return {'condition': True, 'reason': reason}
def no_cudf():
return {'condition': not CUDF_INSTALLED,
'reason': 'CUDF is not installed'}
def no_dask_cudf():
reason = 'dask_cudf is not installed.'
try:
import dask_cudf as _ # noqa
return {'condition': False, 'reason': reason}
except ImportError:
return {'condition': True, 'reason': reason}
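# These helpers return kwargs shaped for pytest skip markers; a typical (illustrative)
# use in a test module is:
#   import pytest
#   @pytest.mark.skipif(**no_sklearn())
#   def test_with_sklearn(): ...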
| 25.724138
| 76
| 0.626676
|
18524e7ce1368f0c1b14f5cf0a29af45cbb7e7e5
| 2,393
|
py
|
Python
|
src/model/CrevNet/i_RevNet_Block.py
|
dreaming-coder/DeepLab
|
3020544e2f9e139dde7bd04f6ff59e6f44d49c6e
|
[
"Apache-2.0"
] | 3
|
2021-05-31T09:25:59.000Z
|
2022-03-10T08:09:51.000Z
|
src/model/CrevNet/i_RevNet_Block.py
|
dreaming-coder/DeepLab
|
3020544e2f9e139dde7bd04f6ff59e6f44d49c6e
|
[
"Apache-2.0"
] | 1
|
2021-09-26T16:37:39.000Z
|
2021-09-28T00:43:05.000Z
|
src/model/CrevNet/i_RevNet_Block.py
|
dreaming-coder/DeepLab
|
3020544e2f9e139dde7bd04f6ff59e6f44d49c6e
|
[
"Apache-2.0"
] | null | null | null |
from typing import Sequence
from torch import nn, Tensor
__all__ = ["i_RevNet_Block"]
# noinspection PyShadowingNames
class i_RevNet_Block(nn.Module):
"""
    After an i-RevNet block, the number of channels and the spatial size are unchanged
"""
def __init__(self, channels: int):
super(i_RevNet_Block, self).__init__()
self.channels = channels
if channels // 4 == 0:
ch = 1
else:
ch = channels // 4
self.bottleneck_block1 = nn.Sequential(
nn.Conv2d(in_channels=channels, out_channels=ch, kernel_size=3,
padding=1, stride=1, bias=False),
nn.GroupNorm(num_groups=1, num_channels=ch, affine=True),
nn.ReLU(),
nn.Conv2d(in_channels=ch, out_channels=ch, kernel_size=3,
padding=1, stride=1, bias=False),
nn.GroupNorm(num_groups=1, num_channels=ch, affine=True),
nn.ReLU(),
nn.Conv2d(in_channels=ch, out_channels=channels, kernel_size=3,
padding=1, stride=1, bias=False),
nn.GroupNorm(num_groups=1, num_channels=channels, affine=True),
nn.ReLU()
)
self.bottleneck_block2 = nn.Sequential(
nn.Conv2d(in_channels=channels, out_channels=ch, kernel_size=3,
padding=1, stride=1, bias=False),
nn.GroupNorm(num_groups=1, num_channels=ch, affine=True),
nn.ReLU(),
nn.Conv2d(in_channels=ch, out_channels=ch, kernel_size=3,
padding=1, stride=1, bias=False),
nn.GroupNorm(num_groups=1, num_channels=ch, affine=True),
nn.ReLU(),
nn.Conv2d(in_channels=ch, out_channels=channels, kernel_size=3,
padding=1, stride=1, bias=False),
nn.GroupNorm(num_groups=1, num_channels=channels, affine=True),
nn.ReLU()
)
def forward(self, x: Sequence[Tensor]):
        x1 = x[0]  # input from the previous time step
        x2 = x[1]  # input from the next time step
Fx1 = self.bottleneck_block1(x1)
x2 = x2 + Fx1
Fx2 = self.bottleneck_block2(x2)
x1 = x1 + Fx2
return x1, x2
def inverse(self, x: Sequence[Tensor]):
        x1 = x[0]  # output from the previous time step
        x2 = x[1]  # output from the next time step
        Fx2 = self.bottleneck_block2(x2)
        x1 = x1 - Fx2
        Fx1 = self.bottleneck_block1(x1)
        x2 = x2 - Fx1
return x1, x2
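# Invertibility note (illustrative): for tensors of matching shape,
#   block = i_RevNet_Block(channels=8)
#   y1, y2 = block((x1, x2))            # forward
#   r1, r2 = block.inverse((y1, y2))    # recovers x1, x2 up to floating-point error
# because inverse() subtracts the two residual branches in the reverse order in which
# forward() added them.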
| 27.505747
| 75
| 0.559131
|
f611fbdb93ab39dba3c4ba441cb4d7f7fb42a86c
| 260
|
py
|
Python
|
config.py
|
dacerb/pywords-challenge
|
0a6604d14c60a6e5de2df17d30cd4421628a2ea0
|
[
"MIT"
] | null | null | null |
config.py
|
dacerb/pywords-challenge
|
0a6604d14c60a6e5de2df17d30cd4421628a2ea0
|
[
"MIT"
] | null | null | null |
config.py
|
dacerb/pywords-challenge
|
0a6604d14c60a6e5de2df17d30cd4421628a2ea0
|
[
"MIT"
] | null | null | null |
## Path where the collections are located
FILE_PATH = "../collection/"
## Settings for the MongoDB connection
MONGO_HOST ="localhost"
MONGO_PORT = 27017
MONGO_DB ="test"
MONGO_USER ="test_usr"
MONGO_PASS ="SuperTest"
MONGO_COLLECTION ="collection"
| 23.636364
| 43
| 0.761538
|
45ae92bd4b6765aa039c1ea404339e7f6e897e49
| 7,174
|
py
|
Python
|
train.py
|
WeiweiDuan/TOD
|
d3005eecef1f1dcb08fce99cb56f09ba46e36e82
|
[
"Unlicense"
] | null | null | null |
train.py
|
WeiweiDuan/TOD
|
d3005eecef1f1dcb08fce99cb56f09ba46e36e82
|
[
"Unlicense"
] | null | null | null |
train.py
|
WeiweiDuan/TOD
|
d3005eecef1f1dcb08fce99cb56f09ba46e36e82
|
[
"Unlicense"
] | null | null | null |
from keras.layers import Lambda, Input, Dense, Merge, Concatenate,Multiply, Add, add, Activation
from keras.models import Model
from keras.datasets import mnist
from keras.losses import mse, binary_crossentropy
from keras.utils import plot_model
from keras import backend as K
from keras.optimizers import *
from keras.callbacks import ModelCheckpoint
from keras import metrics
from utils import load_data, data_augmentation
import numpy as np
import matplotlib.pyplot as plt
import argparse
import os
import cv2
from models import vae, categorical
from keras import metrics
from sklearn.cluster import KMeans
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
from sklearn.decomposition import PCA
import tensorflow as tf
import keras
from keras.datasets import mnist
def remove_files(path):
for root, directory, files in os.walk(path):
for fname in files:
os.remove(os.path.join(root, fname))
return 0
def sampling(args):
z_mean, z_log_var = args
batch = K.shape(z_mean)[0]
dim = K.int_shape(z_mean)[1]
# by default, random_normal has mean = 0 and std = 1.0
epsilon = K.random_normal(shape=(batch, dim))
return z_mean + K.exp(0.5 * z_log_var) * epsilon
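# The reparameterization trick above draws z ~ N(z_mean, exp(z_log_var)) as
# z = z_mean + exp(0.5 * z_log_var) * eps with eps ~ N(0, I). Illustrative
# NumPy sanity check (a sketch only, outside the Keras graph):
#   mu, log_var = np.zeros(3), np.log(np.full(3, 4.0))
#   eps = np.random.normal(size=3)
#   z = mu + np.exp(0.5 * log_var) * eps   # samples with std ~= 2 around mu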
os.environ["CUDA_VISIBLE_DEVICES"]="0"
MAP_PATH = './data/toronto.png'
TARGET_SAMPLE_DIR = "./data/target_samples"
MASK_PATH = './data/parkinglot.png'
SUBSET_PATH = './data/subset'
SHIFT_LIST = [-10,-5,0,5,10] #
ROTATION_ANGLE = []#0,5,10,15,20,340,345,355
IMG_SIZE = 28
EPOCHS = 1000
LEARNING_RATE = 0.0001
VAE_MODEL_PATH = ''
LOG_DIR = './logs'
MODEL_PATH = ''
# batch_size = 75
latent_dim = 32#32, 5
intermediate_dim = 512#128, 512
num_cls = 2  # two mixture components; matches the 2-d one-hot y0/y1 inputs and the [0.5, 0.5] prior below
optimizer = Adam(lr=LEARNING_RATE)
# optimizer = RMSprop(lr=LEARNING_RATE)
initializer = 'glorot_normal'#'random_uniform'#
original_dim = IMG_SIZE*IMG_SIZE*3  # RGB patches, matching the reshapes applied to the loaded data below
w_recons, w_kl, w_ce = 28.0*28.0, 1.0, 100.0
def qz_graph(x, y, intermediate_dim=512,latent_dim=32):
concat = Concatenate(axis=-1)([x, y])
layer1 = Dense(intermediate_dim, activation='relu',kernel_initializer = initializer)(concat)
layer2 = Dense(intermediate_dim, activation='relu',kernel_initializer = initializer)(layer1)
z_mean = Dense(latent_dim,kernel_initializer = initializer)(layer2)
z_var = Dense(latent_dim,kernel_initializer = initializer)(layer2)
z = Lambda(sampling, output_shape=(latent_dim,))([z_mean, z_var])
return z_mean, z_var, z
def qy_graph(x, num_cls=10):
layer1 = Dense(256, activation='relu',kernel_initializer = initializer)(x)#256. 64
layer2 = Dense(128, activation='relu',kernel_initializer = initializer)(layer1)#128, 32
qy_logit = Dense(num_cls,kernel_initializer = initializer)(layer2)
qy = Activation(tf.nn.softmax)(qy_logit)
return qy_logit, qy
def px_graph(z, intermediate_dim=512, original_dim=40*40*3):
layer1 = Dense(intermediate_dim, activation='relu',kernel_initializer = initializer)(z)
layer2 = Dense(intermediate_dim, activation='relu',kernel_initializer = initializer)(layer1)
reconstruction = Dense(original_dim, activation='sigmoid',kernel_initializer = initializer)(layer2)
return reconstruction
def pzy_graph(y, latent_dim=32):
h = Dense(16, activation='relu',kernel_initializer = initializer)(y)#128
h = Dense(8, activation='relu',kernel_initializer = initializer)(h)#256, 64
zp_mean = Dense(latent_dim,kernel_initializer = initializer)(h)
zp_var = Dense(latent_dim,kernel_initializer = initializer)(h)
return zp_mean, zp_var
def loss(x, xp, zm, zv, zm_prior, zv_prior, w_mse, w_kl):
reconstruction_loss = mse(x, xp)
reconstruction_loss *= w_mse
kl_loss = (zv_prior-zv)*0.5 + (K.square(zm-zm_prior) + K.exp(zv)) / (2*K.exp(zv_prior)+1e-10) - 0.5
kl_loss = K.sum(kl_loss, axis=-1) * w_kl
return reconstruction_loss + kl_loss
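# Note: the kl_loss term above is the closed-form KL divergence between the
# diagonal Gaussians q = N(zm, exp(zv)) and p = N(zm_prior, exp(zv_prior)),
# computed per latent dimension and then summed:
#   KL = 0.5*(zv_prior - zv) + (exp(zv) + (zm - zm_prior)^2) / (2*exp(zv_prior)) - 0.5
# The 1e-10 added to the denominator is a numerical stabilizer.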
def kl_loss(zm, zv, zm_prior, zv_prior, weight):
loss = (zv_prior-zv)*0.5 + (np.square(zm-zm_prior) + np.exp(zv)) / 2*np.exp(zv_prior) - 0.5
loss = np.sum(loss, axis=-1) * weight
return loss
def mse_loss(x, xp, weight):
return (np.square(x - xp)).mean(axis=None) * weight
def ce_loss(yp, weight):
return (yp * np.log(yp / np.array([0.20,0.20,0.20,0.20,0.20]))).mean(axis=None) * weight
x, y = Input(shape=(original_dim,)), Input(shape=(num_cls,))
sub_enc = Model([x,y],qz_graph(x, y, intermediate_dim=intermediate_dim, latent_dim=latent_dim))
z = Input(shape=(latent_dim,))
sub_dec = Model(z, px_graph(z, intermediate_dim=intermediate_dim, original_dim=original_dim))
x_u = Input(shape=(original_dim,), name='x_u')
x_l = Input(shape=(original_dim,), name='x_l')
y0 = Input(shape=(num_cls,), name='y0_inputs')
y1 = Input(shape=(num_cls,), name='y1_inputs')
zm_p0,zv_p0 = pzy_graph(y0, latent_dim=latent_dim)
zm_p1,zv_p1 = pzy_graph(y1, latent_dim=latent_dim)
# zm0, zv0, z0 = qz_graph(x_u, y0, intermediate_dim=intermediate_dim, latent_dim=latent_dim)
zm0, zv0, z0 = sub_enc([x_u, y0])
zm_l, zv_l, z_l = sub_enc([x_l, y0])
zm1, zv1, z1 = qz_graph(x_u, y1, intermediate_dim=intermediate_dim, latent_dim=latent_dim)
# xp_u0 = px_graph(z0, intermediate_dim=intermediate_dim, original_dim=original_dim)
xp_u0 = sub_dec(z0)
xp_l = sub_dec(z_l)
xp_u1 = px_graph(z1, intermediate_dim=intermediate_dim, original_dim=original_dim)
qy_logit, qy = qy_graph(x_u, num_cls=num_cls)
vae = Model([x_u,x_l,y0,y1], [xp_l,xp_u0,xp_u1,qy,zm_l,zv_l,zm0,zv0,zm1,zv1,zm_p0,zv_p0,zm_p1,zv_p1])
cat_loss = qy * K.log(qy / K.constant(np.array([0.5,0.5])))
cat_loss = K.sum(cat_loss, axis=-1) * w_ce
vae_loss = qy[:,0]*loss(x_u,xp_u0,zm0,zv0,zm_p0,zv_p0,w_recons,w_kl)+\
qy[:,1]*loss(x_u,xp_u1,zm1,zv1,zm_p1,zv_p1,w_recons,w_kl)+\
loss(x_l,xp_l,zm_l,zv_l,zm_p0,zv_p0,w_recons,w_kl) + cat_loss
vae.add_loss(vae_loss)
vae.summary()
# load data
x_u, _ = load_data.load_wetland_samples(SUBSET_PATH)
np.random.shuffle(x_u)
x_l, target_name = load_data.load_wetland_samples(TARGET_SAMPLE_DIR)
x_l_aug = data_augmentation.data_aug(x_l, SHIFT_LIST, ROTATION_ANGLE)
np.random.shuffle(x_l_aug)
x_l = np.reshape(x_l, [-1, IMG_SIZE*IMG_SIZE*3])
x_l_aug = np.reshape(x_l_aug, [-1, IMG_SIZE*IMG_SIZE*3])
x_u = np.reshape(x_u, [-1, IMG_SIZE*IMG_SIZE*3])
image_size = x_u.shape[1]
original_dim = image_size
x_u = x_u.astype('float32') / 255
x_l = x_l.astype('float32') / 255
x_l_aug = x_l_aug.astype('float32') / 255
np.random.shuffle(x_l_aug)
x_l_aug = np.vstack((x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug, x_l_aug,\
x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,\
x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,\
x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug,x_l_aug))
print('target samples shape: ', x_l_aug.shape)
print('all samples shape: ', x_u.shape)
vae.compile(optimizer=optimizer, loss=None)
checkpoint = ModelCheckpoint('./logs/weights{epoch:08d}.h5',save_weights_only=True, period=100)
# vae.load_weights('./logs/weights00000100.h5')
# Align the unlabeled and augmented labeled sets so every model input shares the same number of rows.
num_samples = min(x_u.shape[0], x_l_aug.shape[0])
vae.fit([x_u[:num_samples], x_l_aug[:num_samples],
         np.array([[1, 0]] * num_samples), np.array([[0, 1]] * num_samples)],
        epochs=200, batch_size=200, verbose=1, callbacks=[checkpoint])
vae.save_weights('dection.hdf5')
| 37.957672
| 103
| 0.727628
|
bc8f6b5f3ad2ea1c4ec69f1e6d9529683dfd5b33
| 29,788
|
py
|
Python
|
few_nlu/few_nlu.py
|
duzx16/P-tuning-v2
|
b62f4581e45d7975bd3b6554449bffb87d43f101
|
[
"Apache-2.0"
] | null | null | null |
few_nlu/few_nlu.py
|
duzx16/P-tuning-v2
|
b62f4581e45d7975bd3b6554449bffb87d43f101
|
[
"Apache-2.0"
] | null | null | null |
few_nlu/few_nlu.py
|
duzx16/P-tuning-v2
|
b62f4581e45d7975bd3b6554449bffb87d43f101
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors and the HuggingFace Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""The SuperGLUE benchmark."""
import json
import os
import datasets
_SUPER_GLUE_CITATION = """\
@article{wang2019superglue,
title={SuperGLUE: A Stickier Benchmark for General-Purpose Language Understanding Systems},
author={Wang, Alex and Pruksachatkun, Yada and Nangia, Nikita and Singh, Amanpreet and Michael, Julian and Hill, Felix and Levy, Omer and Bowman, Samuel R},
journal={arXiv preprint arXiv:1905.00537},
year={2019}
}
Note that each SuperGLUE dataset has its own citation. Please see the source to
get the correct citation for each contained dataset.
"""
_GLUE_DESCRIPTION = """\
SuperGLUE (https://super.gluebenchmark.com/) is a new benchmark styled after
GLUE with a new set of more difficult language understanding tasks, improved
resources, and a new public leaderboard.
"""
_BOOLQ_DESCRIPTION = """\
BoolQ (Boolean Questions, Clark et al., 2019a) is a QA task where each example consists of a short
passage and a yes/no question about the passage. The questions are provided anonymously and
unsolicited by users of the Google search engine, and afterwards paired with a paragraph from a
Wikipedia article containing the answer. Following the original work, we evaluate with accuracy."""
_CB_DESCRIPTION = """\
The CommitmentBank (De Marneffe et al., 2019) is a corpus of short texts in which at least
one sentence contains an embedded clause. Each of these embedded clauses is annotated with the
degree to which we expect that the person who wrote the text is committed to the truth of the clause.
The resulting task framed as three-class textual entailment on examples that are drawn from the Wall
Street Journal, fiction from the British National Corpus, and Switchboard. Each example consists
of a premise containing an embedded clause and the corresponding hypothesis is the extraction of
that clause. We use a subset of the data that had inter-annotator agreement above 0.85. The data is
imbalanced (relatively fewer neutral examples), so we evaluate using accuracy and F1, where for
multi-class F1 we compute the unweighted average of the F1 per class."""
_COPA_DESCRIPTION = """\
The Choice Of Plausible Alternatives (COPA, Roemmele et al., 2011) dataset is a causal
reasoning task in which a system is given a premise sentence and two possible alternatives. The
system must choose the alternative which has the more plausible causal relationship with the premise.
The method used for the construction of the alternatives ensures that the task requires causal reasoning
to solve. Examples either deal with alternative possible causes or alternative possible effects of the
premise sentence, accompanied by a simple question disambiguating between the two instance
types for the model. All examples are handcrafted and focus on topics from online blogs and a
photography-related encyclopedia. Following the recommendation of the authors, we evaluate using
accuracy."""
_RECORD_DESCRIPTION = """\
(Reading Comprehension with Commonsense Reasoning Dataset, Zhang et al., 2018) is a
multiple-choice QA task. Each example consists of a news article and a Cloze-style question about
the article in which one entity is masked out. The system must predict the masked out entity from a
given list of possible entities in the provided passage, where the same entity may be expressed using
multiple different surface forms, all of which are considered correct. Articles are drawn from CNN
and Daily Mail. Following the original work, we evaluate with max (over all mentions) token-level
F1 and exact match (EM)."""
_RTE_DESCRIPTION = """\
The Recognizing Textual Entailment (RTE) datasets come from a series of annual competitions
on textual entailment, the problem of predicting whether a given premise sentence entails a given
hypothesis sentence (also known as natural language inference, NLI). RTE was previously included
in GLUE, and we use the same data and format as before: We merge data from RTE1 (Dagan
et al., 2006), RTE2 (Bar Haim et al., 2006), RTE3 (Giampiccolo et al., 2007), and RTE5 (Bentivogli
et al., 2009). All datasets are combined and converted to two-class classification: entailment and
not_entailment. Of all the GLUE tasks, RTE was among those that benefited from transfer learning
the most, jumping from near random-chance performance (~56%) at the time of GLUE's launch to
85% accuracy (Liu et al., 2019c) at the time of writing. Given the eight point gap with respect to
human performance, however, the task is not yet solved by machines, and we expect the remaining
gap to be difficult to close."""
_MULTIRC_DESCRIPTION = """\
The Multi-Sentence Reading Comprehension dataset (MultiRC, Khashabi et al., 2018)
is a true/false question-answering task. Each example consists of a context paragraph, a question
about that paragraph, and a list of possible answers to that question which must be labeled as true or
false. Question-answering (QA) is a popular problem with many datasets. We use MultiRC because
of a number of desirable properties: (i) each question can have multiple possible correct answers,
so each question-answer pair must be evaluated independent of other pairs, (ii) the questions are
designed such that answering each question requires drawing facts from multiple context sentences,
and (iii) the question-answer pair format more closely matches the API of other SuperGLUE tasks
than span-based extractive QA does. The paragraphs are drawn from seven domains including news,
fiction, and historical text."""
_WIC_DESCRIPTION = """\
The Word-in-Context (WiC, Pilehvar and Camacho-Collados, 2019) dataset supports a word
sense disambiguation task cast as binary classification over sentence pairs. Given two sentences and a
polysemous (sense-ambiguous) word that appears in both sentences, the task is to determine whether
the word is used with the same sense in both sentences. Sentences are drawn from WordNet (Miller,
1995), VerbNet (Schuler, 2005), and Wiktionary. We follow the original work and evaluate using
accuracy."""
_WSC_DESCRIPTION = """\
The Winograd Schema Challenge (WSC, Levesque et al., 2012) is a reading comprehension
task in which a system must read a sentence with a pronoun and select the referent of that pronoun
from a list of choices. Given the difficulty of this task and the headroom still left, we have included
WSC in SuperGLUE and recast the dataset into its coreference form. The task is cast as a binary
classification problem, as opposed to N-multiple choice, in order to isolate the model's ability to
understand the coreference links within a sentence as opposed to various other strategies that may
come into play in multiple choice conditions. With that in mind, we create a split with 65% negative
majority class in the validation set, reflecting the distribution of the hidden test set, and 52% negative
class in the training set. The training and validation examples are drawn from the original Winograd
Schema dataset (Levesque et al., 2012), as well as those distributed by the affiliated organization
Commonsense Reasoning. The test examples are derived from fiction books and have been shared
with us by the authors of the original dataset. Previously, a version of WSC recast as NLI as included
in GLUE, known as WNLI. No substantial progress was made on WNLI, with many submissions
opting to submit only majority class predictions. WNLI was made especially difficult due to an
adversarial train/dev split: Premise sentences that appeared in the training set sometimes appeared
in the development set with a different hypothesis and a flipped label. If a system memorized the
training set without meaningfully generalizing, which was easy due to the small size of the training
set, it could perform far below chance on the development set. We remove this adversarial design
in the SuperGLUE version of WSC by ensuring that no sentences are shared between the training,
validation, and test sets.
However, the validation and test sets come from different domains, with the validation set consisting
of ambiguous examples such that changing one non-noun phrase word will change the coreference
dependencies in the sentence. The test set consists only of more straightforward examples, with a
high number of noun phrases (and thus more choices for the model), but low to no ambiguity."""
_AXB_DESCRIPTION = """\
An expert-constructed,
diagnostic dataset that automatically tests models for a broad range of linguistic, commonsense, and
world knowledge. Each example in this broad-coverage diagnostic is a sentence pair labeled with
a three-way entailment relation (entailment, neutral, or contradiction) and tagged with labels that
indicate the phenomena that characterize the relationship between the two sentences. Submissions
to the GLUE leaderboard are required to include predictions from the submission's MultiNLI
classifier on the diagnostic dataset, and analyses of the results were shown alongside the main
leaderboard. Since this broad-coverage diagnostic task has proved difficult for top models, we retain
it in SuperGLUE. However, since MultiNLI is not part of SuperGLUE, we collapse contradiction
and neutral into a single not_entailment label, and request that submissions include predictions
on the resulting set from the model used for the RTE task.
"""
_AXG_DESCRIPTION = """\
Winogender is designed to measure gender
bias in coreference resolution systems. We use the Diverse Natural Language Inference Collection
(DNC; Poliak et al., 2018) version that casts Winogender as a textual entailment task. Each example
consists of a premise sentence with a male or female pronoun and a hypothesis giving a possible
antecedent of the pronoun. Examples occur in minimal pairs, where the only difference between
an example and its pair is the gender of the pronoun in the premise. Performance on Winogender
is measured with both accuracy and the gender parity score: the percentage of minimal pairs for
which the predictions are the same. We note that a system can trivially obtain a perfect gender parity
score by guessing the same class for all examples, so a high gender parity score is meaningless unless
accompanied by high accuracy. As a diagnostic test of gender bias, we view the schemas as having high
positive predictive value and low negative predictive value; that is, they may demonstrate the presence
of gender bias in a system, but not prove its absence.
"""
_BOOLQ_CITATION = """\
@inproceedings{clark2019boolq,
title={BoolQ: Exploring the Surprising Difficulty of Natural Yes/No Questions},
author={Clark, Christopher and Lee, Kenton and Chang, Ming-Wei, and Kwiatkowski, Tom and Collins, Michael, and Toutanova, Kristina},
booktitle={NAACL},
year={2019}
}"""
_CB_CITATION = """\
@article{de marneff_simons_tonhauser_2019,
title={The CommitmentBank: Investigating projection in naturally occurring discourse},
journal={proceedings of Sinn und Bedeutung 23},
author={De Marneff, Marie-Catherine and Simons, Mandy and Tonhauser, Judith},
year={2019}
}"""
_COPA_CITATION = """\
@inproceedings{roemmele2011choice,
title={Choice of plausible alternatives: An evaluation of commonsense causal reasoning},
author={Roemmele, Melissa and Bejan, Cosmin Adrian and Gordon, Andrew S},
booktitle={2011 AAAI Spring Symposium Series},
year={2011}
}"""
_RECORD_CITATION = """\
@article{zhang2018record,
title={Record: Bridging the gap between human and machine commonsense reading comprehension},
author={Zhang, Sheng and Liu, Xiaodong and Liu, Jingjing and Gao, Jianfeng and Duh, Kevin and Van Durme, Benjamin},
journal={arXiv preprint arXiv:1810.12885},
year={2018}
}"""
_RTE_CITATION = """\
@inproceedings{dagan2005pascal,
title={The PASCAL recognising textual entailment challenge},
author={Dagan, Ido and Glickman, Oren and Magnini, Bernardo},
booktitle={Machine Learning Challenges Workshop},
pages={177--190},
year={2005},
organization={Springer}
}
@inproceedings{bar2006second,
title={The second pascal recognising textual entailment challenge},
author={Bar-Haim, Roy and Dagan, Ido and Dolan, Bill and Ferro, Lisa and Giampiccolo, Danilo and Magnini, Bernardo and Szpektor, Idan},
booktitle={Proceedings of the second PASCAL challenges workshop on recognising textual entailment},
volume={6},
number={1},
pages={6--4},
year={2006},
organization={Venice}
}
@inproceedings{giampiccolo2007third,
title={The third pascal recognizing textual entailment challenge},
author={Giampiccolo, Danilo and Magnini, Bernardo and Dagan, Ido and Dolan, Bill},
booktitle={Proceedings of the ACL-PASCAL workshop on textual entailment and paraphrasing},
pages={1--9},
year={2007},
organization={Association for Computational Linguistics}
}
@inproceedings{bentivogli2009fifth,
title={The Fifth PASCAL Recognizing Textual Entailment Challenge.},
author={Bentivogli, Luisa and Clark, Peter and Dagan, Ido and Giampiccolo, Danilo},
booktitle={TAC},
year={2009}
}"""
_MULTIRC_CITATION = """\
@inproceedings{MultiRC2018,
author = {Daniel Khashabi and Snigdha Chaturvedi and Michael Roth and Shyam Upadhyay and Dan Roth},
title = {Looking Beyond the Surface:A Challenge Set for Reading Comprehension over Multiple Sentences},
booktitle = {Proceedings of North American Chapter of the Association for Computational Linguistics (NAACL)},
year = {2018}
}"""
_WIC_CITATION = """\
@article{DBLP:journals/corr/abs-1808-09121,
  author={Mohammad Taher Pilehvar and Jos{\'{e}} Camacho{-}Collados},
title={WiC: 10, 000 Example Pairs for Evaluating Context-Sensitive Representations},
journal={CoRR},
volume={abs/1808.09121},
year={2018},
url={http://arxiv.org/abs/1808.09121},
archivePrefix={arXiv},
eprint={1808.09121},
timestamp={Mon, 03 Sep 2018 13:36:40 +0200},
biburl={https://dblp.org/rec/bib/journals/corr/abs-1808-09121},
bibsource={dblp computer science bibliography, https://dblp.org}
}"""
_WSC_CITATION = """\
@inproceedings{levesque2012winograd,
title={The winograd schema challenge},
author={Levesque, Hector and Davis, Ernest and Morgenstern, Leora},
booktitle={Thirteenth International Conference on the Principles of Knowledge Representation and Reasoning},
year={2012}
}"""
_AXG_CITATION = """\
@inproceedings{rudinger-EtAl:2018:N18,
author = {Rudinger, Rachel and Naradowsky, Jason and Leonard, Brian and {Van Durme}, Benjamin},
title = {Gender Bias in Coreference Resolution},
booktitle = {Proceedings of the 2018 Conference of the North American Chapter of the Association for Computational Linguistics: Human Language Technologies},
month = {June},
year = {2018},
address = {New Orleans, Louisiana},
publisher = {Association for Computational Linguistics}
}
"""
class SuperGlueConfig(datasets.BuilderConfig):
"""BuilderConfig for SuperGLUE."""
def __init__(self, features, data_url, citation, url, label_classes=("False", "True"), **kwargs):
"""BuilderConfig for SuperGLUE.
Args:
features: `list[string]`, list of the features that will appear in the
feature dict. Should not include "label".
data_url: `string`, url to download the zip file from.
citation: `string`, citation for the data set.
url: `string`, url for information about the data set.
label_classes: `list[string]`, the list of classes for the label if the
label is present as a string. Non-string labels will be cast to either
'False' or 'True'.
**kwargs: keyword arguments forwarded to super.
"""
# Version history:
# 1.0.2: Fixed non-nondeterminism in ReCoRD.
# 1.0.1: Change from the pre-release trial version of SuperGLUE (v1.9) to
# the full release (v2.0).
# 1.0.0: S3 (new shuffling, sharding and slicing mechanism).
# 0.0.2: Initial version.
super(SuperGlueConfig, self).__init__(version=datasets.Version("1.0.2"), **kwargs)
self.features = features
self.label_classes = label_classes
self.data_url = data_url
self.citation = citation
self.url = url
class SuperGlue(datasets.GeneratorBasedBuilder):
"""The SuperGLUE benchmark."""
def __init__(self, *args, data_split=0, **kwargs):
super().__init__(*args, **kwargs)
self.data_split = str(data_split)
print(f"Using dataset split {data_split}")
ROOT_PATH = "/mnt/FewGLUE_v2"
BUILDER_CONFIGS = [
SuperGlueConfig(
name="boolq",
description=_BOOLQ_DESCRIPTION,
features=["question", "passage"],
data_url=os.path.join(ROOT_PATH, "BoolQ"),
citation=_BOOLQ_CITATION,
url="https://github.com/google-research-datasets/boolean-questions",
),
SuperGlueConfig(
name="cb",
description=_CB_DESCRIPTION,
features=["premise", "hypothesis"],
label_classes=["entailment", "contradiction", "neutral"],
data_url="https://dl.fbaipublicfiles.com/glue/superglue/data/v2/CB.zip",
citation=_CB_CITATION,
url="https://github.com/mcdm/CommitmentBank",
),
SuperGlueConfig(
name="copa",
description=_COPA_DESCRIPTION,
label_classes=["choice1", "choice2"],
# Note that question will only be the X in the statement "What's
# the X for this?".
features=["premise", "choice1", "choice2", "question"],
data_url=os.path.join(ROOT_PATH, "COPA"),
citation=_COPA_CITATION,
url="http://people.ict.usc.edu/~gordon/copa.html",
),
SuperGlueConfig(
name="multirc",
description=_MULTIRC_DESCRIPTION,
features=["paragraph", "question", "answer"],
data_url=os.path.join(ROOT_PATH, "MultiRC"),
citation=_MULTIRC_CITATION,
url="https://cogcomp.org/multirc/",
),
SuperGlueConfig(
name="record",
description=_RECORD_DESCRIPTION,
# Note that entities and answers will be a sequences of strings. Query
# will contain @placeholder as a substring, which represents the word
# to be substituted in.
features=["passage", "query", "entities", "answers"],
data_url=os.path.join(ROOT_PATH, "ReCoRD"),
citation=_RECORD_CITATION,
url="https://sheng-z.github.io/ReCoRD-explorer/",
),
SuperGlueConfig(
name="rte",
description=_RTE_DESCRIPTION,
features=["premise", "hypothesis"],
label_classes=["entailment", "not_entailment"],
data_url=os.path.join(ROOT_PATH, "RTE"),
citation=_RTE_CITATION,
url="https://aclweb.org/aclwiki/Recognizing_Textual_Entailment",
),
SuperGlueConfig(
name="wic",
description=_WIC_DESCRIPTION,
# Note that start1, start2, end1, and end2 will be integers stored as
# datasets.Value('int32').
features=["word", "sentence1", "sentence2", "start1", "start2", "end1", "end2"],
data_url=os.path.join(ROOT_PATH, "WiC"),
citation=_WIC_CITATION,
url="https://pilehvar.github.io/wic/",
),
SuperGlueConfig(
name="wsc",
description=_WSC_DESCRIPTION,
# Note that span1_index and span2_index will be integers stored as
# datasets.Value('int32').
features=["text", "span1_index", "span2_index", "span1_text", "span2_text"],
data_url=os.path.join(ROOT_PATH, "WSC"),
citation=_WSC_CITATION,
url="https://cs.nyu.edu/faculty/davise/papers/WinogradSchemas/WS.html",
),
SuperGlueConfig(
name="wsc.fixed",
description=(
_WSC_DESCRIPTION + "\n\nThis version fixes issues where the spans are not actually "
"substrings of the text."
),
# Note that span1_index and span2_index will be integers stored as
# datasets.Value('int32').
features=["text", "span1_index", "span2_index", "span1_text", "span2_text"],
data_url=os.path.join(ROOT_PATH, "WSC"),
citation=_WSC_CITATION,
url="https://cs.nyu.edu/faculty/davise/papers/WinogradSchemas/WS.html",
),
SuperGlueConfig(
name="axb",
description=_AXB_DESCRIPTION,
features=["sentence1", "sentence2"],
label_classes=["entailment", "not_entailment"],
data_url="https://dl.fbaipublicfiles.com/glue/superglue/data/v2/AX-b.zip",
citation="", # The GLUE citation is sufficient.
url="https://gluebenchmark.com/diagnostics",
),
SuperGlueConfig(
name="axg",
description=_AXG_DESCRIPTION,
features=["premise", "hypothesis"],
label_classes=["entailment", "not_entailment"],
data_url="https://dl.fbaipublicfiles.com/glue/superglue/data/v2/AX-g.zip",
citation=_AXG_CITATION,
url="https://github.com/rudinger/winogender-schemas",
),
]
def _info(self):
features = {feature: datasets.Value("string") for feature in self.config.features}
if self.config.name.startswith("wsc"):
features["span1_index"] = datasets.Value("int32")
features["span2_index"] = datasets.Value("int32")
if self.config.name == "wic":
features["start1"] = datasets.Value("int32")
features["start2"] = datasets.Value("int32")
features["end1"] = datasets.Value("int32")
features["end2"] = datasets.Value("int32")
if self.config.name == "multirc":
features["idx"] = dict(
{
"paragraph": datasets.Value("int32"),
"question": datasets.Value("int32"),
"answer": datasets.Value("int32"),
}
)
elif self.config.name == "record":
features["idx"] = dict(
{
"passage": datasets.Value("int32"),
"query": datasets.Value("int32"),
}
)
else:
features["idx"] = datasets.Value("int32")
if self.config.name == "record":
# Entities are the set of possible choices for the placeholder.
features["entities"] = datasets.features.Sequence(datasets.Value("string"))
# Answers are the subset of entities that are correct.
features["answers"] = datasets.features.Sequence(datasets.Value("string"))
else:
features["label"] = datasets.features.ClassLabel(names=self.config.label_classes)
return datasets.DatasetInfo(
description=_GLUE_DESCRIPTION + self.config.description,
features=datasets.Features(features),
homepage=self.config.url,
citation=self.config.citation + "\n" + _SUPER_GLUE_CITATION,
)
def _split_generators(self, dl_manager):
dl_dir = self.config.data_url
task_name = os.path.basename(dl_dir)
if self.config.name in ["axb", "axg"]:
return [
datasets.SplitGenerator(
name=datasets.Split.TEST,
gen_kwargs={
"data_file": os.path.join(dl_dir, f"{task_name}.jsonl"),
"split": datasets.Split.TEST,
},
),
]
return [
datasets.SplitGenerator(
name=datasets.Split.TRAIN,
gen_kwargs={
"data_file": os.path.join(dl_dir, self.data_split, "train.jsonl"),
"split": datasets.Split.TRAIN,
},
),
datasets.SplitGenerator(
name=datasets.Split.VALIDATION,
gen_kwargs={
"data_file": os.path.join(dl_dir, self.data_split, "val.jsonl"),
"split": datasets.Split.VALIDATION,
},
),
datasets.SplitGenerator(
name=datasets.Split.TEST,
gen_kwargs={
"data_file": os.path.join(dl_dir, self.data_split, "test.jsonl"),
"split": datasets.Split.TEST,
},
),
]
def _generate_examples(self, data_file, split):
with open(data_file, encoding="utf-8") as f:
for line in f:
row = json.loads(line)
if self.config.name == "multirc":
paragraph = row["passage"]
for question in paragraph["questions"]:
for answer in question["answers"]:
label = answer.get("label")
key = "%s_%s_%s" % (row["idx"], question["idx"], answer["idx"])
yield key, {
"paragraph": paragraph["text"],
"question": question["question"],
"answer": answer["text"],
"label": -1 if label is None else _cast_label(bool(label)),
"idx": {"paragraph": row["idx"], "question": question["idx"], "answer": answer["idx"]},
}
elif self.config.name == "record":
passage = row["passage"]
for qa in row["qas"]:
yield qa["idx"], {
"passage": passage["text"],
"query": qa["query"],
"entities": _get_record_entities(passage),
"answers": _get_record_answers(qa),
"idx": {"passage": row["idx"], "query": qa["idx"]},
}
else:
if self.config.name.startswith("wsc"):
row.update(row["target"])
example = {feature: row[feature] for feature in self.config.features}
if self.config.name == "wsc.fixed":
example = _fix_wst(example)
example["idx"] = row["idx"]
if "label" in row:
if self.config.name == "copa":
example["label"] = "choice2" if row["label"] else "choice1"
else:
example["label"] = _cast_label(row["label"])
else:
assert split == datasets.Split.TEST, row
example["label"] = -1
yield example["idx"], example
def _fix_wst(ex):
"""Fixes most cases where spans are not actually substrings of text."""
def _fix_span_text(k):
"""Fixes a single span."""
text = ex[k + "_text"]
index = ex[k + "_index"]
if text in ex["text"]:
return
if text in ("Kamenev and Zinoviev", "Kamenev, Zinoviev, and Stalin"):
# There is no way to correct these examples since the subjects have
# intervening text.
return
if "theyscold" in text:
ex["text"].replace("theyscold", "they scold")
ex["span2_index"] = 10
# Make sure case of the first words match.
first_word = ex["text"].split()[index]
if first_word[0].islower():
text = text[0].lower() + text[1:]
else:
text = text[0].upper() + text[1:]
# Remove punctuation in span.
text = text.rstrip(".")
# Replace incorrect whitespace character in span.
text = text.replace("\n", " ")
ex[k + "_text"] = text
assert ex[k + "_text"] in ex["text"], ex
_fix_span_text("span1")
_fix_span_text("span2")
return ex
def _cast_label(label):
"""Converts the label into the appropriate string version."""
if isinstance(label, str):
return label
elif isinstance(label, bool):
return "True" if label else "False"
elif isinstance(label, int):
assert label in (0, 1)
return str(label)
else:
raise ValueError("Invalid label format.")
def _get_record_entities(passage):
"""Returns the unique set of entities."""
text = passage["text"]
entities = set()
for entity in passage["entities"]:
entities.add(text[entity["start"] : entity["end"] + 1])
return sorted(entities)
def _get_record_answers(qa):
"""Returns the unique set of answers."""
if "answers" not in qa:
return []
answers = set()
for answer in qa["answers"]:
answers.add(answer["text"])
return sorted(answers)
def _get_task_name_from_data_url(data_url):
return data_url.split("/")[-1].split(".")[0]
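# Illustrative usage sketch (not part of the loading script). It assumes the
# FewGLUE data referenced by ROOT_PATH is available locally and that the
# `datasets` library forwards extra keyword arguments (here `data_split`) to
# this builder:
#   import datasets
#   boolq = datasets.load_dataset("few_nlu/few_nlu.py", "boolq", data_split=0)
#   print(boolq["train"][0])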
| 47.357711
| 159
| 0.664596
|
03536fb8297daba960bdb5d674f5b1ef2b83f196
| 1,893
|
py
|
Python
|
pype/tools/pyblish_pype/constants.py
|
kalisp/pype
|
28bbffaf2d12ccee48313cd9985e8dfa05e81a5c
|
[
"MIT"
] | 87
|
2021-05-07T08:40:46.000Z
|
2022-03-19T00:36:25.000Z
|
openpype/tools/pyblish_pype/constants.py
|
zafrs/OpenPype
|
4b8e7e1ed002fc55b31307efdea70b0feaed474f
|
[
"MIT"
] | 1,019
|
2021-04-26T06:22:56.000Z
|
2022-03-31T16:30:43.000Z
|
openpype/tools/pyblish_pype/constants.py
|
zafrs/OpenPype
|
4b8e7e1ed002fc55b31307efdea70b0feaed474f
|
[
"MIT"
] | 33
|
2021-04-29T12:35:54.000Z
|
2022-03-25T14:48:42.000Z
|
from Qt import QtCore
EXPANDER_WIDTH = 20
def flags(*args, **kwargs):
type_name = kwargs.pop("type_name", "Flags")
with_base = kwargs.pop("with_base", False)
enums = {}
for idx, attr_name in enumerate(args):
if with_base:
if idx == 0:
enums[attr_name] = 0
continue
idx -= 1
enums[attr_name] = 2**idx
for attr_name, value in kwargs.items():
enums[attr_name] = value
return type(type_name, (), enums)
def roles(*args, **kwargs):
type_name = kwargs.pop("type_name", "Roles")
enums = {}
for attr_name, value in kwargs.items():
enums[attr_name] = value
offset = 0
for idx, attr_name in enumerate(args):
_idx = idx + QtCore.Qt.UserRole + offset
while _idx in enums.values():
offset += 1
            _idx = idx + QtCore.Qt.UserRole + offset
enums[attr_name] = _idx
return type(type_name, (), enums)
Roles = roles(
"ObjectIdRole",
"ObjectUIdRole",
"TypeRole",
"PublishFlagsRole",
"LogRecordsRole",
"IsOptionalRole",
"IsEnabledRole",
"FamiliesRole",
"DocstringRole",
"PathModuleRole",
"PluginActionsVisibleRole",
"PluginValidActionsRole",
"PluginActionProgressRole",
"TerminalItemTypeRole",
"IntentItemValue",
type_name="ModelRoles"
)
InstanceStates = flags(
"ContextType",
"InProgress",
"HasWarning",
"HasError",
"HasFinished",
type_name="InstanceState"
)
PluginStates = flags(
"IsCompatible",
"InProgress",
"WasProcessed",
"WasSkipped",
"HasWarning",
"HasError",
type_name="PluginState"
)
GroupStates = flags(
"HasWarning",
"HasError",
"HasFinished",
type_name="GroupStates"
)
PluginActionStates = flags(
"InProgress",
"HasFailed",
"HasFinished",
type_name="PluginActionStates"
)
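# Illustrative values produced by the helpers above (a sketch only; assumes
# the conventional Qt.UserRole base of 256):
#   InstanceStates.ContextType == 1, InProgress == 2, HasWarning == 4,
#   HasError == 8, HasFinished == 16            # flags(): one bit per name
#   Roles.ObjectIdRole == QtCore.Qt.UserRole + 0, Roles.ObjectUIdRole == QtCore.Qt.UserRole + 1, ...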
| 19.316327
| 48
| 0.601162
|
b8623ba8e2e5a099b8470b56d8586c05c387eebb
| 2,585
|
py
|
Python
|
Tests/test_bigint.py
|
dsonbill/IronPython3-NETCore
|
8c76bdbec1754233f04b41ecd28e9bae2c862fd0
|
[
"Apache-2.0"
] | 2
|
2019-09-21T22:22:30.000Z
|
2020-05-09T12:45:51.000Z
|
Tests/test_bigint.py
|
dsonbill/IronPython3-NETCore
|
8c76bdbec1754233f04b41ecd28e9bae2c862fd0
|
[
"Apache-2.0"
] | null | null | null |
Tests/test_bigint.py
|
dsonbill/IronPython3-NETCore
|
8c76bdbec1754233f04b41ecd28e9bae2c862fd0
|
[
"Apache-2.0"
] | null | null | null |
#####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
import unittest
from iptest import run_test
class BigIntTest(unittest.TestCase):
def axiom_helper(self, a, b):
self.assertTrue((a / b) * b + (a % b) == a, "(" + str(a) + " / " + str(b) + ") * " + str(b) + " + ( " + str(a) + " % " + str(b) + ") != " + str(a))
def misc_helper(self, i, j, k):
u = i * j + k
self.axiom_helper(u, j)
def test_axioms(self):
a = -209681412991024529003047811046079621104607962110459585190118809030105845255159325119855216402270708
b = 37128952704582304957243524
self.axiom_helper(a,b)
a = 209681412991024529003047811046079621104607962110459585190118809030105845255159325119855216402270708
b = 37128952704582304957243524
self.axiom_helper(a,b)
def test_misc(self):
i = -5647382910564738291056473829105647382910564738291023857209485209457092435
j = 37128952704582304957243524
k = 37128952704582304957243524
k = k - j
while j > k:
self.misc_helper(i, j, k)
k = k * 2 + 312870870232
i = 5647382910564738291056473829105647382910564738291023857209485209457092435
while j > k:
self.misc_helper(i, j, k)
k = k * 2 + 312870870232
self.assertTrue(12297829382473034410)
def test_hex_conversions(self):
        # Test hex conversions. CPython 2.5 uses a capital L and lowercase letters (a...f).
s = hex(27) # 0x1b
self.assertTrue(s == "0x1bL", "27L: Expect lowercase digits. Received: %s." % (s));
s = hex(-27)
self.assertTrue(s == "-0x1bL", "-27L: Expect lowercase digits. Received: %s." % (s));
def test_negative_misc(self):
self.assertRaises(ValueError, #"invalid literal for long() with base 10: ''",
lambda: int(''))
run_test(__name__)
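# Worked instance of the identity exercised by axiom_helper above
# (illustrative only): for a = 17, b = 5,
#   (a // b) * b + (a % b) == 3 * 5 + 2 == 17 == a
# The tests apply the same floor-division identity to very large integers.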
| 35.902778
| 155
| 0.599613
|
e6b608aea4b9bb6b1fd9bd18d2a233fffe68fc54
| 738
|
py
|
Python
|
env_flag.py
|
paulmelnikow/env-flag
|
be074534896af557f54caef4f8d1cf64228f3aee
|
[
"BSD-2-Clause"
] | null | null | null |
env_flag.py
|
paulmelnikow/env-flag
|
be074534896af557f54caef4f8d1cf64228f3aee
|
[
"BSD-2-Clause"
] | 7
|
2018-11-26T10:21:18.000Z
|
2019-04-03T10:37:15.000Z
|
env_flag.py
|
paulmelnikow/env-flag-fork
|
be074534896af557f54caef4f8d1cf64228f3aee
|
[
"BSD-2-Clause"
] | null | null | null |
__version__ = '1.1.0'
def env_flag(env_var, default=False):
"""
Return the specified environment variable coerced to a bool, as follows:
- When the variable is unset, or set to the empty string, return `default`.
- When the variable is set to a truthy value, returns `True`.
These are the truthy values:
- 1
- true, yes, on
    - When the variable is set to anything else, returns `False`.
Example falsy values:
- 0
- no
- Ignore case and leading/trailing whitespace.
"""
import os
environ_string = os.environ.get(env_var, '').strip().lower()
if not environ_string:
return default
return environ_string in ['1', 'true', 'yes', 'on']
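if __name__ == "__main__":
    # Illustrative usage sketch; the variable names below are invented for the
    # example and are not part of this module.
    import os
    os.environ["EXAMPLE_FLAG"] = " Yes "
    assert env_flag("EXAMPLE_FLAG") is True              # truthy, case/whitespace ignored
    assert env_flag("EXAMPLE_FLAG_MISSING") is False     # unset -> default (False)
    assert env_flag("EXAMPLE_FLAG_MISSING", default=True) is True
    os.environ["EXAMPLE_FLAG"] = "0"
    assert env_flag("EXAMPLE_FLAG") is False             # falsy value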
| 32.086957
| 79
| 0.624661
|
48d85ef46246873b21fc464b8ee01432399f6bc9
| 11,040
|
py
|
Python
|
bnpy/allocmodel/topics/LocalStepSingleDoc.py
|
co2meal/-bnpy-dev
|
74f69afde6c9dac8de4c074842df53ae87a15ac1
|
[
"BSD-3-Clause"
] | null | null | null |
bnpy/allocmodel/topics/LocalStepSingleDoc.py
|
co2meal/-bnpy-dev
|
74f69afde6c9dac8de4c074842df53ae87a15ac1
|
[
"BSD-3-Clause"
] | null | null | null |
bnpy/allocmodel/topics/LocalStepSingleDoc.py
|
co2meal/-bnpy-dev
|
74f69afde6c9dac8de4c074842df53ae87a15ac1
|
[
"BSD-3-Clause"
] | null | null | null |
from scipy.special import digamma, gammaln
import numpy as np
def calcLocalParams_SingleDoc(
wc_d, Lik_d, alphaEbeta, alphaEbetaRem=None,
DocTopicCount_d=None, sumResp_d=None,
nCoordAscentItersLP=10, convThrLP=0.001,
restartLP=0,
**kwargs):
''' Infer local parameters for a single document.
Args
--------
wc_d : scalar or 1D array, size N
word counts for document d
Lik_d : 2D array, size N x K
Likelihood values for each token n and topic k.
alphaEbeta : 1D array, size K
Scalar prior parameter for each active topic, under the prior.
alphaEbetaRem : None or scalar
Scalar prior parameter for all inactive topics, aggregated.
Used only for ELBO calculation, not any update equations.
Kwargs
--------
nCoordAscentItersLP : int
Number of local step iterations to do for this document.
convThrLP : float
Threshold for convergence to halt iterations early.
restartLP : int
If 0, do not perform sparse restarts.
If 1, perform sparse restarts.
Returns
--------
DocTopicCount_d : 1D array, size K
DocTopicProb_d : 1D array, size K
Updated probability vector for active topics in this doc.
Known up to a multiplicative constant.
sumResp_d : 1D array, size N_d
sumResp_d[n] is normalization constant for token n.
That is, resp[n, :] / sumResp_d[n] will sum to one, when
resp[n,k] is computed from DocTopicCount_d and Lik_d.
Info : dict
Contains info about convergence, sparse restarts, etc.
'''
if sumResp_d is None:
sumResp_d = np.zeros(Lik_d.shape[0])
# Initialize prior from global topic probs
DocTopicProb_d = alphaEbeta.copy()
if DocTopicCount_d is None:
# Update sumResp for all tokens in document
np.dot(Lik_d, DocTopicProb_d, out=sumResp_d)
# Update DocTopicCounts
DocTopicCount_d = np.zeros_like(DocTopicProb_d)
np.dot(wc_d / sumResp_d, Lik_d, out=DocTopicCount_d)
DocTopicCount_d *= DocTopicProb_d
prevDocTopicCount_d = DocTopicCount_d.copy()
for iter in xrange(nCoordAscentItersLP):
# Update Prob of Active Topics
# First, in logspace, so Prob_d[k] = E[ log pi_dk ] + const
np.add(DocTopicCount_d, alphaEbeta, out=DocTopicProb_d)
digamma(DocTopicProb_d, out=DocTopicProb_d)
# TODO: subtract max for safe exp? doesnt seem necessary...
# Convert: Prob_d[k] = exp E[ log pi_dk ] / const
np.exp(DocTopicProb_d, out=DocTopicProb_d)
# Update sumResp for all tokens in document
np.dot(Lik_d, DocTopicProb_d, out=sumResp_d)
# Update DocTopicCounts
np.dot(wc_d / sumResp_d, Lik_d, out=DocTopicCount_d)
DocTopicCount_d *= DocTopicProb_d
# Check for convergence
if iter % 5 == 0:
maxDiff = np.max(np.abs(DocTopicCount_d - prevDocTopicCount_d))
if maxDiff < convThrLP:
break
prevDocTopicCount_d[:] = DocTopicCount_d
Info = dict(maxDiff=maxDiff, iter=iter)
if restartLP:
DocTopicCount_d, DocTopicProb_d, sumResp_d, RInfo = \
removeJunkTopics_SingleDoc(
wc_d, Lik_d, alphaEbeta, alphaEbetaRem,
DocTopicCount_d, DocTopicProb_d, sumResp_d, **kwargs)
Info.update(RInfo)
return DocTopicCount_d, DocTopicProb_d, sumResp_d, Info
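# Minimal usage sketch (illustrative only; array shapes follow the docstring):
#   N, K = 50, 5
#   wc_d = np.ones(N)                        # one count per token
#   Lik_d = np.random.rand(N, K) + 1e-8      # token-by-topic likelihood values
#   alphaEbeta = 0.5 * np.ones(K)            # prior weight for each active topic
#   DocTopicCount_d, DocTopicProb_d, sumResp_d, Info = \
#       calcLocalParams_SingleDoc(wc_d, Lik_d, alphaEbeta)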
def removeJunkTopics_SingleDoc(
wc_d, Lik_d, alphaEbeta, alphaEbetaRem,
DocTopicCount_d, DocTopicProb_d, sumResp_d,
restartNumTrialsLP=5,
restartNumItersLP=2,
restartCriteriaLP='smallest',
restartMinSizeThrLP=0.001,
**kwargs):
''' Propose candidate local parameters, accept if ELBO improves.
Returns
--------
DocTopicCount_d : 1D array, size K
DocTopicProb_d : 1D array, size K
sumResp_d : 1D array, size N
Info : dict
'''
Info = dict(nTrial=0, nAccept=0)
# usedTopics : 1D array of int ids of topics with mass above MinSizeThr
usedTopicMask = DocTopicCount_d > restartMinSizeThrLP
usedTopics = np.flatnonzero(usedTopicMask)
nUsed = np.sum(usedTopicMask)
if nUsed < 2:
return DocTopicCount_d, DocTopicProb_d, sumResp_d, Info
# Measure current model quality via ELBO
curELBO = calcELBO_SingleDoc(
DocTopicCount_d, DocTopicProb_d, sumResp_d,
wc_d, alphaEbeta, alphaEbetaRem)
Info['startELBO'] = curELBO
# Determine eligible topics to delete
# smallTopics : 1D array of int topic ids to try deleting
smallIDs = np.argsort(DocTopicCount_d[usedTopics])[:restartNumTrialsLP]
smallTopics = usedTopics[smallIDs]
smallTopics = smallTopics[:nUsed - 1]
pDocTopicCount_d = np.zeros_like(DocTopicCount_d)
pDocTopicProb_d = np.zeros_like(DocTopicProb_d)
psumResp_d = np.zeros_like(sumResp_d)
for kID in smallTopics:
# Propose deleting current "small" topic
pDocTopicCount_d[:] = DocTopicCount_d
pDocTopicCount_d[kID] = 0
# Refine initial proposal via standard coord ascent updates
for iter in xrange(restartNumItersLP):
np.add(pDocTopicCount_d, alphaEbeta, out=pDocTopicProb_d)
digamma(pDocTopicProb_d, out=pDocTopicProb_d)
np.exp(pDocTopicProb_d, out=pDocTopicProb_d)
np.dot(Lik_d, pDocTopicProb_d, out=psumResp_d)
# Update DocTopicCounts
np.dot(wc_d / psumResp_d, Lik_d, out=pDocTopicCount_d)
pDocTopicCount_d *= pDocTopicProb_d
# Evaluate proposal quality via ELBO
propELBO = calcELBO_SingleDoc(
pDocTopicCount_d, pDocTopicProb_d, psumResp_d,
wc_d, alphaEbeta, alphaEbetaRem)
Info['nTrial'] += 1
if not np.isfinite(propELBO):
print 'WARNING! propELBO not finite.'
continue
# Update if accepted!
if propELBO > curELBO:
Info['nAccept'] += 1
curELBO = propELBO
DocTopicCount_d[:] = pDocTopicCount_d
DocTopicProb_d[:] = pDocTopicProb_d
sumResp_d[:] = psumResp_d
nUsed -= 1
if nUsed < 2:
break
# Package up and return
Info['finalELBO'] = curELBO
return DocTopicCount_d, DocTopicProb_d, sumResp_d, Info
def calcELBO_SingleDoc(DocTopicCount_d, DocTopicProb_d, sumResp_d,
wc_d, alphaEbeta, alphaEbetaRem):
''' Calculate single document contribution to the ELBO objective.
This isolates all ELBO terms that depend on local parameters of this doc.
Returns
-------
L : scalar float
value of ELBO objective, up to additive constant.
This constant is independent of any local parameter attached to doc d.
'''
theta_d = DocTopicCount_d + alphaEbeta
if alphaEbetaRem is None:
# LDA model, with K active topics
sumTheta = theta_d.sum()
digammaSum = digamma(sumTheta)
ElogPi_d = digamma(theta_d) - digammaSum
L_alloc = np.sum(gammaln(theta_d)) - gammaln(sumTheta)
# SLACK terms are always equal to zero!
else:
# HDP, with K active topics and one aggregate "leftover" topic
sumTheta = theta_d.sum() + alphaEbetaRem
digammaSum = digamma(sumTheta)
ElogPi_d = digamma(theta_d) - digammaSum
ElogPiRem = digamma(alphaEbetaRem) - digammaSum
L_alloc = np.sum(gammaln(theta_d)) + gammaln(alphaEbetaRem) \
- gammaln(sumTheta)
# SLACK terms are always equal to zero!
if isinstance(wc_d, float):
L_rest = np.sum(np.log(sumResp_d))
else:
L_rest = np.inner(wc_d, np.log(sumResp_d))
L_rest -= np.inner(DocTopicCount_d, np.log(DocTopicProb_d + 1e-100))
return L_alloc + L_rest
def calcLocalParams_SingleDoc_WithELBOTrace(
wc_d, Lik_d, alphaEbeta, alphaEbetaRem=None,
DocTopicCount_d=None, sumResp_d=None,
nCoordAscentItersLP=10, convThrLP=0.001,
restartLP=0,
**kwargs):
''' Infer local parameters for a single document, with ELBO trace.
Performs same calculations as calcLocalParams_SingleDoc,
but (expensively) tracks the ELBO at every local step iteration.
Thus, we refactored this into a separate function, so we do not
pay a performance penalty for an if statement in the inner loop.
Args
--------
Same as calcLocalParams_SingleDoc
Returns
--------
DocTopicCount_d : updated doc-topic counts
Prior_d : prob of topic in document, up to mult. constant
sumR_d : normalization constant for each token
Info : dict, with field
* 'ELBOtrace' : 1D array, size nIters
which gives the ELBO over the iterations on this document
up to additive const indep of local params.
'''
if sumResp_d is None:
sumResp_d = np.zeros(Lik_d.shape[0])
# Initialize prior from global topic probs
DocTopicProb_d = alphaEbeta.copy()
if DocTopicCount_d is None:
# Update sumResp for all tokens in document
np.dot(Lik_d, DocTopicProb_d, out=sumResp_d)
# Update DocTopicCounts
DocTopicCount_d = np.zeros_like(DocTopicProb_d)
np.dot(wc_d / sumResp_d, Lik_d, out=DocTopicCount_d)
DocTopicCount_d *= DocTopicProb_d
ELBOtrace = list()
prevDocTopicCount_d = DocTopicCount_d.copy()
for iter in xrange(nCoordAscentItersLP):
# Update Prob of Active Topics
# First, in logspace, so Prob_d[k] = E[ log pi_dk ] + const
np.add(DocTopicCount_d, alphaEbeta, out=DocTopicProb_d)
digamma(DocTopicProb_d, out=DocTopicProb_d)
# TODO: subtract max for safe exp? doesnt seem necessary...
# Convert: Prob_d[k] = exp E[ log pi_dk ] / const
np.exp(DocTopicProb_d, out=DocTopicProb_d)
# Update sumResp for all tokens in document
np.dot(Lik_d, DocTopicProb_d, out=sumResp_d)
# Update DocTopicCounts
np.dot(wc_d / sumResp_d, Lik_d, out=DocTopicCount_d)
DocTopicCount_d *= DocTopicProb_d
# Calculate ELBO objective at current assignments
curELBO = calcELBO_SingleDoc(
DocTopicCount_d, DocTopicProb_d, sumResp_d,
wc_d, alphaEbeta, alphaEbetaRem)
ELBOtrace.append(curELBO)
# Check for convergence
if iter % 5 == 0:
maxDiff = np.max(np.abs(DocTopicCount_d - prevDocTopicCount_d))
if maxDiff < convThrLP:
break
prevDocTopicCount_d[:] = DocTopicCount_d
Info = dict(maxDiff=maxDiff, iter=iter)
Info['ELBOtrace'] = np.asarray(ELBOtrace)
if restartLP:
DocTopicCount_d, DocTopicProb_d, sumResp_d, RInfo = \
removeJunkTopics_SingleDoc(
wc_d, Lik_d, alphaEbeta, alphaEbetaRem,
DocTopicCount_d, DocTopicProb_d, sumResp_d, **kwargs)
Info.update(RInfo)
return DocTopicCount_d, DocTopicProb_d, sumResp_d, Info
| 35.612903
| 78
| 0.656884
|
29b5efbdc95a6c51e3142f8305541dfb270c96fa
| 6,763
|
py
|
Python
|
shellpartylib/lib/messages/shellpay.py
|
satoshichain/satoshiparty-lib
|
87b99678e5498b1f536f9be589b1b0cde6f21ad0
|
[
"MIT"
] | null | null | null |
shellpartylib/lib/messages/shellpay.py
|
satoshichain/satoshiparty-lib
|
87b99678e5498b1f536f9be589b1b0cde6f21ad0
|
[
"MIT"
] | null | null | null |
shellpartylib/lib/messages/shellpay.py
|
satoshichain/satoshiparty-lib
|
87b99678e5498b1f536f9be589b1b0cde6f21ad0
|
[
"MIT"
] | null | null | null |
#! /usr/bin/python3
import binascii
import struct
import logging
logger = logging.getLogger(__name__)
from shellpartylib.lib import config
from shellpartylib.lib import exceptions
from shellpartylib.lib import util
from shellpartylib.lib import log
FORMAT = '>32s32s'
LENGTH = 32 + 32
ID = 11
def initialise(db):
cursor = db.cursor()
cursor.execute('''CREATE TABLE IF NOT EXISTS shellpays(
tx_index INTEGER PRIMARY KEY,
tx_hash TEXT UNIQUE,
block_index INTEGER,
source TEXT,
destination TEXT,
shell_amount INTEGER,
order_match_id TEXT,
status TEXT,
FOREIGN KEY (tx_index, tx_hash, block_index) REFERENCES transactions(tx_index, tx_hash, block_index))
''')
# Disallows invalids: FOREIGN KEY (order_match_id) REFERENCES order_matches(id))
cursor.execute('''CREATE INDEX IF NOT EXISTS
block_index_idx ON shellpays (block_index)
''')
cursor.execute('''CREATE INDEX IF NOT EXISTS
source_idx ON shellpays (source)
''')
cursor.execute('''CREATE INDEX IF NOT EXISTS
destination_idx ON shellpays (destination)
''')
def validate (db, source, order_match_id, block_index):
problems = []
order_match = None
cursor = db.cursor()
cursor.execute('''SELECT * FROM order_matches \
WHERE id = ?''', (order_match_id,))
order_matches = cursor.fetchall()
cursor.close()
if len(order_matches) == 0:
problems.append('no such order match')
return None, None, None, None, order_match, problems
elif len(order_matches) > 1:
assert False
else:
order_match = order_matches[0]
if order_match['status'] == 'expired':
problems.append('order match expired')
elif order_match['status'] == 'completed':
problems.append('order match completed')
elif order_match['status'].startswith('invalid'):
problems.append('order match invalid')
elif order_match['status'] != 'pending':
raise exceptions.OrderError('unrecognised order match status')
# Figure out to which address the SCH are being paid.
# Check that source address is correct.
if order_match['backward_asset'] == config.SCH:
if source != order_match['tx1_address'] and not (block_index >= 313900 or config.TESTNET): # Protocol change.
problems.append('incorrect source address')
destination = order_match['tx0_address']
shell_quantity = order_match['backward_quantity']
escrowed_asset = order_match['forward_asset']
escrowed_quantity = order_match['forward_quantity']
elif order_match['forward_asset'] == config.SCH:
if source != order_match['tx0_address'] and not (block_index >= 313900 or config.TESTNET): # Protocol change.
problems.append('incorrect source address')
destination = order_match['tx1_address']
shell_quantity = order_match['forward_quantity']
escrowed_asset = order_match['backward_asset']
escrowed_quantity = order_match['backward_quantity']
else:
assert False
return destination, shell_quantity, escrowed_asset, escrowed_quantity, order_match, problems
def compose (db, source, order_match_id):
tx0_hash, tx1_hash = util.parse_id(order_match_id)
destination, shell_quantity, escrowed_asset, escrowed_quantity, order_match, problems = validate(db, source, order_match_id, util.CURRENT_BLOCK_INDEX)
if problems: raise exceptions.ComposeError(problems)
# Warn if down to the wire.
time_left = order_match['match_expire_index'] - util.CURRENT_BLOCK_INDEX
if time_left < 4:
        logger.warning('Only {} blocks until that order match expires. The payment might not make it into the blockchain in time.'.format(time_left))
if 10 - time_left < 4:
logger.warning('Order match has only {} confirmation(s).'.format(10 - time_left))
tx0_hash_bytes, tx1_hash_bytes = binascii.unhexlify(bytes(tx0_hash, 'utf-8')), binascii.unhexlify(bytes(tx1_hash, 'utf-8'))
data = struct.pack(config.TXTYPE_FORMAT, ID)
data += struct.pack(FORMAT, tx0_hash_bytes, tx1_hash_bytes)
return (source, [(destination, shell_quantity)], data)
def parse (db, tx, message):
cursor = db.cursor()
# Unpack message.
try:
if len(message) != LENGTH:
raise exceptions.UnpackError
tx0_hash_bytes, tx1_hash_bytes = struct.unpack(FORMAT, message)
tx0_hash, tx1_hash = binascii.hexlify(tx0_hash_bytes).decode('utf-8'), binascii.hexlify(tx1_hash_bytes).decode('utf-8')
order_match_id = util.make_id(tx0_hash, tx1_hash)
status = 'valid'
except (exceptions.UnpackError, struct.error) as e:
tx0_hash, tx1_hash, order_match_id = None, None, None
status = 'invalid: could not unpack'
if status == 'valid':
destination, shell_quantity, escrowed_asset, escrowed_quantity, order_match, problems = validate(db, tx['source'], order_match_id, tx['block_index'])
if problems:
order_match = None
status = 'invalid: ' + '; '.join(problems)
if status == 'valid':
# SCH must be paid all at once.
if tx['shell_amount'] >= shell_quantity:
# Credit source address for the currency that he bought with the SatoshiChains.
util.credit(db, tx['source'], escrowed_asset, escrowed_quantity, action='shellpay', event=tx['tx_hash'])
status = 'valid'
# Update order match.
bindings = {
'status': 'completed',
'order_match_id': order_match_id
}
sql='update order_matches set status = :status where id = :order_match_id'
cursor.execute(sql, bindings)
log.message(db, tx['block_index'], 'update', 'order_matches', bindings)
# Add parsed transaction to message-type–specific table.
bindings = {
'tx_index': tx['tx_index'],
'tx_hash': tx['tx_hash'],
'block_index': tx['block_index'],
'source': tx['source'],
'destination': tx['destination'],
'shell_amount': tx['shell_amount'],
'order_match_id': order_match_id,
'status': status,
}
sql='insert into shellpays values(:tx_index, :tx_hash, :block_index, :source, :destination, :shell_amount, :order_match_id, :status)'
cursor.execute(sql, bindings)
cursor.close()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
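# Illustrative sketch of the 64-byte payload handled above (not part of the
# module): two raw 32-byte transaction hashes packed back-to-back.
#   payload = struct.pack(FORMAT, tx0_hash_bytes, tx1_hash_bytes)   # len(payload) == LENGTH
#   tx0_hash_bytes, tx1_hash_bytes = struct.unpack(FORMAT, payload)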
| 42.006211
| 157
| 0.640248
|
bb9e8e6a380aac2b42a120a11ed1a8e292a0efe7
| 3,526
|
py
|
Python
|
virtual/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py
|
Jamesmwangi245/flask-3
|
fb5bdb44cc55a45a27a206d08cb811bedfc6b21e
|
[
"MIT"
] | 7,089
|
2015-01-01T10:48:04.000Z
|
2022-03-31T08:47:02.000Z
|
virtual/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py
|
Jamesmwangi245/flask-3
|
fb5bdb44cc55a45a27a206d08cb811bedfc6b21e
|
[
"MIT"
] | 8,417
|
2015-01-01T13:03:16.000Z
|
2022-03-31T17:40:27.000Z
|
virtual/lib/python3.8/site-packages/pip/_internal/commands/uninstall.py
|
Jamesmwangi245/flask-3
|
fb5bdb44cc55a45a27a206d08cb811bedfc6b21e
|
[
"MIT"
] | 2,663
|
2015-01-02T04:02:12.000Z
|
2022-03-30T02:30:46.000Z
|
import logging
from optparse import Values
from typing import List
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements
from pip._internal.req.constructors import (
install_req_from_line,
install_req_from_parsed_requirement,
)
from pip._internal.utils.misc import protect_pip_from_modification_on_windows
logger = logging.getLogger(__name__)
class UninstallCommand(Command, SessionCommandMixin):
"""
Uninstall packages.
pip is able to uninstall most installed packages. Known exceptions are:
- Pure distutils packages installed with ``python setup.py install``, which
leave behind no metadata to determine what files were installed.
- Script wrappers installed by ``python setup.py develop``.
"""
usage = """
%prog [options] <package> ...
%prog [options] -r <requirements file> ..."""
def add_options(self) -> None:
self.cmd_opts.add_option(
"-r",
"--requirement",
dest="requirements",
action="append",
default=[],
metavar="file",
help=(
"Uninstall all the packages listed in the given requirements "
"file. This option can be used multiple times."
),
)
self.cmd_opts.add_option(
"-y",
"--yes",
dest="yes",
action="store_true",
help="Don't ask for confirmation of uninstall deletions.",
)
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options: Values, args: List[str]) -> int:
session = self.get_default_session(options)
reqs_to_uninstall = {}
for name in args:
req = install_req_from_line(
name,
isolated=options.isolated_mode,
)
if req.name:
reqs_to_uninstall[canonicalize_name(req.name)] = req
else:
logger.warning(
"Invalid requirement: %r ignored -"
" the uninstall command expects named"
" requirements.",
name,
)
for filename in options.requirements:
for parsed_req in parse_requirements(
filename, options=options, session=session
):
req = install_req_from_parsed_requirement(
parsed_req, isolated=options.isolated_mode
)
if req.name:
reqs_to_uninstall[canonicalize_name(req.name)] = req
if not reqs_to_uninstall:
raise InstallationError(
f"You must give at least one requirement to {self.name} (see "
f'"pip help {self.name}")'
)
protect_pip_from_modification_on_windows(
modifying_pip="pip" in reqs_to_uninstall
)
for req in reqs_to_uninstall.values():
uninstall_pathset = req.uninstall(
auto_confirm=options.yes,
verbose=self.verbosity > 0,
)
if uninstall_pathset:
uninstall_pathset.commit()
warn_if_run_as_root()
return SUCCESS
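# --- Editor's hedged usage sketch (not part of the original file) ---
# UninstallCommand is the class behind `pip uninstall`, so the usual way to
# exercise the logic above is simply through the CLI, e.g.
#   pip uninstall -y example-package
#   pip uninstall -r requirements.txt -y
# Programmatically, shelling out to the current interpreter's pip avoids
# importing pip internals ("example-package" is a placeholder name):
import subprocess
import sys

def uninstall_package(name):
    """Run `pip uninstall -y <name>` and return pip's exit code."""
    completed = subprocess.run(
        [sys.executable, "-m", "pip", "uninstall", "-y", name],
        check=False,
    )
    return completed.returncode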
| 33.264151
| 82
| 0.598128
|
e572e794ab0a85a2882e51223807e73b6f05cfea
| 13,306
|
py
|
Python
|
sdk/attestation/azure-security-attestation/azure/security/attestation/_administration_client.py
|
lynshi/azure-sdk-for-python
|
40c530f2e9a6d93025b01cc8f6c94829c7fe95fc
|
[
"MIT"
] | null | null | null |
sdk/attestation/azure-security-attestation/azure/security/attestation/_administration_client.py
|
lynshi/azure-sdk-for-python
|
40c530f2e9a6d93025b01cc8f6c94829c7fe95fc
|
[
"MIT"
] | null | null | null |
sdk/attestation/azure-security-attestation/azure/security/attestation/_administration_client.py
|
lynshi/azure-sdk-for-python
|
40c530f2e9a6d93025b01cc8f6c94829c7fe95fc
|
[
"MIT"
] | null | null | null |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------
from typing import List, Any, Optional, TYPE_CHECKING
from azure.core import PipelineClient
from msrest import Deserializer, Serializer
from six import python_2_unicode_compatible
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any
from azure.core.credentials import TokenCredential
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from ._generated import AzureAttestationRestClient
from ._generated.models import AttestationType, PolicyResult, PolicyCertificatesResult, JSONWebKey, AttestationCertificateManagementBody, PolicyCertificatesModificationResult as GeneratedPolicyCertificatesModificationResult
from ._configuration import AttestationClientConfiguration
from ._models import AttestationSigner, AttestationToken, AttestationResponse, StoredAttestationPolicy, AttestationSigningKey, PolicyCertificatesModificationResult
from ._common import Base64Url
import cryptography
import cryptography.x509
import base64
from azure.core.tracing.decorator import distributed_trace
from threading import Lock, Thread
class AttestationAdministrationClient(object):
"""Provides administrative APIs for managing an instance of the Attestation Service.
:param str instance_url: base url of the service
:param credential: Credentials for the caller used to interact with the service.
:type credential: azure.core.credentials.TokenCredential
:keyword Pipeline pipeline: If omitted, the standard pipeline is used.
:keyword HttpTransport transport: If omitted, the standard pipeline is used.
:keyword list[HTTPPolicy] policies: If omitted, the standard pipeline is used.
"""
def __init__(
self,
credential, # type: "TokenCredential"
instance_url, # type: str
**kwargs # type: Any
):
# type: (...) -> None
if not credential:
raise ValueError("Missing credential.")
self._config = AttestationClientConfiguration(credential, instance_url, **kwargs)
self._client = AzureAttestationRestClient(credential, instance_url, **kwargs)
self._statelock = Lock()
self._signing_certificates = None
@distributed_trace
def get_policy(self, attestation_type, **kwargs):
#type:(AttestationType, **Any) -> AttestationResponse[str]
""" Retrieves the attestation policy for a specified attestation type.
:param azure.security.attestation.AttestationType attestation_type: :class:`azure.security.attestation.AttestationType` for
which to retrieve the policy.
:return AttestationResponse[str]: Attestation service response encapsulating a string attestation policy.
"""
policyResult = self._client.policy.get(attestation_type, **kwargs)
token = AttestationToken[PolicyResult](token=policyResult.token, body_type=PolicyResult)
token_body = token.get_body()
stored_policy = AttestationToken[StoredAttestationPolicy](token=token_body.policy, body_type=StoredAttestationPolicy)
actual_policy = stored_policy.get_body().attestation_policy #type: bytes
if self._config.token_validation_options.validate_token:
token.validate_token(self._config.token_validation_options, self._get_signers(**kwargs))
return AttestationResponse[str](token, actual_policy.decode('utf-8'))
@distributed_trace
def set_policy(self, attestation_type, attestation_policy, signing_key=None, **kwargs):
#type:(AttestationType, str, Optional[AttestationSigningKey], **Any) -> AttestationResponse[PolicyResult]
""" Sets the attestation policy for the specified attestation type.
:param azure.security.attestation.AttestationType attestation_type: :class:`azure.security.attestation.AttestationType` for
which to set the policy.
:param str attestation_policy: Attestation policy to be set.
:param Optional[AttestationSigningKey] signing_key: Optional signing key to be
used to sign the policy before sending it to the service.
:return AttestationResponse[PolicyResult]: Attestation service response encapsulating a :class:`PolicyResult`.
.. note::
If the attestation instance is in *Isolated* mode, then the
`signing_key` parameter MUST be a signing key containing one of the
certificates returned by :meth:`get_policy_management_certificates`.
If the attestation instance is in *AAD* mode, then the `signing_key`
parameter does not need to be provided.
"""
policy_token = AttestationToken[StoredAttestationPolicy](
body=StoredAttestationPolicy(attestation_policy = attestation_policy.encode('ascii')),
signer=signing_key,
body_type=StoredAttestationPolicy)
policyResult = self._client.policy.set(attestation_type=attestation_type, new_attestation_policy=policy_token.serialize(), **kwargs)
token = AttestationToken[PolicyResult](token=policyResult.token,
body_type=PolicyResult)
if self._config.token_validation_options.validate_token:
if not token.validate_token(self._config.token_validation_options, self._get_signers(**kwargs)):
raise Exception("Token Validation of PolicySet API failed.")
return AttestationResponse[PolicyResult](token, token.get_body())
@distributed_trace
def get_policy_management_certificates(self, **kwargs):
#type:(**Any) -> AttestationResponse[list[list[bytes]]]
""" Retrieves the set of policy management certificates for the instance.
The list of policy management certificates will only be non-empty if the
attestation service instance is in Isolated mode.
:return AttestationResponse[list[list[bytes]]]: Attestation service response
encapsulating a list of DER encoded X.509 certificate chains.
"""
cert_response = self._client.policy_certificates.get(**kwargs)
token = AttestationToken[PolicyCertificatesResult](
token=cert_response.token,
body_type=PolicyCertificatesResult)
if self._config.token_validation_options.validate_token:
if not token.validate_token(self._config.token_validation_options, self._get_signers(**kwargs)):
raise Exception("Token Validation of PolicyCertificates API failed.")
certificates = []
cert_list = token.get_body()
for key in cert_list.policy_certificates.keys:
key_certs = [base64.b64decode(cert) for cert in key.x5_c]
certificates.append(key_certs)
return AttestationResponse(token, certificates)
@distributed_trace
def add_policy_management_certificate(self, certificate_to_add, signing_key, **kwargs):
#type:(bytes, AttestationSigningKey, **Any)-> AttestationResponse[PolicyCertificatesModificationResult]
""" Adds a new policy management certificate to the set of policy management certificates for the instance.
:param bytes certificate_to_add: DER encoded X.509 certificate to add to
the list of attestation policy management certificates.
:param AttestationSigningKey signing_key: Signing Key representing one of
the *existing* attestation signing certificates.
:return AttestationResponse[PolicyCertificatesModificationResult]: Attestation service response
encapsulating the status of the add request.
The :class:`PolicyCertificatesModificationResult` response to the
:meth:`add_policy_management_certificate` API contains two attributes
of interest.
The first is `certificate_resolution`, which indicates
whether the certificate in question is present in the set of policy
management certificates after the operation has completed, or if it is
absent.
The second is the `thumbprint` of the certificate added. The `thumbprint`
for the certificate is the SHA1 hash of the DER encoding of the
certificate.
"""
key=JSONWebKey(kty='RSA', x5_c = [ base64.b64encode(certificate_to_add).decode('ascii')])
add_body = AttestationCertificateManagementBody(policy_certificate=key)
cert_add_token = AttestationToken[AttestationCertificateManagementBody](
body=add_body,
signer=signing_key,
body_type=AttestationCertificateManagementBody)
cert_response = self._client.policy_certificates.add(cert_add_token.serialize(), **kwargs)
token = AttestationToken[GeneratedPolicyCertificatesModificationResult](token=cert_response.token,
body_type=GeneratedPolicyCertificatesModificationResult)
if self._config.token_validation_options.validate_token:
if not token.validate_token(self._config.token_validation_options, self._get_signers(**kwargs)):
raise Exception("Token Validation of PolicyCertificate Add API failed.")
return AttestationResponse[PolicyCertificatesModificationResult](token, PolicyCertificatesModificationResult._from_generated(token.get_body()))
@distributed_trace
def remove_policy_management_certificate(self, certificate_to_add, signing_key, **kwargs):
#type:(bytes, AttestationSigningKey, **Any)-> AttestationResponse[PolicyCertificatesModificationResult]
""" Removes a new policy management certificate to the set of policy management certificates for the instance.
:param bytes certificate_to_add: DER encoded X.509 certificate to remove from
the list of attestation policy management certificates.
:param AttestationSigningKey signing_key: Signing Key representing one of
the *existing* attestation signing certificates.
:return AttestationResponse[PolicyCertificatesModificationResult]: Attestation service response
encapsulating the status of the remove request.
The :class:`PolicyCertificatesModificationResult` response to the
:meth:`remove_policy_management_certificate` API contains two attributes
of interest.
The first is `certificate_resolution`, which indicates
whether the certificate in question is present in the set of policy
management certificates after the operation has completed, or if it is
absent.
The second is the `thumbprint` of the certificate removed. The `thumbprint`
for the certificate is the SHA1 hash of the DER encoding of the
certificate.
"""
key=JSONWebKey(kty='RSA', x5_c = [ base64.b64encode(certificate_to_add).decode('ascii')])
add_body = AttestationCertificateManagementBody(policy_certificate=key)
cert_add_token = AttestationToken[AttestationCertificateManagementBody](
body=add_body,
signer=signing_key,
body_type=AttestationCertificateManagementBody)
cert_response = self._client.policy_certificates.remove(cert_add_token.serialize(), **kwargs)
token = AttestationToken[GeneratedPolicyCertificatesModificationResult](token=cert_response.token,
body_type=GeneratedPolicyCertificatesModificationResult)
if self._config.token_validation_options.validate_token:
if not token.validate_token(self._config.token_validation_options, self._get_signers(**kwargs)):
raise Exception("Token Validation of PolicyCertificate Remove API failed.")
return AttestationResponse[PolicyCertificatesModificationResult](token, PolicyCertificatesModificationResult._from_generated(token.get_body()))
def _get_signers(self, **kwargs):
#type:(**Any) -> List[AttestationSigner]
""" Returns the set of signing certificates used to sign attestation tokens.
"""
with self._statelock:
if self._signing_certificates is None:
signing_certificates = self._client.signing_certificates.get(**kwargs)
self._signing_certificates = []
for key in signing_certificates.keys:
# Convert the returned certificate chain into an array of X.509 Certificates.
certificates = []
for x5c in key.x5_c:
der_cert = base64.b64decode(x5c)
certificates.append(der_cert)
self._signing_certificates.append(AttestationSigner(certificates, key.kid))
signers = self._signing_certificates
return signers
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> AttestationAdministrationClient
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
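# --- Editor's hedged usage sketch (not part of the original file) ---
# Minimal illustration of driving the client defined above. The endpoint URL
# and credential are supplied by the caller (for example DefaultAzureCredential
# from the azure-identity package); AttestationType.SGX_ENCLAVE is one of the
# enum values imported near the top of this module.
def _example_get_sgx_policy(endpoint, credential):
    """Illustrative only: fetch the SGX enclave policy for an attestation instance."""
    with AttestationAdministrationClient(credential, endpoint) as admin_client:
        # The response wraps the validated AttestationToken together with the
        # policy text, as described in get_policy above.
        return admin_client.get_policy(AttestationType.SGX_ENCLAVE)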
| 52.180392
| 223
| 0.713888
|
6d6353e8ab5a417c54b97d05deee4f9b291f98a0
| 18,092
|
py
|
Python
|
examples/entity_disambiguation/utils.py
|
Raabia-Asif/luke
|
9323b216dd5f72b61545bc4133f7709fd19bfa95
|
[
"Apache-2.0"
] | null | null | null |
examples/entity_disambiguation/utils.py
|
Raabia-Asif/luke
|
9323b216dd5f72b61545bc4133f7709fd19bfa95
|
[
"Apache-2.0"
] | null | null | null |
examples/entity_disambiguation/utils.py
|
Raabia-Asif/luke
|
9323b216dd5f72b61545bc4133f7709fd19bfa95
|
[
"Apache-2.0"
] | null | null | null |
# This code is based on the code obtained from here:
# https://github.com/lephong/mulrel-nel/blob/db14942450f72c87a4d46349860e96ef2edf353d/nel/dataset.py
import copy
import logging
import math
import os
import re
from collections import defaultdict
import numpy as np
logger = logging.getLogger(__name__)
punc_remover = re.compile(r"[\W]+")
class EntityDisambiguationDataset(object):
def __init__(self, dataset_dir, wikipedia_titles_file=None, wikipedia_redirects_file=None):
person_names = frozenset(load_person_names(os.path.join(dataset_dir, "persons.txt")))
self.train = load_documents(
os.path.join(dataset_dir, "aida_train.csv"), os.path.join(dataset_dir, "aida_train.txt"), person_names
)
self.test_a = load_documents(
os.path.join(dataset_dir, "aida_testA.csv"),
os.path.join(dataset_dir, "testa_testb_aggregate_original"),
person_names,
)
self.test_b = load_documents(
os.path.join(dataset_dir, "aida_testB.csv"),
os.path.join(dataset_dir, "testa_testb_aggregate_original"),
person_names,
)
self.ace2004 = load_documents(
os.path.join(dataset_dir, "wned-ace2004.csv"), os.path.join(dataset_dir, "ace2004.conll"), person_names
)
self.aquaint = load_documents(
os.path.join(dataset_dir, "wned-aquaint.csv"), os.path.join(dataset_dir, "aquaint.conll"), person_names
)
self.clueweb = load_documents(
os.path.join(dataset_dir, "wned-clueweb.csv"), os.path.join(dataset_dir, "clueweb.conll"), person_names
)
self.msnbc = load_documents(
os.path.join(dataset_dir, "wned-msnbc.csv"), os.path.join(dataset_dir, "msnbc.conll"), person_names
)
self.wikipedia = load_documents(
os.path.join(dataset_dir, "wned-wikipedia.csv"), os.path.join(dataset_dir, "wikipedia.conll"), person_names
)
self.test_a_ppr = load_ppr_candidates(
copy.deepcopy(self.test_a), os.path.join(dataset_dir, "pershina_candidates")
)
self.test_b_ppr = load_ppr_candidates(
copy.deepcopy(self.test_b), os.path.join(dataset_dir, "pershina_candidates")
)
valid_titles = None
if wikipedia_titles_file:
with open(wikipedia_titles_file) as f:
valid_titles = frozenset([t.rstrip() for t in f])
redirects = {}
if wikipedia_redirects_file:
with open(wikipedia_redirects_file) as f:
for line in f:
(src, dest) = line.rstrip().split("\t")
redirects[src] = dest
# build entity vocabulary and resolve Wikipedia redirects
for documents in self.get_all_datasets():
for document in documents:
new_mentions = []
for mention in document.mentions:
mention.title = redirects.get(mention.title, mention.title)
if valid_titles and mention.title not in valid_titles:
logger.debug("Invalid title: %s", mention.title)
continue
new_mentions.append(mention)
for candidate in mention.candidates:
candidate.title = redirects.get(candidate.title, candidate.title)
document.mentions = new_mentions
def get_all_datasets(self):
return (
self.train,
self.test_a,
self.test_b,
self.ace2004,
self.aquaint,
self.clueweb,
self.msnbc,
self.wikipedia,
self.test_a_ppr,
self.test_b_ppr,
)
class Document(object):
def __init__(self, id_, words, mentions):
self.id = id_
self.words = words
self.mentions = mentions
def __repr__(self):
return "<Document %s...>" % (" ".join(self.words[:3]),)
class Mention(object):
def __init__(self, text, title, start, end, candidates):
self.text = text
self.start = start
self.end = end
self.title = title
self.candidates = candidates
@property
def span(self):
return self.start, self.end
def __repr__(self):
return "<Mention %s->%s>" % (self.text, self.title)
class Candidate(object):
def __init__(self, title, prior_prob):
self.title = title
self.prior_prob = prior_prob
def __repr__(self):
return "<Candidate %s (prior prob: %.3f)>" % (self.title, self.prior_prob)
class InputFeatures(object):
def __init__(
self,
document,
mentions,
word_ids,
word_segment_ids,
word_attention_mask,
entity_ids,
entity_position_ids,
entity_segment_ids,
entity_attention_mask,
entity_candidate_ids,
target_mention_indices,
):
self.document = document
self.mentions = mentions
self.word_ids = word_ids
self.word_segment_ids = word_segment_ids
self.word_attention_mask = word_attention_mask
self.entity_ids = entity_ids
self.entity_position_ids = entity_position_ids
self.entity_segment_ids = entity_segment_ids
self.entity_attention_mask = entity_attention_mask
self.entity_candidate_ids = entity_candidate_ids
self.target_mention_indices = target_mention_indices
def load_person_names(input_file):
with open(input_file) as f:
return [t.strip() for t in f]
def load_documents(csv_path, conll_path, person_names):
document_data = {}
mention_data = load_mentions_from_csv_file(csv_path, person_names)
with open(conll_path, "r") as f:
cur_doc = {}
for line in f:
line = line.strip()
if line.startswith("-DOCSTART-"):
doc_name = line.split()[1][1:]
document_data[doc_name] = dict(words=[], mentions=[], mention_spans=[])
cur_doc = document_data[doc_name]
else:
comps = line.split("\t")
if len(comps) >= 6:
tag = comps[1]
if tag == "I":
cur_doc["mention_spans"][-1]["end"] += 1
else:
cur_doc["mention_spans"].append(
dict(start=len(cur_doc["words"]), end=len(cur_doc["words"]) + 1)
)
cur_doc["words"].append(comps[0])
documents = []
# merge with the mention_data
for (doc_name, mentions) in mention_data.items():
# This document is excluded in Le and Titov 2018:
# https://github.com/lephong/mulrel-nel/blob/db14942450f72c87a4d46349860e96ef2edf353d/nel/dataset.py#L221
if doc_name == "Jiří_Třanovský Jiří_Třanovský":
continue
document = document_data[doc_name.split()[0]]
mention_span_index = 0
for mention in mentions:
mention_text = punc_remover.sub("", mention["text"].lower())
while True:
doc_mention_span = document["mention_spans"][mention_span_index]
doc_mention_text = " ".join(document["words"][doc_mention_span["start"] : doc_mention_span["end"]])
doc_mention_text = punc_remover.sub("", doc_mention_text.lower())
if doc_mention_text == mention_text:
mention.update(doc_mention_span)
document["mentions"].append(mention)
mention_span_index += 1
break
else:
mention_span_index += 1
mentions = [Mention(**o) for o in document["mentions"]]
documents.append(Document(doc_name, document["words"], mentions))
return documents
def load_mentions_from_csv_file(path, person_names):
mention_data = defaultdict(list)
with open(path, "r") as f:
for line in f:
comps = line.strip().split("\t")
doc_name = comps[0] + " " + comps[1]
mention_text = comps[2]
if comps[6] != "EMPTYCAND":
candidates = [c.split(",") for c in comps[6:-2]]
candidates = [Candidate(",".join(c[2:]), float(c[1])) for c in candidates]
candidates = [c for c in candidates if c.title]
candidates = sorted(candidates, key=lambda c: c.prior_prob, reverse=True)
else:
candidates = []
title = comps[-1].split(",")
if title[0] == "-1":
title = ",".join(title[2:])
else:
title = ",".join(title[3:])
title = title.replace("&", "&")
if not title: # we use only mentions with valid referent entities
continue
mention_data[doc_name].append(dict(text=mention_text, candidates=candidates, title=title))
def find_coreference(target_mention, mention_list):
target_mention_text = target_mention["text"].lower()
ret = []
for mention in mention_list:
if not mention["candidates"] or mention["candidates"][0].title not in person_names:
continue
mention_text = mention["text"].lower()
if mention_text == target_mention_text:
continue
start_pos = mention_text.find(target_mention_text)
if start_pos == -1:
continue
end_pos = start_pos + len(target_mention_text) - 1
if (start_pos == 0 or mention_text[start_pos - 1] == " ") and (
end_pos == len(mention_text) - 1 or mention_text[end_pos + 1] == " "
):
ret.append(mention)
return ret
for _, mentions in mention_data.items():
for mention in mentions:
coref_mentions = find_coreference(mention, mentions)
if coref_mentions:
new_cands = defaultdict(int)
for coref_mention in coref_mentions:
for candidate in coref_mention["candidates"]:
new_cands[candidate.title] += candidate.prior_prob
for candidate_title in new_cands.keys():
new_cands[candidate_title] /= len(coref_mentions)
mention["candidates"] = sorted(
[Candidate(t, p) for (t, p) in new_cands.items()], key=lambda c: c.prior_prob, reverse=True
)
return mention_data
def load_ppr_candidates(documents, dataset_dir):
for document in documents:
target_file = os.path.join(dataset_dir, re.match(r"^\d*", document.id).group(0))
candidates = []
with open(target_file) as f:
for line in f:
if line.startswith("ENTITY"):
mention_text = line.split("\t")[7][9:]
candidates.append((mention_text, []))
elif line.startswith("CANDIDATE"):
uri = line.split("\t")[5][4:]
title = uri[29:].replace("_", " ")
candidates[-1][1].append(title)
cur = 0
for mention in document.mentions:
text = punc_remover.sub("", mention.text.lower())
while text != punc_remover.sub("", candidates[cur][0].lower()):
cur += 1
mention.candidates = [Candidate(title, -1) for title in candidates[cur][1]]
cur += 1
return documents
def convert_documents_to_features(
documents,
tokenizer,
entity_vocab,
mode,
document_split_mode,
max_seq_length,
max_candidate_length,
max_mention_length,
):
max_num_tokens = max_seq_length - 2
def generate_feature_dict(tokens, mentions, doc_start, doc_end):
all_tokens = [tokenizer.cls_token] + tokens[doc_start:doc_end] + [tokenizer.sep_token]
word_ids = np.array(tokenizer.convert_tokens_to_ids(all_tokens), dtype=np.int)
word_attention_mask = np.ones(len(all_tokens), dtype=np.int)
word_segment_ids = np.zeros(len(all_tokens), dtype=np.int)
target_mention_data = []
for start, end, mention in mentions:
if start >= doc_start and end <= doc_end:
candidates = [c.title for c in mention.candidates[:max_candidate_length]]
if mode == "train" and mention.title not in candidates:
continue
target_mention_data.append((start - doc_start, end - doc_start, mention, candidates))
entity_ids = np.empty(len(target_mention_data), dtype=np.int)
entity_attention_mask = np.ones(len(target_mention_data), dtype=np.int)
entity_segment_ids = np.zeros(len(target_mention_data), dtype=np.int)
entity_position_ids = np.full((len(target_mention_data), max_mention_length), -1, dtype=np.int)
entity_candidate_ids = np.zeros((len(target_mention_data), max_candidate_length), dtype=np.int)
for index, (start, end, mention, candidates) in enumerate(target_mention_data):
entity_ids[index] = entity_vocab[mention.title]
entity_position_ids[index][: end - start] = range(start + 1, end + 1) # +1 for [CLS]
entity_candidate_ids[index, : len(candidates)] = [entity_vocab[cand] for cand in candidates]
output_mentions = [mention for _, _, mention, _ in target_mention_data]
return (
output_mentions,
dict(
word_ids=word_ids,
word_segment_ids=word_segment_ids,
word_attention_mask=word_attention_mask,
entity_ids=entity_ids,
entity_position_ids=entity_position_ids,
entity_segment_ids=entity_segment_ids,
entity_attention_mask=entity_attention_mask,
entity_candidate_ids=entity_candidate_ids,
),
)
ret = []
for document in documents:
tokens = []
mention_data = []
cur = 0
for mention in document.mentions:
tokens += tokenizer.tokenize(" ".join(document.words[cur : mention.start]))
mention_tokens = tokenizer.tokenize(" ".join(document.words[mention.start : mention.end]))
mention_data.append((len(tokens), len(tokens) + len(mention_tokens), mention))
tokens += mention_tokens
cur = mention.end
tokens += tokenizer.tokenize(" ".join(document.words[cur:]))
if len(tokens) > max_num_tokens:
if document_split_mode == "simple":
in_mention_flag = [False] * len(tokens)
for n, obj in enumerate(mention_data):
in_mention_flag[obj[0] : obj[1]] = [n] * (obj[1] - obj[0])
num_splits = math.ceil(len(tokens) / max_num_tokens)
tokens_per_batch = math.ceil(len(tokens) / num_splits)
doc_start = 0
while True:
doc_end = min(len(tokens), doc_start + tokens_per_batch)
if mode != "train":
while True:
if (
doc_end == len(tokens)
or not in_mention_flag[doc_end - 1]
or (in_mention_flag[doc_end - 1] != in_mention_flag[doc_end])
):
break
doc_end -= 1
output_mentions, feature_dict = generate_feature_dict(tokens, mention_data, doc_start, doc_end)
if output_mentions:
ret.append(
InputFeatures(
document=document,
mentions=output_mentions,
target_mention_indices=range(len(output_mentions)),
**feature_dict
)
)
if doc_end == len(tokens):
break
doc_start = doc_end
else:
for mention_index, (start, end, mention) in enumerate(mention_data):
left_token_length = start
right_token_length = len(tokens) - end
mention_length = end - start
half_context_size = int((max_num_tokens - mention_length) / 2)
if left_token_length < right_token_length:
left_cxt_length = min(left_token_length, half_context_size)
right_cxt_length = min(right_token_length, max_num_tokens - left_cxt_length - mention_length)
else:
right_cxt_length = min(right_token_length, half_context_size)
left_cxt_length = min(left_token_length, max_num_tokens - right_cxt_length - mention_length)
input_mentions = (
[mention_data[mention_index]] + mention_data[:mention_index] + mention_data[mention_index + 1 :]
)
output_mentions, feature_dict = generate_feature_dict(
tokens, input_mentions, start - left_cxt_length, end + right_cxt_length
)
ret.append(
InputFeatures(
document=document, mentions=output_mentions, target_mention_indices=[0], **feature_dict
)
)
else:
output_mentions, feature_dict = generate_feature_dict(tokens, mention_data, 0, len(tokens))
ret.append(
InputFeatures(
document=document,
mentions=output_mentions,
target_mention_indices=range(len(output_mentions)),
**feature_dict
)
)
return ret
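# --- Editor's hedged usage sketch (not part of the original file) ---
# Loading the benchmark documents handled above. "data/entity-disambiguation"
# is a placeholder directory that would need to contain the AIDA/WNED files
# named in EntityDisambiguationDataset.__init__.
def _example_load_dataset(dataset_dir="data/entity-disambiguation"):
    """Illustrative only: load the datasets and inspect the first test-A document."""
    dataset = EntityDisambiguationDataset(dataset_dir)
    document = dataset.test_a[0]
    for mention in document.mentions:
        # Each Mention carries its gold title plus prior-probability-ranked candidates.
        top = mention.candidates[0].title if mention.candidates else None
        print(mention.text, "->", mention.title, "(top candidate:", top, ")")
    return dataset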
| 39.588621
| 120
| 0.569368
|
becdd5f65cb0b3e70266abfc15a4dbdc41f63157
| 1,167
|
py
|
Python
|
test/test_deployment_template_resource.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
test/test_deployment_template_resource.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
test/test_deployment_template_resource.py
|
cvent/octopus-deploy-api-client
|
0e03e842e1beb29b132776aee077df570b88366a
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Octopus Server API
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 2019.6.7+Branch.tags-2019.6.7.Sha.aa18dc6809953218c66f57eff7d26481d9b23d6a
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import octopus_deploy_swagger_client
from octopus_deploy_swagger_client.models.deployment_template_resource import DeploymentTemplateResource # noqa: E501
from octopus_deploy_swagger_client.rest import ApiException
class TestDeploymentTemplateResource(unittest.TestCase):
"""DeploymentTemplateResource unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testDeploymentTemplateResource(self):
"""Test DeploymentTemplateResource"""
# FIXME: construct object with mandatory attributes with example values
# model = octopus_deploy_swagger_client.models.deployment_template_resource.DeploymentTemplateResource() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 28.463415
| 126
| 0.760069
|
30d33ee88e62767835fb63e2577a3a5c0cfa9e16
| 131,771
|
py
|
Python
|
venv/Lib/site-packages/sqlalchemy/orm/session.py
|
YunJaePark3908/BaseAPIServer
|
17ab922917541406a3c2d75b428614ce97152a16
|
[
"Apache-2.0"
] | 2
|
2021-02-20T22:43:47.000Z
|
2021-05-06T03:43:20.000Z
|
venv/Lib/site-packages/sqlalchemy/orm/session.py
|
YunJaePark3908/BaseAPIServer
|
17ab922917541406a3c2d75b428614ce97152a16
|
[
"Apache-2.0"
] | 8
|
2021-03-26T19:13:07.000Z
|
2021-04-19T18:34:33.000Z
|
venv/Lib/site-packages/sqlalchemy/orm/session.py
|
YunJaePark3908/BaseAPIServer
|
17ab922917541406a3c2d75b428614ce97152a16
|
[
"Apache-2.0"
] | 3
|
2021-11-30T11:10:26.000Z
|
2021-12-08T05:59:31.000Z
|
# orm/session.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provides the Session class and related utilities."""
import itertools
import sys
import weakref
from . import attributes
from . import exc
from . import identity
from . import loading
from . import persistence
from . import query
from . import state as statelib
from .base import _class_to_mapper
from .base import _none_set
from .base import _state_mapper
from .base import instance_str
from .base import object_mapper
from .base import object_state
from .base import state_str
from .deprecated_interfaces import SessionExtension
from .unitofwork import UOWTransaction
from .. import engine
from .. import exc as sa_exc
from .. import sql
from .. import util
from ..inspection import inspect
from ..sql import expression
from ..sql import util as sql_util
__all__ = ["Session", "SessionTransaction", "SessionExtension", "sessionmaker"]
_sessions = weakref.WeakValueDictionary()
"""Weak-referencing dictionary of :class:`.Session` objects."""
def _state_session(state):
"""Given an :class:`.InstanceState`, return the :class:`.Session`
associated, if any.
"""
if state.session_id:
try:
return _sessions[state.session_id]
except KeyError:
pass
return None
class _SessionClassMethods(object):
"""Class-level methods for :class:`.Session`, :class:`.sessionmaker`."""
@classmethod
@util.deprecated(
"1.3",
"The :meth:`.Session.close_all` method is deprecated and will be "
"removed in a future release. Please refer to "
":func:`.session.close_all_sessions`.",
)
def close_all(cls):
"""Close *all* sessions in memory."""
close_all_sessions()
@classmethod
@util.dependencies("sqlalchemy.orm.util")
def identity_key(cls, orm_util, *args, **kwargs):
"""Return an identity key.
This is an alias of :func:`.util.identity_key`.
"""
return orm_util.identity_key(*args, **kwargs)
@classmethod
def object_session(cls, instance):
"""Return the :class:`.Session` to which an object belongs.
This is an alias of :func:`.object_session`.
"""
return object_session(instance)
ACTIVE = util.symbol("ACTIVE")
PREPARED = util.symbol("PREPARED")
COMMITTED = util.symbol("COMMITTED")
DEACTIVE = util.symbol("DEACTIVE")
CLOSED = util.symbol("CLOSED")
class SessionTransaction(object):
"""A :class:`.Session`-level transaction.
:class:`.SessionTransaction` is a mostly behind-the-scenes object
not normally referenced directly by application code. It coordinates
among multiple :class:`_engine.Connection` objects, maintaining a database
transaction for each one individually, committing or rolling them
back all at once. It also provides optional two-phase commit behavior
which can augment this coordination operation.
The :attr:`.Session.transaction` attribute of :class:`.Session`
refers to the current :class:`.SessionTransaction` object in use, if any.
The :attr:`.SessionTransaction.parent` attribute refers to the parent
:class:`.SessionTransaction` in the stack of :class:`.SessionTransaction`
objects. If this attribute is ``None``, then this is the top of the stack.
If non-``None``, then this :class:`.SessionTransaction` refers either
to a so-called "subtransaction" or a "nested" transaction. A
"subtransaction" is a scoping concept that demarcates an inner portion
of the outermost "real" transaction. A nested transaction, which
is indicated when the :attr:`.SessionTransaction.nested`
attribute is also True, indicates that this :class:`.SessionTransaction`
corresponds to a SAVEPOINT.
**Life Cycle**
A :class:`.SessionTransaction` is associated with a :class:`.Session` in
its default mode of ``autocommit=False`` immediately, associated
with no database connections. As the :class:`.Session` is called upon
to emit SQL on behalf of various :class:`_engine.Engine` or
:class:`_engine.Connection`
objects, a corresponding :class:`_engine.Connection` and associated
:class:`.Transaction` is added to a collection within the
:class:`.SessionTransaction` object, becoming one of the
connection/transaction pairs maintained by the
:class:`.SessionTransaction`. The start of a :class:`.SessionTransaction`
can be tracked using the :meth:`.SessionEvents.after_transaction_create`
event.
The lifespan of the :class:`.SessionTransaction` ends when the
:meth:`.Session.commit`, :meth:`.Session.rollback` or
:meth:`.Session.close` methods are called. At this point, the
:class:`.SessionTransaction` removes its association with its parent
:class:`.Session`. A :class:`.Session` that is in ``autocommit=False``
mode will create a new :class:`.SessionTransaction` to replace it
immediately, whereas a :class:`.Session` that's in ``autocommit=True``
mode will remain without a :class:`.SessionTransaction` until the
:meth:`.Session.begin` method is called. The end of a
:class:`.SessionTransaction` can be tracked using the
:meth:`.SessionEvents.after_transaction_end` event.
**Nesting and Subtransactions**
Another detail of :class:`.SessionTransaction` behavior is that it is
capable of "nesting". This means that the :meth:`.Session.begin` method
can be called while an existing :class:`.SessionTransaction` is already
present, producing a new :class:`.SessionTransaction` that temporarily
replaces the parent :class:`.SessionTransaction`. When a
:class:`.SessionTransaction` is produced as nested, it assigns itself to
the :attr:`.Session.transaction` attribute, and it additionally will assign
the previous :class:`.SessionTransaction` to its :attr:`.Session.parent`
attribute. The behavior is effectively a
stack, where :attr:`.Session.transaction` refers to the current head of
the stack, and the :attr:`.SessionTransaction.parent` attribute allows
traversal up the stack until :attr:`.SessionTransaction.parent` is
``None``, indicating the top of the stack.
When the scope of :class:`.SessionTransaction` is ended via
:meth:`.Session.commit` or :meth:`.Session.rollback`, it restores its
parent :class:`.SessionTransaction` back onto the
:attr:`.Session.transaction` attribute.
The purpose of this stack is to allow nesting of
:meth:`.Session.rollback` or :meth:`.Session.commit` calls in context
with various flavors of :meth:`.Session.begin`. This nesting behavior
applies to when :meth:`.Session.begin_nested` is used to emit a
SAVEPOINT transaction, and is also used to produce a so-called
"subtransaction" which allows a block of code to use a
begin/rollback/commit sequence regardless of whether or not its enclosing
code block has begun a transaction. The :meth:`.flush` method, whether
called explicitly or via autoflush, is the primary consumer of the
"subtransaction" feature, in that it wishes to guarantee that it works
within in a transaction block regardless of whether or not the
:class:`.Session` is in transactional mode when the method is called.
Note that the flush process that occurs within the "autoflush" feature
as well as when the :meth:`.Session.flush` method is used **always**
creates a :class:`.SessionTransaction` object. This object is normally
a subtransaction, unless the :class:`.Session` is in autocommit mode
and no transaction exists at all, in which case it's the outermost
transaction. Any event-handling logic or other inspection logic
needs to take into account whether a :class:`.SessionTransaction`
is the outermost transaction, a subtransaction, or a "nested" / SAVEPOINT
transaction.
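As a brief illustrative sketch of the stack behavior described above
(assuming a :class:`.Session` ``session`` in its default
``autocommit=False`` mode)::

    outer = session.transaction               # current head of the stack
    inner = session.begin(subtransactions=True)
    assert inner.parent is outer
    inner.commit()                             # pops the stack back to ``outer``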
.. seealso::
:meth:`.Session.rollback`
:meth:`.Session.commit`
:meth:`.Session.begin`
:meth:`.Session.begin_nested`
:attr:`.Session.is_active`
:meth:`.SessionEvents.after_transaction_create`
:meth:`.SessionEvents.after_transaction_end`
:meth:`.SessionEvents.after_commit`
:meth:`.SessionEvents.after_rollback`
:meth:`.SessionEvents.after_soft_rollback`
"""
_rollback_exception = None
def __init__(self, session, parent=None, nested=False):
self.session = session
self._connections = {}
self._parent = parent
self.nested = nested
self._state = ACTIVE
if not parent and nested:
raise sa_exc.InvalidRequestError(
"Can't start a SAVEPOINT transaction when no existing "
"transaction is in progress"
)
if self.session._enable_transaction_accounting:
self._take_snapshot()
self.session.dispatch.after_transaction_create(self.session, self)
@property
def parent(self):
"""The parent :class:`.SessionTransaction` of this
:class:`.SessionTransaction`.
If this attribute is ``None``, indicates this
:class:`.SessionTransaction` is at the top of the stack, and
corresponds to a real "COMMIT"/"ROLLBACK"
block. If non-``None``, then this is either a "subtransaction"
or a "nested" / SAVEPOINT transaction. If the
:attr:`.SessionTransaction.nested` attribute is ``True``, then
this is a SAVEPOINT, and if ``False``, indicates this is a subtransaction.
.. versionadded:: 1.0.16 - use ._parent for previous versions
"""
return self._parent
nested = False
"""Indicates if this is a nested, or SAVEPOINT, transaction.
When :attr:`.SessionTransaction.nested` is True, it is expected
that :attr:`.SessionTransaction.parent` will be non-``None`` as well.
"""
@property
def is_active(self):
return self.session is not None and self._state is ACTIVE
def _assert_active(
self,
prepared_ok=False,
rollback_ok=False,
deactive_ok=False,
closed_msg="This transaction is closed",
):
if self._state is COMMITTED:
raise sa_exc.InvalidRequestError(
"This session is in 'committed' state; no further "
"SQL can be emitted within this transaction."
)
elif self._state is PREPARED:
if not prepared_ok:
raise sa_exc.InvalidRequestError(
"This session is in 'prepared' state; no further "
"SQL can be emitted within this transaction."
)
elif self._state is DEACTIVE:
if not deactive_ok and not rollback_ok:
if self._rollback_exception:
raise sa_exc.InvalidRequestError(
"This Session's transaction has been rolled back "
"due to a previous exception during flush."
" To begin a new transaction with this Session, "
"first issue Session.rollback()."
" Original exception was: %s"
% self._rollback_exception,
code="7s2a",
)
elif not deactive_ok:
raise sa_exc.InvalidRequestError(
"This session is in 'inactive' state, due to the "
"SQL transaction being rolled back; no further "
"SQL can be emitted within this transaction."
)
elif self._state is CLOSED:
raise sa_exc.ResourceClosedError(closed_msg)
@property
def _is_transaction_boundary(self):
return self.nested or not self._parent
def connection(self, bindkey, execution_options=None, **kwargs):
self._assert_active()
bind = self.session.get_bind(bindkey, **kwargs)
return self._connection_for_bind(bind, execution_options)
def _begin(self, nested=False):
self._assert_active()
return SessionTransaction(self.session, self, nested=nested)
def _iterate_self_and_parents(self, upto=None):
current = self
result = ()
while current:
result += (current,)
if current._parent is upto:
break
elif current._parent is None:
raise sa_exc.InvalidRequestError(
"Transaction %s is not on the active transaction list"
% (upto)
)
else:
current = current._parent
return result
def _take_snapshot(self):
if not self._is_transaction_boundary:
self._new = self._parent._new
self._deleted = self._parent._deleted
self._dirty = self._parent._dirty
self._key_switches = self._parent._key_switches
return
if not self.session._flushing:
self.session.flush()
self._new = weakref.WeakKeyDictionary()
self._deleted = weakref.WeakKeyDictionary()
self._dirty = weakref.WeakKeyDictionary()
self._key_switches = weakref.WeakKeyDictionary()
def _restore_snapshot(self, dirty_only=False):
"""Restore the restoration state taken before a transaction began.
Corresponds to a rollback.
"""
assert self._is_transaction_boundary
to_expunge = set(self._new).union(self.session._new)
self.session._expunge_states(to_expunge, to_transient=True)
for s, (oldkey, newkey) in self._key_switches.items():
# we probably can do this conditionally based on
# if we expunged or not, but safe_discard does that anyway
self.session.identity_map.safe_discard(s)
# restore the old key
s.key = oldkey
# now restore the object, but only if we didn't expunge
if s not in to_expunge:
self.session.identity_map.replace(s)
for s in set(self._deleted).union(self.session._deleted):
self.session._update_impl(s, revert_deletion=True)
assert not self.session._deleted
for s in self.session.identity_map.all_states():
if not dirty_only or s.modified or s in self._dirty:
s._expire(s.dict, self.session.identity_map._modified)
def _remove_snapshot(self):
"""Remove the restoration state taken before a transaction began.
Corresponds to a commit.
"""
assert self._is_transaction_boundary
if not self.nested and self.session.expire_on_commit:
for s in self.session.identity_map.all_states():
s._expire(s.dict, self.session.identity_map._modified)
statelib.InstanceState._detach_states(
list(self._deleted), self.session
)
self._deleted.clear()
elif self.nested:
self._parent._new.update(self._new)
self._parent._dirty.update(self._dirty)
self._parent._deleted.update(self._deleted)
self._parent._key_switches.update(self._key_switches)
def _connection_for_bind(self, bind, execution_options):
self._assert_active()
if bind in self._connections:
if execution_options:
util.warn(
"Connection is already established for the "
"given bind; execution_options ignored"
)
return self._connections[bind][0]
local_connect = False
if self._parent:
conn = self._parent._connection_for_bind(bind, execution_options)
if not self.nested:
return conn
else:
if isinstance(bind, engine.Connection):
conn = bind
if conn.engine in self._connections:
raise sa_exc.InvalidRequestError(
"Session already has a Connection associated for the "
"given Connection's Engine"
)
else:
conn = bind._contextual_connect()
local_connect = True
try:
if execution_options:
conn = conn.execution_options(**execution_options)
if self.session.twophase and self._parent is None:
transaction = conn.begin_twophase()
elif self.nested:
transaction = conn.begin_nested()
else:
transaction = conn.begin()
except:
# connection will not be associated with this Session;
# close it immediately so that it isn't closed under GC
if local_connect:
conn.close()
raise
else:
self._connections[conn] = self._connections[conn.engine] = (
conn,
transaction,
conn is not bind,
)
self.session.dispatch.after_begin(self.session, self, conn)
return conn
def prepare(self):
if self._parent is not None or not self.session.twophase:
raise sa_exc.InvalidRequestError(
"'twophase' mode not enabled, or not root transaction; "
"can't prepare."
)
self._prepare_impl()
def _prepare_impl(self):
self._assert_active()
if self._parent is None or self.nested:
self.session.dispatch.before_commit(self.session)
stx = self.session.transaction
if stx is not self:
for subtransaction in stx._iterate_self_and_parents(upto=self):
subtransaction.commit()
if not self.session._flushing:
for _flush_guard in range(100):
if self.session._is_clean():
break
self.session.flush()
else:
raise exc.FlushError(
"Over 100 subsequent flushes have occurred within "
"session.commit() - is an after_flush() hook "
"creating new objects?"
)
if self._parent is None and self.session.twophase:
try:
for t in set(self._connections.values()):
t[1].prepare()
except:
with util.safe_reraise():
self.rollback()
self._state = PREPARED
def commit(self):
self._assert_active(prepared_ok=True)
if self._state is not PREPARED:
self._prepare_impl()
if self._parent is None or self.nested:
for t in set(self._connections.values()):
t[1].commit()
self._state = COMMITTED
self.session.dispatch.after_commit(self.session)
if self.session._enable_transaction_accounting:
self._remove_snapshot()
self.close()
return self._parent
def rollback(self, _capture_exception=False):
self._assert_active(prepared_ok=True, rollback_ok=True)
stx = self.session.transaction
if stx is not self:
for subtransaction in stx._iterate_self_and_parents(upto=self):
subtransaction.close()
boundary = self
rollback_err = None
if self._state in (ACTIVE, PREPARED):
for transaction in self._iterate_self_and_parents():
if transaction._parent is None or transaction.nested:
try:
for t in set(transaction._connections.values()):
t[1].rollback()
transaction._state = DEACTIVE
self.session.dispatch.after_rollback(self.session)
except:
rollback_err = sys.exc_info()
finally:
transaction._state = DEACTIVE
if self.session._enable_transaction_accounting:
transaction._restore_snapshot(
dirty_only=transaction.nested
)
boundary = transaction
break
else:
transaction._state = DEACTIVE
sess = self.session
if (
not rollback_err
and sess._enable_transaction_accounting
and not sess._is_clean()
):
# if items were added, deleted, or mutated
# here, we need to re-restore the snapshot
util.warn(
"Session's state has been changed on "
"a non-active transaction - this state "
"will be discarded."
)
boundary._restore_snapshot(dirty_only=boundary.nested)
self.close()
if self._parent and _capture_exception:
self._parent._rollback_exception = sys.exc_info()[1]
if rollback_err:
util.raise_(rollback_err[1], with_traceback=rollback_err[2])
sess.dispatch.after_soft_rollback(sess, self)
return self._parent
def close(self, invalidate=False):
self.session.transaction = self._parent
if self._parent is None:
for connection, transaction, autoclose in set(
self._connections.values()
):
if invalidate:
connection.invalidate()
if autoclose:
connection.close()
else:
transaction.close()
self._state = CLOSED
self.session.dispatch.after_transaction_end(self.session, self)
if self._parent is None:
if not self.session.autocommit:
self.session.begin()
self.session = None
self._connections = None
def __enter__(self):
return self
def __exit__(self, type_, value, traceback):
self._assert_active(deactive_ok=True, prepared_ok=True)
if self.session.transaction is None:
return
if type_ is None:
try:
self.commit()
except:
with util.safe_reraise():
self.rollback()
else:
self.rollback()
class Session(_SessionClassMethods):
"""Manages persistence operations for ORM-mapped objects.
The Session's usage paradigm is described at :doc:`/orm/session`.
"""
public_methods = (
"__contains__",
"__iter__",
"add",
"add_all",
"begin",
"begin_nested",
"close",
"commit",
"connection",
"delete",
"execute",
"expire",
"expire_all",
"expunge",
"expunge_all",
"flush",
"get_bind",
"is_modified",
"bulk_save_objects",
"bulk_insert_mappings",
"bulk_update_mappings",
"merge",
"query",
"refresh",
"rollback",
"scalar",
)
@util.deprecated_params(
weak_identity_map=(
"1.0",
"The :paramref:`.Session.weak_identity_map` parameter as well as "
"the strong-referencing identity map are deprecated, and will be "
"removed in a future release. For the use case where objects "
"present in a :class:`.Session` need to be automatically strong "
"referenced, see the recipe at "
":ref:`session_referencing_behavior` for an event-based approach "
"to maintaining strong identity references. ",
),
_enable_transaction_accounting=(
"0.7",
"The :paramref:`.Session._enable_transaction_accounting` "
"parameter is deprecated and will be removed in a future release.",
),
extension=(
"0.7",
":class:`.SessionExtension` is deprecated in favor of the "
":class:`.SessionEvents` listener interface. The "
":paramref:`.Session.extension` parameter will be "
"removed in a future release.",
),
)
def __init__(
self,
bind=None,
autoflush=True,
expire_on_commit=True,
_enable_transaction_accounting=True,
autocommit=False,
twophase=False,
weak_identity_map=None,
binds=None,
extension=None,
enable_baked_queries=True,
info=None,
query_cls=None,
):
r"""Construct a new Session.
See also the :class:`.sessionmaker` function which is used to
generate a :class:`.Session`-producing callable with a given
set of arguments.
:param autocommit:
.. warning::
The autocommit flag is **not for general use**, and if it is
used, queries should only be invoked within the span of a
:meth:`.Session.begin` / :meth:`.Session.commit` pair. Executing
queries outside of a demarcated transaction is a legacy mode
of usage, and can in some cases lead to concurrent connection
checkouts.
Defaults to ``False``. When ``True``, the
:class:`.Session` does not keep a persistent transaction running,
and will acquire connections from the engine on an as-needed basis,
returning them immediately after their use. Flushes will begin and
commit (or possibly rollback) their own transaction if no
transaction is present. When using this mode, the
:meth:`.Session.begin` method is used to explicitly start
transactions.
.. seealso::
:ref:`session_autocommit`
:param autoflush: When ``True``, all query operations will issue a
:meth:`~.Session.flush` call to this ``Session`` before proceeding.
This is a convenience feature so that :meth:`~.Session.flush` need
not be called repeatedly in order for database queries to retrieve
results. It's typical that ``autoflush`` is used in conjunction
with ``autocommit=False``. In this scenario, explicit calls to
:meth:`~.Session.flush` are rarely needed; you usually only need to
call :meth:`~.Session.commit` (which flushes) to finalize changes.
:param bind: An optional :class:`_engine.Engine` or
:class:`_engine.Connection` to
which this ``Session`` should be bound. When specified, all SQL
operations performed by this session will execute via this
connectable.
:param binds: A dictionary which may specify any number of
:class:`_engine.Engine` or :class:`_engine.Connection`
objects as the source of
connectivity for SQL operations on a per-entity basis. The keys
of the dictionary consist of any series of mapped classes,
arbitrary Python classes that are bases for mapped classes,
:class:`_schema.Table` objects and :class:`_orm.Mapper` objects.
The
values of the dictionary are then instances of
:class:`_engine.Engine`
or less commonly :class:`_engine.Connection` objects.
Operations which
proceed relative to a particular mapped class will consult this
dictionary for the closest matching entity in order to determine
which :class:`_engine.Engine` should be used for a particular SQL
operation. The complete heuristics for resolution are
described at :meth:`.Session.get_bind`. Usage looks like::
Session = sessionmaker(binds={
SomeMappedClass: create_engine('postgresql://engine1'),
SomeDeclarativeBase: create_engine('postgresql://engine2'),
some_mapper: create_engine('postgresql://engine3'),
some_table: create_engine('postgresql://engine4'),
})
.. seealso::
:ref:`session_partitioning`
:meth:`.Session.bind_mapper`
:meth:`.Session.bind_table`
:meth:`.Session.get_bind`
:param \class_: Specify an alternate class other than
``sqlalchemy.orm.session.Session`` which should be used by the
returned class. This is the only argument that is local to the
:class:`.sessionmaker` function, and is not sent directly to the
constructor for ``Session``.
:param enable_baked_queries: defaults to ``True``. A flag consumed
by the :mod:`sqlalchemy.ext.baked` extension to determine if
"baked queries" should be cached, as is the normal operation
of this extension. When set to ``False``, all caching is disabled,
including baked queries defined by the calling application as
well as those used internally. Setting this flag to ``False``
can significantly reduce memory use, however will also degrade
performance for those areas that make use of baked queries
(such as relationship loaders). Additionally, baked query
logic in the calling application or potentially within the ORM
that may be malfunctioning due to cache key collisions or similar
can be flagged by observing if this flag resolves the issue.
.. versionadded:: 1.2
:param _enable_transaction_accounting: A
legacy-only flag which when ``False`` disables *all* 0.5-style
object accounting on transaction boundaries.
:param expire_on_commit: Defaults to ``True``. When ``True``, all
instances will be fully expired after each :meth:`~.commit`,
so that all attribute/object access subsequent to a completed
transaction will load from the most recent database state.
.. seealso::
:ref:`session_committing`
:param extension: An optional
:class:`~.SessionExtension` instance, or a list
of such instances, which will receive pre- and post- commit and
flush events, as well as a post-rollback event.
:param info: optional dictionary of arbitrary data to be associated
with this :class:`.Session`. Is available via the
:attr:`.Session.info` attribute. Note the dictionary is copied at
construction time so that modifications to the per-
:class:`.Session` dictionary will be local to that
:class:`.Session`.
.. versionadded:: 0.9.0
:param query_cls: Class which should be used to create new Query
objects, as returned by the :meth:`~.Session.query` method.
Defaults to :class:`_query.Query`.
:param twophase: When ``True``, all transactions will be started as
a "two phase" transaction, i.e. using the "two phase" semantics
of the database in use along with an XID. During a
:meth:`~.commit`, after :meth:`~.flush` has been issued for all
attached databases, the :meth:`~.TwoPhaseTransaction.prepare`
method on each database's :class:`.TwoPhaseTransaction` will be
called. This allows each database to roll back the entire
transaction, before each transaction is committed.
:param weak_identity_map: Defaults to ``True`` - when set to
``False``, objects placed in the :class:`.Session` will be
strongly referenced until explicitly removed or the
:class:`.Session` is closed.
"""
if weak_identity_map in (True, None):
self._identity_cls = identity.WeakInstanceDict
else:
self._identity_cls = identity.StrongInstanceDict
self.identity_map = self._identity_cls()
self._new = {} # InstanceState->object, strong refs object
self._deleted = {} # same
self.bind = bind
self.__binds = {}
self._flushing = False
self._warn_on_events = False
self.transaction = None
self.hash_key = _new_sessionid()
self.autoflush = autoflush
self.autocommit = autocommit
self.expire_on_commit = expire_on_commit
self.enable_baked_queries = enable_baked_queries
self._enable_transaction_accounting = _enable_transaction_accounting
self.twophase = twophase
self._query_cls = query_cls if query_cls else query.Query
if info:
self.info.update(info)
if extension:
for ext in util.to_list(extension):
SessionExtension._adapt_listener(self, ext)
if binds is not None:
for key, bind in binds.items():
self._add_bind(key, bind)
if not self.autocommit:
self.begin()
_sessions[self.hash_key] = self
connection_callable = None
transaction = None
"""The current active or inactive :class:`.SessionTransaction`."""
@util.memoized_property
def info(self):
"""A user-modifiable dictionary.
The initial value of this dictionary can be populated using the
``info`` argument to the :class:`.Session` constructor or
:class:`.sessionmaker` constructor or factory methods. The dictionary
here is always local to this :class:`.Session` and can be modified
independently of all other :class:`.Session` objects.
.. versionadded:: 0.9.0
"""
return {}
def begin(self, subtransactions=False, nested=False):
"""Begin a transaction on this :class:`.Session`.
.. warning::
The :meth:`.Session.begin` method is part of a larger pattern
of use with the :class:`.Session` known as **autocommit mode**.
This is essentially a **legacy mode of use** and is
not necessary for new applications. The :class:`.Session`
normally handles the work of "begin" transparently, which in
turn relies upon the Python DBAPI to transparently "begin"
transactions; there is **no need to explicitly begin transactions**
when using modern :class:`.Session` programming patterns.
In its default mode of ``autocommit=False``, the
:class:`.Session` does all of its work within
the context of a transaction, so as soon as you call
:meth:`.Session.commit`, the next transaction is implicitly
started when the next database operation is invoked. See
:ref:`session_autocommit` for further background.
The method will raise an error if this :class:`.Session` is already
inside of a transaction, unless
:paramref:`~.Session.begin.subtransactions` or
:paramref:`~.Session.begin.nested` are specified. A "subtransaction"
is essentially a code embedding pattern that does not affect the
transactional state of the database connection unless a rollback is
emitted, in which case the whole transaction is rolled back. For
documentation on subtransactions, please see
:ref:`session_subtransactions`.
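        As a point of reference only, a minimal sketch of the legacy
        autocommit pattern described above (``engine`` and ``SomeClass`` are
        illustrative assumptions, not part of this module)::

            from sqlalchemy.orm import Session

            session = Session(bind=engine, autocommit=True)

            with session.begin():
                session.add(SomeClass(data="some data"))
            # the block commits on success, rolls back on exception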
:param subtransactions: if True, indicates that this
:meth:`~.Session.begin` can create a "subtransaction".
:param nested: if True, begins a SAVEPOINT transaction and is
equivalent to calling :meth:`~.Session.begin_nested`. For
documentation on SAVEPOINT transactions, please see
:ref:`session_begin_nested`.
:return: the :class:`.SessionTransaction` object. Note that
:class:`.SessionTransaction`
acts as a Python context manager, allowing :meth:`.Session.begin`
to be used in a "with" block. See :ref:`session_autocommit` for
an example.
.. seealso::
:ref:`session_autocommit`
:meth:`.Session.begin_nested`
"""
if self.transaction is not None:
if subtransactions or nested:
self.transaction = self.transaction._begin(nested=nested)
else:
raise sa_exc.InvalidRequestError(
"A transaction is already begun. Use "
"subtransactions=True to allow subtransactions."
)
else:
self.transaction = SessionTransaction(self, nested=nested)
return self.transaction # needed for __enter__/__exit__ hook
def begin_nested(self):
"""Begin a "nested" transaction on this Session, e.g. SAVEPOINT.
The target database(s) and associated drivers must support SQL
SAVEPOINT for this method to function correctly.
For documentation on SAVEPOINT
transactions, please see :ref:`session_begin_nested`.
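        A minimal sketch of the SAVEPOINT pattern (``u1`` and ``u2`` are
        assumed to be instances of a mapped class, and the inner flush is
        assumed to possibly violate a constraint)::

            from sqlalchemy.exc import IntegrityError

            session.add(u1)

            try:
                with session.begin_nested():  # establishes a SAVEPOINT
                    session.add(u2)
                    session.flush()           # may raise IntegrityError
            except IntegrityError:
                pass                          # only u2 is rolled back

            session.commit()                  # u1 is committed normally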
:return: the :class:`.SessionTransaction` object. Note that
:class:`.SessionTransaction` acts as a context manager, allowing
:meth:`.Session.begin_nested` to be used in a "with" block.
See :ref:`session_begin_nested` for a usage example.
.. seealso::
:ref:`session_begin_nested`
:ref:`pysqlite_serializable` - special workarounds required
with the SQLite driver in order for SAVEPOINT to work
correctly.
"""
return self.begin(nested=True)
def rollback(self):
"""Rollback the current transaction in progress.
If no transaction is in progress, this method is a pass-through.
This method rolls back the current transaction or nested transaction
regardless of subtransactions being in effect. All subtransactions up
to the first real transaction are closed. Subtransactions occur when
:meth:`.begin` is called multiple times.
.. seealso::
:ref:`session_rollback`
"""
if self.transaction is None:
pass
else:
self.transaction.rollback()
def commit(self):
"""Flush pending changes and commit the current transaction.
If no transaction is in progress, this method raises an
:exc:`~sqlalchemy.exc.InvalidRequestError`.
By default, the :class:`.Session` also expires all database
loaded state on all ORM-managed attributes after transaction commit.
This is so that subsequent operations load the most recent
data from the database. This behavior can be disabled using
the ``expire_on_commit=False`` option to :class:`.sessionmaker` or
the :class:`.Session` constructor.
If a subtransaction is in effect (which occurs when begin() is called
multiple times), the subtransaction will be closed, and the next call
to ``commit()`` will operate on the enclosing transaction.
When using the :class:`.Session` in its default mode of
``autocommit=False``, a new transaction will
be begun immediately after the commit, but note that the newly begun
transaction does *not* use any connection resources until the first
SQL is actually emitted.
.. seealso::
:ref:`session_committing`
"""
if self.transaction is None:
if not self.autocommit:
self.begin()
else:
raise sa_exc.InvalidRequestError("No transaction is begun.")
self.transaction.commit()
def prepare(self):
"""Prepare the current transaction in progress for two phase commit.
If no transaction is in progress, this method raises an
:exc:`~sqlalchemy.exc.InvalidRequestError`.
Only root transactions of two phase sessions can be prepared. If the
current transaction is not such, an
:exc:`~sqlalchemy.exc.InvalidRequestError` is raised.
"""
if self.transaction is None:
if not self.autocommit:
self.begin()
else:
raise sa_exc.InvalidRequestError("No transaction is begun.")
self.transaction.prepare()
def connection(
self,
mapper=None,
clause=None,
bind=None,
close_with_result=False,
execution_options=None,
**kw
):
r"""Return a :class:`_engine.Connection` object corresponding to this
:class:`.Session` object's transactional state.
If this :class:`.Session` is configured with ``autocommit=False``,
either the :class:`_engine.Connection` corresponding to the current
transaction is returned, or if no transaction is in progress, a new
one is begun and the :class:`_engine.Connection`
returned (note that no
transactional state is established with the DBAPI until the first
SQL statement is emitted).
Alternatively, if this :class:`.Session` is configured with
``autocommit=True``, an ad-hoc :class:`_engine.Connection` is returned
using :meth:`_engine.Engine.connect` on the underlying
:class:`_engine.Engine`.
Ambiguity in multi-bind or unbound :class:`.Session` objects can be
resolved through any of the optional keyword arguments. This
ultimately makes use of the :meth:`.get_bind` method for resolution.
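        For illustration, a minimal sketch of acquiring the transaction's
        connection to run ad-hoc SQL (the table name is an illustrative
        assumption)::

            from sqlalchemy import text

            connection = session.connection()
            result = connection.execute(
                text("SELECT count(*) FROM user_account"))
            print(result.scalar())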
:param bind:
Optional :class:`_engine.Engine` to be used as the bind. If
this engine is already involved in an ongoing transaction,
that connection will be used. This argument takes precedence
over ``mapper``, ``clause``.
:param mapper:
Optional :func:`.mapper` mapped class, used to identify
the appropriate bind. This argument takes precedence over
``clause``.
:param clause:
A :class:`_expression.ClauseElement` (i.e.
:func:`_expression.select`,
:func:`_expression.text`,
etc.) which will be used to locate a bind, if a bind
cannot otherwise be identified.
:param close_with_result: Passed to :meth:`_engine.Engine.connect`,
indicating the :class:`_engine.Connection` should be considered
"single use", automatically closing when the first result set is
closed. This flag only has an effect if this :class:`.Session` is
configured with ``autocommit=True`` and does not already have a
transaction in progress.
:param execution_options: a dictionary of execution options that will
be passed to :meth:`_engine.Connection.execution_options`, **when the
connection is first procured only**. If the connection is already
present within the :class:`.Session`, a warning is emitted and
the arguments are ignored.
.. versionadded:: 0.9.9
.. seealso::
:ref:`session_transaction_isolation`
:param \**kw:
Additional keyword arguments are sent to :meth:`get_bind`,
allowing additional arguments to be passed to custom
implementations of :meth:`get_bind`.
"""
if bind is None:
bind = self.get_bind(mapper, clause=clause, **kw)
return self._connection_for_bind(
bind,
close_with_result=close_with_result,
execution_options=execution_options,
)
def _connection_for_bind(self, engine, execution_options=None, **kw):
if self.transaction is not None:
return self.transaction._connection_for_bind(
engine, execution_options
)
else:
conn = engine._contextual_connect(**kw)
if execution_options:
conn = conn.execution_options(**execution_options)
return conn
def execute(self, clause, params=None, mapper=None, bind=None, **kw):
r"""Execute a SQL expression construct or string statement within
the current transaction.
Returns a :class:`_engine.ResultProxy` representing
results of the statement execution, in the same manner as that of an
:class:`_engine.Engine` or
:class:`_engine.Connection`.
E.g.::
result = session.execute(
user_table.select().where(user_table.c.id == 5)
)
:meth:`~.Session.execute` accepts any executable clause construct,
such as :func:`_expression.select`,
:func:`_expression.insert`,
:func:`_expression.update`,
:func:`_expression.delete`, and
:func:`_expression.text`. Plain SQL strings can be passed
as well, which in the case of :meth:`.Session.execute` only
will be interpreted the same as if it were passed via a
:func:`_expression.text` construct. That is, the following usage::
result = session.execute(
"SELECT * FROM user WHERE id=:param",
{"param":5}
)
is equivalent to::
from sqlalchemy import text
result = session.execute(
text("SELECT * FROM user WHERE id=:param"),
{"param":5}
)
The second positional argument to :meth:`.Session.execute` is an
optional parameter set. Similar to that of
:meth:`_engine.Connection.execute`, whether this is passed as a single
dictionary, or a sequence of dictionaries, determines whether the DBAPI
cursor's ``execute()`` or ``executemany()`` is used to execute the
statement. An INSERT construct may be invoked for a single row::
result = session.execute(
users.insert(), {"id": 7, "name": "somename"})
or for multiple rows::
result = session.execute(users.insert(), [
{"id": 7, "name": "somename7"},
{"id": 8, "name": "somename8"},
{"id": 9, "name": "somename9"}
])
The statement is executed within the current transactional context of
this :class:`.Session`. The :class:`_engine.Connection`
which is used
to execute the statement can also be acquired directly by
calling the :meth:`.Session.connection` method. Both methods use
a rule-based resolution scheme in order to determine the
:class:`_engine.Connection`,
which in the average case is derived directly
from the "bind" of the :class:`.Session` itself, and in other cases
can be based on the :func:`.mapper`
and :class:`_schema.Table` objects passed to the method; see the
documentation for :meth:`.Session.get_bind` for a full description of
this scheme.
The :meth:`.Session.execute` method does *not* invoke autoflush.
The :class:`_engine.ResultProxy` returned by the
:meth:`.Session.execute`
method is returned with the "close_with_result" flag set to true;
the significance of this flag is that if this :class:`.Session` is
autocommitting and does not have a transaction-dedicated
:class:`_engine.Connection` available, a temporary
:class:`_engine.Connection` is
established for the statement execution, which is closed (meaning,
returned to the connection pool) when the :class:`_engine.ResultProxy`
has
consumed all available data. This applies *only* when the
:class:`.Session` is configured with autocommit=True and no
transaction has been started.
:param clause:
An executable statement (i.e. an :class:`.Executable` expression
such as :func:`_expression.select`) or string SQL statement
to be executed.
:param params:
Optional dictionary, or list of dictionaries, containing
bound parameter values. If a single dictionary, single-row
execution occurs; if a list of dictionaries, an
"executemany" will be invoked. The keys in each dictionary
must correspond to parameter names present in the statement.
:param mapper:
Optional :func:`.mapper` or mapped class, used to identify
the appropriate bind. This argument takes precedence over
``clause`` when locating a bind. See :meth:`.Session.get_bind`
for more details.
:param bind:
Optional :class:`_engine.Engine` to be used as the bind. If
this engine is already involved in an ongoing transaction,
that connection will be used. This argument takes
precedence over ``mapper`` and ``clause`` when locating
a bind.
:param \**kw:
Additional keyword arguments are sent to :meth:`.Session.get_bind()`
to allow extensibility of "bind" schemes.
.. seealso::
:ref:`sqlexpression_toplevel` - Tutorial on using Core SQL
constructs.
:ref:`connections_toplevel` - Further information on direct
statement execution.
:meth:`_engine.Connection.execute`
- core level statement execution
method, which :meth:`.Session.execute` ultimately uses
in order to execute the statement.
"""
clause = expression._literal_as_text(
clause, allow_coercion_to_text=True
)
if bind is None:
bind = self.get_bind(mapper, clause=clause, **kw)
return self._connection_for_bind(bind, close_with_result=True).execute(
clause, params or {}
)
def scalar(self, clause, params=None, mapper=None, bind=None, **kw):
"""Like :meth:`~.Session.execute` but return a scalar result."""
return self.execute(
clause, params=params, mapper=mapper, bind=bind, **kw
).scalar()
def close(self):
"""Close this Session.
This clears all items and ends any transaction in progress.
If this session were created with ``autocommit=False``, a new
transaction is immediately begun. Note that this new transaction does
not use any connection resources until they are first needed.
"""
self._close_impl(invalidate=False)
def invalidate(self):
"""Close this Session, using connection invalidation.
This is a variant of :meth:`.Session.close` that will additionally
ensure that the :meth:`_engine.Connection.invalidate`
method will be called
on all :class:`_engine.Connection` objects. This can be called when
the database is known to be in a state where the connections are
no longer safe to be used.
E.g.::
try:
sess = Session()
sess.add(User())
sess.commit()
except gevent.Timeout:
sess.invalidate()
raise
except:
sess.rollback()
raise
This clears all items and ends any transaction in progress.
If this session were created with ``autocommit=False``, a new
transaction is immediately begun. Note that this new transaction does
not use any connection resources until they are first needed.
.. versionadded:: 0.9.9
"""
self._close_impl(invalidate=True)
def _close_impl(self, invalidate):
self.expunge_all()
if self.transaction is not None:
for transaction in self.transaction._iterate_self_and_parents():
transaction.close(invalidate)
def expunge_all(self):
"""Remove all object instances from this ``Session``.
This is equivalent to calling ``expunge(obj)`` on all objects in this
``Session``.
"""
all_states = self.identity_map.all_states() + list(self._new)
self.identity_map = self._identity_cls()
self._new = {}
self._deleted = {}
statelib.InstanceState._detach_states(all_states, self)
def _add_bind(self, key, bind):
try:
insp = inspect(key)
except sa_exc.NoInspectionAvailable as err:
if not isinstance(key, type):
util.raise_(
sa_exc.ArgumentError(
"Not an acceptable bind target: %s" % key
),
replace_context=err,
)
else:
self.__binds[key] = bind
else:
if insp.is_selectable:
self.__binds[insp] = bind
elif insp.is_mapper:
self.__binds[insp.class_] = bind
for selectable in insp._all_tables:
self.__binds[selectable] = bind
else:
raise sa_exc.ArgumentError(
"Not an acceptable bind target: %s" % key
)
def bind_mapper(self, mapper, bind):
"""Associate a :class:`_orm.Mapper` or arbitrary Python class with a
"bind", e.g. an :class:`_engine.Engine` or
:class:`_engine.Connection`.
The given entity is added to a lookup used by the
:meth:`.Session.get_bind` method.
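        A minimal sketch, assuming two pre-existing engines, a mapped class
        ``User``, and a :class:`_schema.Table` named ``audit_table`` (all
        names illustrative)::

            session = Session()
            session.bind_mapper(User, users_engine)       # ORM operations for User
            session.bind_table(audit_table, audit_engine)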
:param mapper: a :class:`_orm.Mapper` object,
or an instance of a mapped
class, or any Python class that is the base of a set of mapped
classes.
:param bind: an :class:`_engine.Engine` or :class:`_engine.Connection`
object.
.. seealso::
:ref:`session_partitioning`
:paramref:`.Session.binds`
:meth:`.Session.bind_table`
"""
self._add_bind(mapper, bind)
def bind_table(self, table, bind):
"""Associate a :class:`_schema.Table` with a "bind", e.g. an
:class:`_engine.Engine`
or :class:`_engine.Connection`.
The given :class:`_schema.Table` is added to a lookup used by the
:meth:`.Session.get_bind` method.
:param table: a :class:`_schema.Table` object,
which is typically the target
of an ORM mapping, or is present within a selectable that is
mapped.
:param bind: an :class:`_engine.Engine` or :class:`_engine.Connection`
object.
.. seealso::
:ref:`session_partitioning`
:paramref:`.Session.binds`
:meth:`.Session.bind_mapper`
"""
self._add_bind(table, bind)
def get_bind(self, mapper=None, clause=None):
"""Return a "bind" to which this :class:`.Session` is bound.
The "bind" is usually an instance of :class:`_engine.Engine`,
except in the case where the :class:`.Session` has been
explicitly bound directly to a :class:`_engine.Connection`.
For a multiply-bound or unbound :class:`.Session`, the
``mapper`` or ``clause`` arguments are used to determine the
appropriate bind to return.
Note that the "mapper" argument is usually present
when :meth:`.Session.get_bind` is called via an ORM
operation such as a :meth:`.Session.query`, each
individual INSERT/UPDATE/DELETE operation within a
:meth:`.Session.flush` call, etc.
The order of resolution is:
1. if mapper given and :paramref:`.Session.binds` is present,
locate a bind based first on the mapper in use, then
on the mapped class in use, then on any base classes that are
present in the ``__mro__`` of the mapped class, from more specific
superclasses to more general.
2. if clause given and ``Session.binds`` is present,
locate a bind based on :class:`_schema.Table` objects
found in the given clause present in ``Session.binds``.
3. if ``Session.binds`` is present, return that.
4. if clause given, attempt to return a bind
linked to the :class:`_schema.MetaData` ultimately
associated with the clause.
5. if mapper given, attempt to return a bind
linked to the :class:`_schema.MetaData` ultimately
associated with the :class:`_schema.Table` or other
selectable to which the mapper is mapped.
6. No bind can be found, :exc:`~sqlalchemy.exc.UnboundExecutionError`
is raised.
Note that the :meth:`.Session.get_bind` method can be overridden on
a user-defined subclass of :class:`.Session` to provide any kind
of bind resolution scheme. See the example at
:ref:`session_custom_partitioning`.
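        As a hedged sketch of such a custom scheme (the engine URLs and the
        routing rule are illustrative assumptions; ``self._flushing`` mirrors
        the partitioning example referenced above)::

            from sqlalchemy import create_engine
            from sqlalchemy.orm import Session, sessionmaker

            engines = {
                "leader": create_engine("postgresql://leader/db"),
                "follower": create_engine("postgresql://follower/db"),
            }

            class RoutingSession(Session):
                def get_bind(self, mapper=None, clause=None):
                    # writes (flushes) go to the leader, plain reads elsewhere
                    if self._flushing:
                        return engines["leader"]
                    return engines["follower"]

            RoutedSession = sessionmaker(class_=RoutingSession)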
:param mapper:
Optional :func:`.mapper` mapped class or instance of
:class:`_orm.Mapper`. The bind can be derived from a
:class:`_orm.Mapper`
first by consulting the "binds" map associated with this
:class:`.Session`, and secondly by consulting the
:class:`_schema.MetaData`
associated with the :class:`_schema.Table` to which the
:class:`_orm.Mapper`
is mapped for a bind.
:param clause:
A :class:`_expression.ClauseElement` (i.e.
:func:`_expression.select`,
:func:`_expression.text`,
etc.). If the ``mapper`` argument is not present or could not
produce a bind, the given expression construct will be searched
for a bound element, typically a :class:`_schema.Table`
associated with
bound :class:`_schema.MetaData`.
.. seealso::
:ref:`session_partitioning`
:paramref:`.Session.binds`
:meth:`.Session.bind_mapper`
:meth:`.Session.bind_table`
"""
if mapper is clause is None:
if self.bind:
return self.bind
else:
raise sa_exc.UnboundExecutionError(
"This session is not bound to a single Engine or "
"Connection, and no context was provided to locate "
"a binding."
)
if mapper is not None:
try:
mapper = inspect(mapper)
except sa_exc.NoInspectionAvailable as err:
if isinstance(mapper, type):
util.raise_(
exc.UnmappedClassError(mapper),
replace_context=err,
)
else:
raise
if self.__binds:
if mapper:
for cls in mapper.class_.__mro__:
if cls in self.__binds:
return self.__binds[cls]
if clause is None:
clause = mapper.persist_selectable
if clause is not None:
for t in sql_util.find_tables(clause, include_crud=True):
if t in self.__binds:
return self.__binds[t]
if self.bind:
return self.bind
if isinstance(clause, sql.expression.ClauseElement) and clause.bind:
return clause.bind
if mapper and mapper.persist_selectable.bind:
return mapper.persist_selectable.bind
context = []
if mapper is not None:
context.append("mapper %s" % mapper)
if clause is not None:
context.append("SQL expression")
raise sa_exc.UnboundExecutionError(
"Could not locate a bind configured on %s or this Session"
% (", ".join(context))
)
def query(self, *entities, **kwargs):
"""Return a new :class:`_query.Query` object corresponding to this
:class:`.Session`."""
return self._query_cls(entities, self, **kwargs)
@property
@util.contextmanager
def no_autoflush(self):
"""Return a context manager that disables autoflush.
e.g.::
with session.no_autoflush:
some_object = SomeClass()
session.add(some_object)
# won't autoflush
some_object.related_thing = session.query(SomeRelated).first()
Operations that proceed within the ``with:`` block
will not be subject to flushes occurring upon query
access. This is useful when initializing a series
of objects which involve existing database queries,
where the uncompleted object should not yet be flushed.
"""
autoflush = self.autoflush
self.autoflush = False
try:
yield self
finally:
self.autoflush = autoflush
def _autoflush(self):
if self.autoflush and not self._flushing:
try:
self.flush()
except sa_exc.StatementError as e:
# note we are reraising StatementError as opposed to
# raising FlushError with "chaining" to remain compatible
# with code that catches StatementError, IntegrityError,
# etc.
e.add_detail(
"raised as a result of Query-invoked autoflush; "
"consider using a session.no_autoflush block if this "
"flush is occurring prematurely"
)
util.raise_(e, with_traceback=sys.exc_info()[2])
def refresh(
self,
instance,
attribute_names=None,
with_for_update=None,
lockmode=None,
):
"""Expire and refresh the attributes on the given instance.
A query will be issued to the database and all attributes will be
refreshed with their current database value.
Lazy-loaded relational attributes will remain lazily loaded, so that
the instance-wide refresh operation will be followed immediately by
the lazy load of that attribute.
Eagerly-loaded relational attributes will eagerly load within the
single refresh operation.
Note that a highly isolated transaction will return the same values as
were previously read in that same transaction, regardless of changes
in database state outside of that transaction - usage of
:meth:`~Session.refresh` usually only makes sense if non-ORM SQL
statements were emitted in the ongoing transaction, or if autocommit
mode is turned on.
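        A minimal sketch (``account`` and ``balance`` are illustrative names
        for a persistent instance and one of its attributes)::

            # re-SELECT only the 'balance' attribute, locking the row FOR UPDATE
            session.refresh(
                account, attribute_names=["balance"], with_for_update=True)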
:param attribute_names: optional. An iterable collection of
string attribute names indicating a subset of attributes to
be refreshed.
:param with_for_update: optional boolean ``True`` indicating FOR UPDATE
should be used, or may be a dictionary containing flags to
indicate a more specific set of FOR UPDATE flags for the SELECT;
flags should match the parameters of
:meth:`_query.Query.with_for_update`.
Supersedes the :paramref:`.Session.refresh.lockmode` parameter.
.. versionadded:: 1.2
:param lockmode: Passed to the :class:`~sqlalchemy.orm.query.Query`
as used by :meth:`~sqlalchemy.orm.query.Query.with_lockmode`.
Superseded by :paramref:`.Session.refresh.with_for_update`.
.. seealso::
:ref:`session_expire` - introductory material
:meth:`.Session.expire`
:meth:`.Session.expire_all`
:meth:`_orm.Query.populate_existing`
"""
try:
state = attributes.instance_state(instance)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(instance),
replace_context=err,
)
self._expire_state(state, attribute_names)
if with_for_update == {}:
raise sa_exc.ArgumentError(
"with_for_update should be the boolean value "
"True, or a dictionary with options. "
"A blank dictionary is ambiguous."
)
if lockmode:
with_for_update = query.LockmodeArg.parse_legacy_query(lockmode)
elif with_for_update is not None:
if with_for_update is True:
with_for_update = query.LockmodeArg()
elif with_for_update:
with_for_update = query.LockmodeArg(**with_for_update)
else:
with_for_update = None
if (
loading.load_on_ident(
self.query(object_mapper(instance)),
state.key,
refresh_state=state,
with_for_update=with_for_update,
only_load_props=attribute_names,
)
is None
):
raise sa_exc.InvalidRequestError(
"Could not refresh instance '%s'" % instance_str(instance)
)
def expire_all(self):
"""Expires all persistent instances within this Session.
When any attributes on a persistent instance are next accessed,
a query will be issued using the
:class:`.Session` object's current transactional context in order to
load all expired attributes for the given instance. Note that
a highly isolated transaction will return the same values as were
previously read in that same transaction, regardless of changes
in database state outside of that transaction.
To expire individual objects and individual attributes
on those objects, use :meth:`Session.expire`.
The :class:`.Session` object's default behavior is to
expire all state whenever the :meth:`Session.rollback`
or :meth:`Session.commit` methods are called, so that new
state can be loaded for the new transaction. For this reason,
calling :meth:`Session.expire_all` should not be needed when
autocommit is ``False``, assuming the transaction is isolated.
.. seealso::
:ref:`session_expire` - introductory material
:meth:`.Session.expire`
:meth:`.Session.refresh`
:meth:`_orm.Query.populate_existing`
"""
for state in self.identity_map.all_states():
state._expire(state.dict, self.identity_map._modified)
def expire(self, instance, attribute_names=None):
"""Expire the attributes on an instance.
Marks the attributes of an instance as out of date. When an expired
attribute is next accessed, a query will be issued to the
:class:`.Session` object's current transactional context in order to
load all expired attributes for the given instance. Note that
a highly isolated transaction will return the same values as were
previously read in that same transaction, regardless of changes
in database state outside of that transaction.
To expire all objects in the :class:`.Session` simultaneously,
use :meth:`Session.expire_all`.
The :class:`.Session` object's default behavior is to
expire all state whenever the :meth:`Session.rollback`
or :meth:`Session.commit` methods are called, so that new
state can be loaded for the new transaction. For this reason,
calling :meth:`Session.expire` only makes sense for the specific
case that a non-ORM SQL statement was emitted in the current
transaction.
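        A minimal sketch (``account`` and the raw UPDATE statement are
        illustrative assumptions)::

            from sqlalchemy import text

            session.execute(text("UPDATE account SET balance = balance + 10"))
            session.expire(account, ["balance"])
            print(account.balance)   # attribute access emits a new SELECT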
:param instance: The instance to be refreshed.
:param attribute_names: optional list of string attribute names
indicating a subset of attributes to be expired.
.. seealso::
:ref:`session_expire` - introductory material
:meth:`.Session.expire`
:meth:`.Session.refresh`
:meth:`_orm.Query.populate_existing`
"""
try:
state = attributes.instance_state(instance)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(instance),
replace_context=err,
)
self._expire_state(state, attribute_names)
def _expire_state(self, state, attribute_names):
self._validate_persistent(state)
if attribute_names:
state._expire_attributes(state.dict, attribute_names)
else:
# pre-fetch the full cascade since the expire is going to
# remove associations
cascaded = list(
state.manager.mapper.cascade_iterator("refresh-expire", state)
)
self._conditional_expire(state)
for o, m, st_, dct_ in cascaded:
self._conditional_expire(st_)
def _conditional_expire(self, state):
"""Expire a state if persistent, else expunge if pending"""
if state.key:
state._expire(state.dict, self.identity_map._modified)
elif state in self._new:
self._new.pop(state)
state._detach(self)
@util.deprecated(
"0.7",
"The :meth:`.Session.prune` method is deprecated along with "
":paramref:`.Session.weak_identity_map`. This method will be "
"removed in a future release.",
)
def prune(self):
"""Remove unreferenced instances cached in the identity map.
Note that this method is only meaningful if "weak_identity_map" is set
to False. The default weak identity map is self-pruning.
Removes any object in this Session's identity map that is not
referenced in user code, modified, new or scheduled for deletion.
Returns the number of objects pruned.
"""
return self.identity_map.prune()
def expunge(self, instance):
"""Remove the `instance` from this ``Session``.
This will free all internal references to the instance. Cascading
will be applied according to the *expunge* cascade rule.
"""
try:
state = attributes.instance_state(instance)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(instance),
replace_context=err,
)
if state.session_id is not self.hash_key:
raise sa_exc.InvalidRequestError(
"Instance %s is not present in this Session" % state_str(state)
)
cascaded = list(
state.manager.mapper.cascade_iterator("expunge", state)
)
self._expunge_states([state] + [st_ for o, m, st_, dct_ in cascaded])
def _expunge_states(self, states, to_transient=False):
for state in states:
if state in self._new:
self._new.pop(state)
elif self.identity_map.contains_state(state):
self.identity_map.safe_discard(state)
self._deleted.pop(state, None)
elif self.transaction:
# state is "detached" from being deleted, but still present
# in the transaction snapshot
self.transaction._deleted.pop(state, None)
statelib.InstanceState._detach_states(
states, self, to_transient=to_transient
)
def _register_persistent(self, states):
"""Register all persistent objects from a flush.
This is used both for pending objects moving to the persistent
state as well as already persistent objects.
"""
pending_to_persistent = self.dispatch.pending_to_persistent or None
for state in states:
mapper = _state_mapper(state)
# prevent against last minute dereferences of the object
obj = state.obj()
if obj is not None:
instance_key = mapper._identity_key_from_state(state)
if (
_none_set.intersection(instance_key[1])
and not mapper.allow_partial_pks
or _none_set.issuperset(instance_key[1])
):
raise exc.FlushError(
"Instance %s has a NULL identity key. If this is an "
"auto-generated value, check that the database table "
"allows generation of new primary key values, and "
"that the mapped Column object is configured to "
"expect these generated values. Ensure also that "
"this flush() is not occurring at an inappropriate "
"time, such as within a load() event."
% state_str(state)
)
if state.key is None:
state.key = instance_key
elif state.key != instance_key:
# primary key switch. use safe_discard() in case another
# state has already replaced this one in the identity
# map (see test/orm/test_naturalpks.py ReversePKsTest)
self.identity_map.safe_discard(state)
if state in self.transaction._key_switches:
orig_key = self.transaction._key_switches[state][0]
else:
orig_key = state.key
self.transaction._key_switches[state] = (
orig_key,
instance_key,
)
state.key = instance_key
# there can be an existing state in the identity map
# that is replaced when the primary keys of two instances
# are swapped; see test/orm/test_naturalpks.py -> test_reverse
old = self.identity_map.replace(state)
if (
old is not None
and mapper._identity_key_from_state(old) == instance_key
and old.obj() is not None
):
util.warn(
"Identity map already had an identity for %s, "
"replacing it with newly flushed object. Are there "
"load operations occurring inside of an event handler "
"within the flush?" % (instance_key,)
)
state._orphaned_outside_of_session = False
statelib.InstanceState._commit_all_states(
((state, state.dict) for state in states), self.identity_map
)
self._register_altered(states)
if pending_to_persistent is not None:
for state in states.intersection(self._new):
pending_to_persistent(self, state)
# remove from new last, might be the last strong ref
for state in set(states).intersection(self._new):
self._new.pop(state)
def _register_altered(self, states):
if self._enable_transaction_accounting and self.transaction:
for state in states:
if state in self._new:
self.transaction._new[state] = True
else:
self.transaction._dirty[state] = True
def _remove_newly_deleted(self, states):
persistent_to_deleted = self.dispatch.persistent_to_deleted or None
for state in states:
if self._enable_transaction_accounting and self.transaction:
self.transaction._deleted[state] = True
if persistent_to_deleted is not None:
# get a strong reference before we pop out of
# self._deleted
obj = state.obj() # noqa
self.identity_map.safe_discard(state)
self._deleted.pop(state, None)
state._deleted = True
# can't call state._detach() here, because this state
# is still in the transaction snapshot and needs to be
# tracked as part of that
if persistent_to_deleted is not None:
persistent_to_deleted(self, state)
def add(self, instance, _warn=True):
"""Place an object in the ``Session``.
Its state will be persisted to the database on the next flush
operation.
Repeated calls to ``add()`` will be ignored. The opposite of ``add()``
is ``expunge()``.
"""
if _warn and self._warn_on_events:
self._flush_warning("Session.add()")
try:
state = attributes.instance_state(instance)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(instance),
replace_context=err,
)
self._save_or_update_state(state)
def add_all(self, instances):
"""Add the given collection of instances to this ``Session``."""
if self._warn_on_events:
self._flush_warning("Session.add_all()")
for instance in instances:
self.add(instance, _warn=False)
def _save_or_update_state(self, state):
state._orphaned_outside_of_session = False
self._save_or_update_impl(state)
mapper = _state_mapper(state)
for o, m, st_, dct_ in mapper.cascade_iterator(
"save-update", state, halt_on=self._contains_state
):
self._save_or_update_impl(st_)
def delete(self, instance):
"""Mark an instance as deleted.
The database delete operation occurs upon ``flush()``.
"""
if self._warn_on_events:
self._flush_warning("Session.delete()")
try:
state = attributes.instance_state(instance)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(instance),
replace_context=err,
)
self._delete_impl(state, instance, head=True)
def _delete_impl(self, state, obj, head):
if state.key is None:
if head:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" % state_str(state)
)
else:
return
to_attach = self._before_attach(state, obj)
if state in self._deleted:
return
self.identity_map.add(state)
if to_attach:
self._after_attach(state, obj)
if head:
# grab the cascades before adding the item to the deleted list
# so that autoflush does not delete the item
# the strong reference to the instance itself is significant here
cascade_states = list(
state.manager.mapper.cascade_iterator("delete", state)
)
self._deleted[state] = obj
if head:
for o, m, st_, dct_ in cascade_states:
self._delete_impl(st_, o, False)
def merge(self, instance, load=True):
"""Copy the state of a given instance into a corresponding instance
within this :class:`.Session`.
:meth:`.Session.merge` examines the primary key attributes of the
source instance, and attempts to reconcile it with an instance of the
same primary key in the session. If not found locally, it attempts
to load the object from the database based on primary key, and if
none can be located, creates a new instance. The state of each
attribute on the source instance is then copied to the target
instance. The resulting target instance is then returned by the
method; the original source instance is left unmodified, and
un-associated with the :class:`.Session` if not already.
This operation cascades to associated instances if the association is
mapped with ``cascade="merge"``.
See :ref:`unitofwork_merging` for a detailed discussion of merging.
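        A minimal sketch (``User`` is an assumed mapped class with primary
        key ``id``)::

            detached = User(id=5, name="updated name")  # not attached anywhere
            merged = session.merge(detached)            # persistent copy in this Session

            assert merged in session
            assert detached not in session              # source object is untouched

            session.commit()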
.. versionchanged:: 1.1 - :meth:`.Session.merge` will now reconcile
pending objects with overlapping primary keys in the same way
as persistent. See :ref:`change_3601` for discussion.
:param instance: Instance to be merged.
:param load: Boolean, when False, :meth:`.merge` switches into
a "high performance" mode which causes it to forego emitting history
events as well as all database access. This flag is used for
cases such as transferring graphs of objects into a :class:`.Session`
from a second level cache, or to transfer just-loaded objects
into the :class:`.Session` owned by a worker thread or process
without re-querying the database.
The ``load=False`` use case adds the caveat that the given
object has to be in a "clean" state, that is, has no pending changes
to be flushed - even if the incoming object is detached from any
:class:`.Session`. This is so that when
the merge operation populates local attributes and
cascades to related objects and
collections, the values can be "stamped" onto the
target object as is, without generating any history or attribute
events, and without the need to reconcile the incoming data with
any existing related objects or collections that might not
be loaded. The resulting objects from ``load=False`` are always
produced as "clean", so it is only appropriate that the given objects
should be "clean" as well, else this suggests a mis-use of the
method.
.. seealso::
:func:`.make_transient_to_detached` - provides for an alternative
means of "merging" a single object into the :class:`.Session`
"""
if self._warn_on_events:
self._flush_warning("Session.merge()")
_recursive = {}
_resolve_conflict_map = {}
if load:
# flush current contents if we expect to load data
self._autoflush()
object_mapper(instance) # verify mapped
autoflush = self.autoflush
try:
self.autoflush = False
return self._merge(
attributes.instance_state(instance),
attributes.instance_dict(instance),
load=load,
_recursive=_recursive,
_resolve_conflict_map=_resolve_conflict_map,
)
finally:
self.autoflush = autoflush
def _merge(
self,
state,
state_dict,
load=True,
_recursive=None,
_resolve_conflict_map=None,
):
mapper = _state_mapper(state)
if state in _recursive:
return _recursive[state]
new_instance = False
key = state.key
if key is None:
if state in self._new:
util.warn(
"Instance %s is already pending in this Session yet is "
"being merged again; this is probably not what you want "
"to do" % state_str(state)
)
if not load:
raise sa_exc.InvalidRequestError(
"merge() with load=False option does not support "
"objects transient (i.e. unpersisted) objects. flush() "
"all changes on mapped instances before merging with "
"load=False."
)
key = mapper._identity_key_from_state(state)
key_is_persistent = attributes.NEVER_SET not in key[1] and (
not _none_set.intersection(key[1])
or (
mapper.allow_partial_pks
and not _none_set.issuperset(key[1])
)
)
else:
key_is_persistent = True
if key in self.identity_map:
try:
merged = self.identity_map[key]
except KeyError:
# object was GC'ed right as we checked for it
merged = None
else:
merged = None
if merged is None:
if key_is_persistent and key in _resolve_conflict_map:
merged = _resolve_conflict_map[key]
elif not load:
if state.modified:
raise sa_exc.InvalidRequestError(
"merge() with load=False option does not support "
"objects marked as 'dirty'. flush() all changes on "
"mapped instances before merging with load=False."
)
merged = mapper.class_manager.new_instance()
merged_state = attributes.instance_state(merged)
merged_state.key = key
self._update_impl(merged_state)
new_instance = True
elif key_is_persistent:
merged = self.query(mapper.class_).get(key[1])
if merged is None:
merged = mapper.class_manager.new_instance()
merged_state = attributes.instance_state(merged)
merged_dict = attributes.instance_dict(merged)
new_instance = True
self._save_or_update_state(merged_state)
else:
merged_state = attributes.instance_state(merged)
merged_dict = attributes.instance_dict(merged)
_recursive[state] = merged
_resolve_conflict_map[key] = merged
# check that we didn't just pull the exact same
# state out.
if state is not merged_state:
# version check if applicable
if mapper.version_id_col is not None:
existing_version = mapper._get_state_attr_by_column(
state,
state_dict,
mapper.version_id_col,
passive=attributes.PASSIVE_NO_INITIALIZE,
)
merged_version = mapper._get_state_attr_by_column(
merged_state,
merged_dict,
mapper.version_id_col,
passive=attributes.PASSIVE_NO_INITIALIZE,
)
if (
existing_version is not attributes.PASSIVE_NO_RESULT
and merged_version is not attributes.PASSIVE_NO_RESULT
and existing_version != merged_version
):
raise exc.StaleDataError(
"Version id '%s' on merged state %s "
"does not match existing version '%s'. "
"Leave the version attribute unset when "
"merging to update the most recent version."
% (
existing_version,
state_str(merged_state),
merged_version,
)
)
merged_state.load_path = state.load_path
merged_state.load_options = state.load_options
# since we are copying load_options, we need to copy
# the callables_ that would have been generated by those
# load_options.
# assumes that the callables we put in state.callables_
# are not instance-specific (which they should not be)
merged_state._copy_callables(state)
for prop in mapper.iterate_properties:
prop.merge(
self,
state,
state_dict,
merged_state,
merged_dict,
load,
_recursive,
_resolve_conflict_map,
)
if not load:
# remove any history
merged_state._commit_all(merged_dict, self.identity_map)
if new_instance:
merged_state.manager.dispatch.load(merged_state, None)
return merged
def _validate_persistent(self, state):
if not self.identity_map.contains_state(state):
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persistent within this Session"
% state_str(state)
)
def _save_impl(self, state):
if state.key is not None:
raise sa_exc.InvalidRequestError(
"Object '%s' already has an identity - "
"it can't be registered as pending" % state_str(state)
)
obj = state.obj()
to_attach = self._before_attach(state, obj)
if state not in self._new:
self._new[state] = obj
state.insert_order = len(self._new)
if to_attach:
self._after_attach(state, obj)
def _update_impl(self, state, revert_deletion=False):
if state.key is None:
raise sa_exc.InvalidRequestError(
"Instance '%s' is not persisted" % state_str(state)
)
if state._deleted:
if revert_deletion:
if not state._attached:
return
del state._deleted
else:
raise sa_exc.InvalidRequestError(
"Instance '%s' has been deleted. "
"Use the make_transient() "
"function to send this object back "
"to the transient state." % state_str(state)
)
obj = state.obj()
# check for late gc
if obj is None:
return
to_attach = self._before_attach(state, obj)
self._deleted.pop(state, None)
if revert_deletion:
self.identity_map.replace(state)
else:
self.identity_map.add(state)
if to_attach:
self._after_attach(state, obj)
elif revert_deletion:
self.dispatch.deleted_to_persistent(self, state)
def _save_or_update_impl(self, state):
if state.key is None:
self._save_impl(state)
else:
self._update_impl(state)
def enable_relationship_loading(self, obj):
"""Associate an object with this :class:`.Session` for related
object loading.
.. warning::
:meth:`.enable_relationship_loading` exists to serve special
use cases and is not recommended for general use.
Accesses of attributes mapped with :func:`_orm.relationship`
will attempt to load a value from the database using this
:class:`.Session` as the source of connectivity. The values
will be loaded based on foreign key and primary key values
present on this object - if not present, then those relationships
will be unavailable.
The object will be attached to this session, but will
**not** participate in any persistence operations; its state
for almost all purposes will remain either "transient" or
"detached", except for the case of relationship loading.
Also note that backrefs will often not work as expected.
Altering a relationship-bound attribute on the target object
may not fire off a backref event, if the effective value
is what was already loaded from a foreign-key-holding value.
The :meth:`.Session.enable_relationship_loading` method is
similar to the ``load_on_pending`` flag on :func:`_orm.relationship`.
Unlike that flag, :meth:`.Session.enable_relationship_loading` allows
an object to remain transient while still being able to load
related items.
To make a transient object associated with a :class:`.Session`
via :meth:`.Session.enable_relationship_loading` pending, add
it to the :class:`.Session` using :meth:`.Session.add` normally.
If the object instead represents an existing identity in the database,
it should be merged using :meth:`.Session.merge`.
:meth:`.Session.enable_relationship_loading` does not improve
behavior when the ORM is used normally - object references should be
constructed at the object level, not at the foreign key level, so
that they are present in an ordinary way before flush()
proceeds. This method is not intended for general use.
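        A minimal sketch (an ``Address`` class with a many-to-one
        ``Address.user`` relationship keyed on ``user_id`` is an illustrative
        assumption)::

            address = Address(user_id=5)   # transient; only the FK value is set
            session.enable_relationship_loading(address)
            print(address.user)            # many-to-one loads via this Session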
.. seealso::
:paramref:`_orm.relationship.load_on_pending` - this flag
allows per-relationship loading of many-to-ones on items that
are pending.
:func:`.make_transient_to_detached` - allows for an object to
be added to a :class:`.Session` without SQL emitted, which then
will unexpire attributes on access.
"""
state = attributes.instance_state(obj)
to_attach = self._before_attach(state, obj)
state._load_pending = True
if to_attach:
self._after_attach(state, obj)
def _before_attach(self, state, obj):
if state.session_id == self.hash_key:
return False
if state.session_id and state.session_id in _sessions:
raise sa_exc.InvalidRequestError(
"Object '%s' is already attached to session '%s' "
"(this is '%s')"
% (state_str(state), state.session_id, self.hash_key)
)
self.dispatch.before_attach(self, state)
return True
def _after_attach(self, state, obj):
state.session_id = self.hash_key
if state.modified and state._strong_obj is None:
state._strong_obj = obj
self.dispatch.after_attach(self, state)
if state.key:
self.dispatch.detached_to_persistent(self, state)
else:
self.dispatch.transient_to_pending(self, state)
def __contains__(self, instance):
"""Return True if the instance is associated with this session.
The instance may be pending or persistent within the Session for a
result of True.
"""
try:
state = attributes.instance_state(instance)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(instance),
replace_context=err,
)
return self._contains_state(state)
def __iter__(self):
"""Iterate over all pending or persistent instances within this
Session.
"""
return iter(
list(self._new.values()) + list(self.identity_map.values())
)
def _contains_state(self, state):
return state in self._new or self.identity_map.contains_state(state)
def flush(self, objects=None):
"""Flush all the object changes to the database.
Writes out all pending object creations, deletions and modifications
to the database as INSERTs, DELETEs, UPDATEs, etc. Operations are
automatically ordered by the Session's unit of work dependency
solver.
Database operations will be issued in the current transactional
context and do not affect the state of the transaction, unless an
error occurs, in which case the entire transaction is rolled back.
You may flush() as often as you like within a transaction to move
changes from Python to the database's transaction buffer.
For ``autocommit`` Sessions with no active manual transaction, flush()
will create a transaction on the fly that surrounds the entire set of
operations into the flush.
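        A minimal sketch (``User`` is an assumed mapped class with a
        server-generated primary key)::

            user = User(name="someuser")
            session.add(user)
            session.flush()     # INSERT emitted within the current transaction
            print(user.id)      # newly generated primary key is now available
            session.commit()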
:param objects: Optional; restricts the flush operation to operate
only on elements that are in the given collection.
This feature is for an extremely narrow set of use cases where
particular objects may need to be operated upon before the
full flush() occurs. It is not intended for general use.
"""
if self._flushing:
raise sa_exc.InvalidRequestError("Session is already flushing")
if self._is_clean():
return
try:
self._flushing = True
self._flush(objects)
finally:
self._flushing = False
def _flush_warning(self, method):
util.warn(
"Usage of the '%s' operation is not currently supported "
"within the execution stage of the flush process. "
"Results may not be consistent. Consider using alternative "
"event listeners or connection-level operations instead." % method
)
def _is_clean(self):
return (
not self.identity_map.check_modified()
and not self._deleted
and not self._new
)
def _flush(self, objects=None):
dirty = self._dirty_states
if not dirty and not self._deleted and not self._new:
self.identity_map._modified.clear()
return
flush_context = UOWTransaction(self)
if self.dispatch.before_flush:
self.dispatch.before_flush(self, flush_context, objects)
# re-establish "dirty states" in case the listeners
# added
dirty = self._dirty_states
deleted = set(self._deleted)
new = set(self._new)
dirty = set(dirty).difference(deleted)
# create the set of all objects we want to operate upon
if objects:
# specific list passed in
objset = set()
for o in objects:
try:
state = attributes.instance_state(o)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(o),
replace_context=err,
)
objset.add(state)
else:
objset = None
# store objects whose fate has been decided
processed = set()
# put all saves/updates into the flush context. detect top-level
# orphans and throw them into deleted.
if objset:
proc = new.union(dirty).intersection(objset).difference(deleted)
else:
proc = new.union(dirty).difference(deleted)
for state in proc:
is_orphan = _state_mapper(state)._is_orphan(state)
is_persistent_orphan = is_orphan and state.has_identity
if (
is_orphan
and not is_persistent_orphan
and state._orphaned_outside_of_session
):
self._expunge_states([state])
else:
_reg = flush_context.register_object(
state, isdelete=is_persistent_orphan
)
assert _reg, "Failed to add object to the flush context!"
processed.add(state)
# put all remaining deletes into the flush context.
if objset:
proc = deleted.intersection(objset).difference(processed)
else:
proc = deleted.difference(processed)
for state in proc:
_reg = flush_context.register_object(state, isdelete=True)
assert _reg, "Failed to add object to the flush context!"
if not flush_context.has_work:
return
flush_context.transaction = transaction = self.begin(
subtransactions=True
)
try:
self._warn_on_events = True
try:
flush_context.execute()
finally:
self._warn_on_events = False
self.dispatch.after_flush(self, flush_context)
flush_context.finalize_flush_changes()
if not objects and self.identity_map._modified:
len_ = len(self.identity_map._modified)
statelib.InstanceState._commit_all_states(
[
(state, state.dict)
for state in self.identity_map._modified
],
instance_dict=self.identity_map,
)
util.warn(
"Attribute history events accumulated on %d "
"previously clean instances "
"within inner-flush event handlers have been "
"reset, and will not result in database updates. "
"Consider using set_committed_value() within "
"inner-flush event handlers to avoid this warning." % len_
)
# useful assertions:
# if not objects:
# assert not self.identity_map._modified
# else:
# assert self.identity_map._modified == \
# self.identity_map._modified.difference(objects)
self.dispatch.after_flush_postexec(self, flush_context)
transaction.commit()
except:
with util.safe_reraise():
transaction.rollback(_capture_exception=True)
def bulk_save_objects(
self,
objects,
return_defaults=False,
update_changed_only=True,
preserve_order=True,
):
"""Perform a bulk save of the given list of objects.
The bulk save feature allows mapped objects to be used as the
source of simple INSERT and UPDATE operations which can be more easily
grouped together into higher performing "executemany"
operations; the extraction of data from the objects is also performed
using a lower-latency process that ignores whether or not attributes
have actually been modified in the case of UPDATEs, and also ignores
SQL expressions.
The objects as given are not added to the session and no additional
state is established on them, unless the ``return_defaults`` flag
is also set, in which case primary key attributes and server-side
default values will be populated.
.. versionadded:: 1.0.0
.. warning::
The bulk save feature allows for a lower-latency INSERT/UPDATE
of rows at the expense of most other unit-of-work features.
Features such as object management, relationship handling,
and SQL clause support are **silently omitted** in favor of raw
INSERT/UPDATES of records.
**Please read the list of caveats at**
:ref:`bulk_operations_caveats` **before using this method, and
fully test and confirm the functionality of all code developed
using these systems.**
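        A minimal sketch (``User`` is an assumed mapped class)::

            session.bulk_save_objects(
                [User(name="user %d" % i) for i in range(10000)]
            )
            session.commit()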
:param objects: a sequence of mapped object instances. The mapped
objects are persisted as is, and are **not** associated with the
:class:`.Session` afterwards.
For each object, whether the object is sent as an INSERT or an
UPDATE is dependent on the same rules used by the :class:`.Session`
in traditional operation; if the object has the
:attr:`.InstanceState.key`
attribute set, then the object is assumed to be "detached" and
will result in an UPDATE. Otherwise, an INSERT is used.
In the case of an UPDATE, statements are grouped based on which
attributes have changed, and are thus to be the subject of each
SET clause. If ``update_changed_only`` is False, then all
attributes present within each object are applied to the UPDATE
statement, which may help in allowing the statements to be grouped
together into a larger executemany(), and will also reduce the
overhead of checking history on attributes.
:param return_defaults: when True, rows that are missing values which
generate defaults, namely integer primary key defaults and sequences,
will be inserted **one at a time**, so that the primary key value
is available. In particular this will allow joined-inheritance
and other multi-table mappings to insert correctly without the need
to provide primary key values ahead of time; however,
:paramref:`.Session.bulk_save_objects.return_defaults` **greatly
reduces the performance gains** of the method overall.
:param update_changed_only: when True, UPDATE statements are rendered
based on those attributes in each state that have logged changes.
When False, all attributes present are rendered into the SET clause
with the exception of primary key attributes.
:param preserve_order: when True, the order of inserts and updates
matches exactly the order in which the objects are given. When
False, common types of objects are grouped into inserts
and updates, to allow for more batching opportunities.
.. versionadded:: 1.3
.. seealso::
:ref:`bulk_operations`
:meth:`.Session.bulk_insert_mappings`
:meth:`.Session.bulk_update_mappings`
"""
def key(state):
return (state.mapper, state.key is not None)
obj_states = (attributes.instance_state(obj) for obj in objects)
if not preserve_order:
obj_states = sorted(obj_states, key=key)
for (mapper, isupdate), states in itertools.groupby(obj_states, key):
self._bulk_save_mappings(
mapper,
states,
isupdate,
True,
return_defaults,
update_changed_only,
False,
)
def bulk_insert_mappings(
self, mapper, mappings, return_defaults=False, render_nulls=False
):
"""Perform a bulk insert of the given list of mapping dictionaries.
The bulk insert feature allows plain Python dictionaries to be used as
the source of simple INSERT operations which can be more easily
grouped together into higher performing "executemany"
operations. Using dictionaries, there are no "history" or session
state management features in use, reducing latency when inserting
large numbers of simple rows.
The values within the dictionaries as given are typically passed
without modification into Core :meth:`_expression.Insert` constructs,
after
organizing the values within them across the tables to which
the given mapper is mapped.
.. versionadded:: 1.0.0
.. warning::
The bulk insert feature allows for a lower-latency INSERT
of rows at the expense of most other unit-of-work features.
Features such as object management, relationship handling,
and SQL clause support are **silently omitted** in favor of raw
INSERT of records.
**Please read the list of caveats at**
:ref:`bulk_operations_caveats` **before using this method, and
fully test and confirm the functionality of all code developed
using these systems.**
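        A minimal sketch (``User`` is an assumed mapped class with ``id``
        and ``name`` attributes)::

            session.bulk_insert_mappings(
                User,
                [{"id": i, "name": "user %d" % i} for i in range(10000)],
            )
            session.commit()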
:param mapper: a mapped class, or the actual :class:`_orm.Mapper`
object,
representing the single kind of object represented within the mapping
list.
:param mappings: a sequence of dictionaries, each one containing the
state of the mapped row to be inserted, in terms of the attribute
names on the mapped class. If the mapping refers to multiple tables,
such as a joined-inheritance mapping, each dictionary must contain all
keys to be populated into all tables.
:param return_defaults: when True, rows that are missing values which
generate defaults, namely integer primary key defaults and sequences,
will be inserted **one at a time**, so that the primary key value
is available. In particular this will allow joined-inheritance
and other multi-table mappings to insert correctly without the need
to provide primary
key values ahead of time; however,
:paramref:`.Session.bulk_insert_mappings.return_defaults`
**greatly reduces the performance gains** of the method overall.
If the rows
to be inserted only refer to a single table, then there is no
reason this flag should be set as the returned default information
is not used.
:param render_nulls: When True, a value of ``None`` will result
in a NULL value being included in the INSERT statement, rather
than the column being omitted from the INSERT. This allows all
the rows being INSERTed to have the identical set of columns which
allows the full set of rows to be batched to the DBAPI. Normally,
each column-set that contains a different combination of NULL values
than the previous row must omit a different series of columns from
the rendered INSERT statement, which means it must be emitted as a
separate statement. By passing this flag, the full set of rows
are guaranteed to be batchable into one batch; the cost however is
that server-side defaults which are invoked by an omitted column will
be skipped, so care must be taken to ensure that these are not
necessary.
.. warning::
When this flag is set, **server side default SQL values will
not be invoked** for those columns that are inserted as NULL;
the NULL value will be sent explicitly. Care must be taken
to ensure that no server-side default functions need to be
invoked for the operation as a whole.
.. versionadded:: 1.1
.. seealso::
:ref:`bulk_operations`
:meth:`.Session.bulk_save_objects`
:meth:`.Session.bulk_update_mappings`
"""
self._bulk_save_mappings(
mapper,
mappings,
False,
False,
return_defaults,
False,
render_nulls,
)
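    # Hedged usage sketch (``User`` and ``session`` are assumed names):
    #
    #     session.bulk_insert_mappings(
    #         User,
    #         [{"id": i, "name": "u%d" % i} for i in range(10000)],
    #     )
    #     session.commit()
    #
    # Supplying the primary key in each dictionary keeps the whole batch
    # eligible for a single "executemany" INSERT.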
def bulk_update_mappings(self, mapper, mappings):
"""Perform a bulk update of the given list of mapping dictionaries.
The bulk update feature allows plain Python dictionaries to be used as
the source of simple UPDATE operations which can be more easily
grouped together into higher performing "executemany"
operations. Using dictionaries, there is no "history" or session
state management features in use, reducing latency when updating
large numbers of simple rows.
.. versionadded:: 1.0.0
.. warning::
The bulk update feature allows for a lower-latency UPDATE
of rows at the expense of most other unit-of-work features.
Features such as object management, relationship handling,
and SQL clause support are **silently omitted** in favor of raw
UPDATES of records.
**Please read the list of caveats at**
:ref:`bulk_operations_caveats` **before using this method, and
fully test and confirm the functionality of all code developed
using these systems.**
:param mapper: a mapped class, or the actual :class:`_orm.Mapper`
object,
representing the single kind of object represented within the mapping
list.
:param mappings: a sequence of dictionaries, each one containing the
state of the mapped row to be updated, in terms of the attribute names
on the mapped class. If the mapping refers to multiple tables, such
as a joined-inheritance mapping, each dictionary may contain keys
corresponding to all tables. All those keys which are present and
are not part of the primary key are applied to the SET clause of the
UPDATE statement; the primary key values, which are required, are
applied to the WHERE clause.
.. seealso::
:ref:`bulk_operations`
:meth:`.Session.bulk_insert_mappings`
:meth:`.Session.bulk_save_objects`
"""
self._bulk_save_mappings(
mapper, mappings, True, False, False, False, False
)
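    # Hedged usage sketch (``User`` and ``session`` are assumed names): each
    # dictionary carries the primary key for the WHERE clause and the changed
    # columns for the SET clause.
    #
    #     session.bulk_update_mappings(
    #         User,
    #         [{"id": 1, "name": "renamed-1"}, {"id": 2, "name": "renamed-2"}],
    #     )
    #     session.commit()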
def _bulk_save_mappings(
self,
mapper,
mappings,
isupdate,
isstates,
return_defaults,
update_changed_only,
render_nulls,
):
mapper = _class_to_mapper(mapper)
self._flushing = True
transaction = self.begin(subtransactions=True)
try:
if isupdate:
persistence._bulk_update(
mapper,
mappings,
transaction,
isstates,
update_changed_only,
)
else:
persistence._bulk_insert(
mapper,
mappings,
transaction,
isstates,
return_defaults,
render_nulls,
)
transaction.commit()
except:
with util.safe_reraise():
transaction.rollback(_capture_exception=True)
finally:
self._flushing = False
@util.deprecated_params(
passive=(
"0.8",
"The :paramref:`.Session.is_modified.passive` flag is deprecated "
"and will be removed in a future release. The flag is no longer "
"used and is ignored.",
)
)
def is_modified(self, instance, include_collections=True, passive=None):
r"""Return ``True`` if the given instance has locally
modified attributes.
This method retrieves the history for each instrumented
attribute on the instance and performs a comparison of the current
value to its previously committed value, if any.
It is in effect a more expensive and accurate
version of checking for the given instance in the
:attr:`.Session.dirty` collection; a full test for
each attribute's net "dirty" status is performed.
E.g.::
return session.is_modified(someobject)
A few caveats to this method apply:
* Instances present in the :attr:`.Session.dirty` collection may
report ``False`` when tested with this method. This is because
the object may have received change events via attribute mutation,
thus placing it in :attr:`.Session.dirty`, but ultimately the state
is the same as that loaded from the database, resulting in no net
change here.
* Scalar attributes may not have recorded the previously set
value when a new value was applied, if the attribute was not loaded,
or was expired, at the time the new value was received - in these
cases, the attribute is assumed to have a change, even if there is
ultimately no net change against its database value. SQLAlchemy in
most cases does not need the "old" value when a set event occurs, so
it skips the expense of a SQL call if the old value isn't present,
based on the assumption that an UPDATE of the scalar value is
usually needed, and in those few cases where it isn't, is less
expensive on average than issuing a defensive SELECT.
The "old" value is fetched unconditionally upon set only if the
attribute container has the ``active_history`` flag set to ``True``.
This flag is set typically for primary key attributes and scalar
object references that are not a simple many-to-one. To set this
flag for any arbitrary mapped column, use the ``active_history``
argument with :func:`.column_property`.
:param instance: mapped instance to be tested for pending changes.
:param include_collections: Indicates if multivalued collections
should be included in the operation. Setting this to ``False`` is a
way to detect only local-column based properties (i.e. scalar columns
or many-to-one foreign keys) that would result in an UPDATE for this
instance upon flush.
:param passive: not used
"""
state = object_state(instance)
if not state.modified:
return False
dict_ = state.dict
for attr in state.manager.attributes:
if (
not include_collections
and hasattr(attr.impl, "get_collection")
) or not hasattr(attr.impl, "get_history"):
continue
(added, unchanged, deleted) = attr.impl.get_history(
state, dict_, passive=attributes.NO_CHANGE
)
if added or deleted:
return True
else:
return False
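    # Hedged usage sketch (``someobject`` and ``session`` are assumed names):
    #
    #     if session.is_modified(someobject, include_collections=False):
    #         ...  # flushing would emit an UPDATE of local columns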
@property
def is_active(self):
"""True if this :class:`.Session` is in "transaction mode" and
is not in "partial rollback" state.
The :class:`.Session` in its default mode of ``autocommit=False``
is essentially always in "transaction mode", in that a
:class:`.SessionTransaction` is associated with it as soon as
it is instantiated. This :class:`.SessionTransaction` is immediately
replaced with a new one as soon as it is ended, due to a rollback,
commit, or close operation.
"Transaction mode" does *not* indicate whether
or not actual database connection resources are in use; the
:class:`.SessionTransaction` object coordinates among zero or more
actual database transactions, and starts out with none, accumulating
individual DBAPI connections as different data sources are used
within its scope. The best way to track when a particular
:class:`.Session` has actually begun to use DBAPI resources is to
implement a listener using the :meth:`.SessionEvents.after_begin`
method, which will deliver both the :class:`.Session` as well as the
target :class:`_engine.Connection` to a user-defined event listener.
The "partial rollback" state refers to when an "inner" transaction,
typically used during a flush, encounters an error and emits a
rollback of the DBAPI connection. At this point, the
:class:`.Session` is in "partial rollback" and awaits for the user to
call :meth:`.Session.rollback`, in order to close out the
transaction stack. It is in this "partial rollback" period that the
:attr:`.is_active` flag returns False. After the call to
:meth:`.Session.rollback`, the :class:`.SessionTransaction` is
replaced with a new one and :attr:`.is_active` returns ``True`` again.
When a :class:`.Session` is used in ``autocommit=True`` mode, the
:class:`.SessionTransaction` is only instantiated within the scope
of a flush call, or when :meth:`.Session.begin` is called. So
:attr:`.is_active` will always be ``False`` outside of a flush or
:meth:`.Session.begin` block in this mode, and will be ``True``
within the :meth:`.Session.begin` block as long as it doesn't enter
"partial rollback" state.
From all the above, it follows that the only purpose to this flag is
for application frameworks that wish to detect if a "rollback" is
necessary within a generic error handling routine, for
:class:`.Session` objects that would otherwise be in
"partial rollback" mode. In a typical integration case, this is also
not necessary as it is standard practice to emit
:meth:`.Session.rollback` unconditionally within the outermost
exception catch.
To track the transactional state of a :class:`.Session` fully,
use event listeners, primarily the :meth:`.SessionEvents.after_begin`,
:meth:`.SessionEvents.after_commit`,
:meth:`.SessionEvents.after_rollback` and related events.
"""
return self.transaction and self.transaction.is_active
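    # Hedged usage sketch: a generic error handler that only rolls back when
    # the session is awaiting a rollback (names are illustrative).
    #
    #     if not session.is_active:
    #         session.rollback()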
identity_map = None
"""A mapping of object identities to objects themselves.
Iterating through ``Session.identity_map.values()`` provides
access to the full set of persistent objects (i.e., those
that have row identity) currently in the session.
.. seealso::
:func:`.identity_key` - helper function to produce the keys used
in this dictionary.
"""
@property
def _dirty_states(self):
"""The set of all persistent states considered dirty.
This method returns all states that were modified including
those that were possibly deleted.
"""
return self.identity_map._dirty_states()
@property
def dirty(self):
"""The set of all persistent instances considered dirty.
E.g.::
some_mapped_object in session.dirty
Instances are considered dirty when they were modified but not
deleted.
Note that this 'dirty' calculation is 'optimistic'; most
attribute-setting or collection modification operations will
mark an instance as 'dirty' and place it in this set, even if
there is no net change to the attribute's value. At flush
time, the value of each attribute is compared to its
previously saved value, and if there's no net change, no SQL
operation will occur (this is a more expensive operation so
it's only done at flush time).
To check if an instance has actionable net changes to its
attributes, use the :meth:`.Session.is_modified` method.
"""
return util.IdentitySet(
[
state.obj()
for state in self._dirty_states
if state not in self._deleted
]
)
@property
def deleted(self):
"The set of all instances marked as 'deleted' within this ``Session``"
return util.IdentitySet(list(self._deleted.values()))
@property
def new(self):
"The set of all instances marked as 'new' within this ``Session``."
return util.IdentitySet(list(self._new.values()))
class sessionmaker(_SessionClassMethods):
"""A configurable :class:`.Session` factory.
The :class:`.sessionmaker` factory generates new
:class:`.Session` objects when called, creating them given
the configurational arguments established here.
e.g.::
# global scope
Session = sessionmaker(autoflush=False)
# later, in a local scope, create and use a session:
sess = Session()
Any keyword arguments sent to the constructor itself will override the
"configured" keywords::
Session = sessionmaker()
# bind an individual session to a connection
sess = Session(bind=connection)
The class also includes a method :meth:`.configure`, which can
be used to specify additional keyword arguments to the factory, which
will take effect for subsequent :class:`.Session` objects generated.
This is usually used to associate one or more :class:`_engine.Engine`
objects
with an existing :class:`.sessionmaker` factory before it is first
used::
# application starts
Session = sessionmaker()
# ... later
engine = create_engine('sqlite:///foo.db')
Session.configure(bind=engine)
sess = Session()
.. seealso::
:ref:`session_getting` - introductory text on creating
sessions using :class:`.sessionmaker`.
"""
def __init__(
self,
bind=None,
class_=Session,
autoflush=True,
autocommit=False,
expire_on_commit=True,
info=None,
**kw
):
r"""Construct a new :class:`.sessionmaker`.
All arguments here except for ``class_`` correspond to arguments
accepted by :class:`.Session` directly. See the
:meth:`.Session.__init__` docstring for more details on parameters.
:param bind: a :class:`_engine.Engine` or other :class:`.Connectable`
with
which newly created :class:`.Session` objects will be associated.
:param class\_: class to use in order to create new :class:`.Session`
objects. Defaults to :class:`.Session`.
:param autoflush: The autoflush setting to use with newly created
:class:`.Session` objects.
:param autocommit: The autocommit setting to use with newly created
:class:`.Session` objects.
:param expire_on_commit=True: the
:paramref:`_orm.Session.expire_on_commit` setting to use
with newly created :class:`.Session` objects.
:param info: optional dictionary of information that will be available
via :attr:`.Session.info`. Note this dictionary is *updated*, not
replaced, when the ``info`` parameter is specified to the specific
:class:`.Session` construction operation.
.. versionadded:: 0.9.0
:param \**kw: all other keyword arguments are passed to the
constructor of newly created :class:`.Session` objects.
"""
kw["bind"] = bind
kw["autoflush"] = autoflush
kw["autocommit"] = autocommit
kw["expire_on_commit"] = expire_on_commit
if info is not None:
kw["info"] = info
self.kw = kw
# make our own subclass of the given class, so that
# events can be associated with it specifically.
self.class_ = type(class_.__name__, (class_,), {})
def __call__(self, **local_kw):
"""Produce a new :class:`.Session` object using the configuration
established in this :class:`.sessionmaker`.
In Python, the ``__call__`` method is invoked on an object when
it is "called" in the same way as a function::
Session = sessionmaker()
session = Session() # invokes sessionmaker.__call__()
"""
for k, v in self.kw.items():
if k == "info" and "info" in local_kw:
d = v.copy()
d.update(local_kw["info"])
local_kw["info"] = d
else:
local_kw.setdefault(k, v)
return self.class_(**local_kw)
def configure(self, **new_kw):
"""(Re)configure the arguments for this sessionmaker.
e.g.::
Session = sessionmaker()
Session.configure(bind=create_engine('sqlite://'))
"""
self.kw.update(new_kw)
def __repr__(self):
return "%s(class_=%r, %s)" % (
self.__class__.__name__,
self.class_.__name__,
", ".join("%s=%r" % (k, v) for k, v in self.kw.items()),
)
def close_all_sessions():
"""Close all sessions in memory.
This function consults a global registry of all :class:`.Session` objects
and calls :meth:`.Session.close` on them, which resets them to a clean
state.
This function is not for general use but may be useful for test suites
within the teardown scheme.
.. versionadded:: 1.3
"""
for sess in _sessions.values():
sess.close()
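# Hedged usage sketch, e.g. as a test-suite teardown (the hook name is an
# assumption about the test framework in use):
#
#     def teardown_module():
#         close_all_sessions()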
def make_transient(instance):
"""Alter the state of the given instance so that it is :term:`transient`.
.. note::
:func:`.make_transient` is a special-case function for
advanced use cases only.
The given mapped instance is assumed to be in the :term:`persistent` or
:term:`detached` state. The function will remove its association with any
:class:`.Session` as well as its :attr:`.InstanceState.identity`. The
effect is that the object will behave as though it were newly constructed,
except retaining any attribute / collection values that were loaded at the
time of the call. The :attr:`.InstanceState.deleted` flag is also reset
if this object had been deleted as a result of using
:meth:`.Session.delete`.
.. warning::
:func:`.make_transient` does **not** "unexpire" or otherwise eagerly
load ORM-mapped attributes that are not currently loaded at the time
the function is called. This includes attributes which:
* were expired via :meth:`.Session.expire`
* were expired as the natural effect of committing a session
transaction, e.g. :meth:`.Session.commit`
* are normally :term:`lazy loaded` but are not currently loaded
* are "deferred" via :ref:`deferred` and are not yet loaded
* were not present in the query which loaded this object, such as that
which is common in joined table inheritance and other scenarios.
After :func:`.make_transient` is called, unloaded attributes such
as those above will normally resolve to the value ``None`` when
accessed, or an empty collection for a collection-oriented attribute.
As the object is transient and un-associated with any database
identity, it will no longer retrieve these values.
.. seealso::
:func:`.make_transient_to_detached`
"""
state = attributes.instance_state(instance)
s = _state_session(state)
if s:
s._expunge_states([state])
# remove expired state
state.expired_attributes.clear()
# remove deferred callables
if state.callables:
del state.callables
if state.key:
del state.key
if state._deleted:
del state._deleted
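# Hedged usage sketch (``obj`` and ``session`` are assumed names): strip an
# object's identity so it can be re-inserted as a brand new row.
#
#     make_transient(obj)
#     obj.id = None          # assuming a surrogate integer key named ``id``
#     session.add(obj)
#     session.commit()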
def make_transient_to_detached(instance):
"""Make the given transient instance :term:`detached`.
.. note::
:func:`.make_transient_to_detached` is a special-case function for
advanced use cases only.
All attribute history on the given instance
will be reset as though the instance were freshly loaded
from a query. Missing attributes will be marked as expired.
The primary key attributes of the object, which are required, will be made
into the "key" of the instance.
The object can then be added to a session, or merged
possibly with the load=False flag, at which point it will look
as if it were loaded that way, without emitting SQL.
This is a special use case function that differs from a normal
call to :meth:`.Session.merge` in that a given persistent state
can be manufactured without any SQL calls.
.. versionadded:: 0.9.5
.. seealso::
:func:`.make_transient`
:meth:`.Session.enable_relationship_loading`
"""
state = attributes.instance_state(instance)
if state.session_id or state.key:
raise sa_exc.InvalidRequestError("Given object must be transient")
state.key = state.mapper._identity_key_from_state(state)
if state._deleted:
del state._deleted
state._commit_all(state.dict)
state._expire_attributes(state.dict, state.unloaded_expirable)
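# Hedged usage sketch (``User`` and ``session`` are assumed names): build a
# detached, persistent-looking object without any SELECT, then attach it.
#
#     obj = User(id=5, name="cached-name")    # primary key must be populated
#     make_transient_to_detached(obj)
#     session.add(obj)                        # no SQL is emitted here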
def object_session(instance):
"""Return the :class:`.Session` to which the given instance belongs.
This is essentially the same as the :attr:`.InstanceState.session`
accessor. See that attribute for details.
"""
try:
state = attributes.instance_state(instance)
except exc.NO_STATE as err:
util.raise_(
exc.UnmappedInstanceError(instance),
replace_context=err,
)
else:
return _state_session(state)
_new_sessionid = util.counter()
| avg_line_length: 37.941549 | max_line_length: 79 | alphanum_fraction: 0.611804 |
hexsha: 084c6c9f212abe1ee4f1b0134832af80d9d67817 | size: 1938 | ext: py | lang: Python
max_stars_repo_path: Tools/Recon/Network/vulnerability_scanner.py | max_stars_repo_name: Apollo-o/Whistle | max_stars_repo_head_hexsha: f6df3b67be81fe36f0ecb8b4831bc5dc9cdc4a52 | max_stars_repo_licenses: ["CC0-1.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: Tools/Recon/Network/vulnerability_scanner.py | max_issues_repo_name: Apollo-o/Whistle | max_issues_repo_head_hexsha: f6df3b67be81fe36f0ecb8b4831bc5dc9cdc4a52 | max_issues_repo_licenses: ["CC0-1.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: Tools/Recon/Network/vulnerability_scanner.py | max_forks_repo_name: Apollo-o/Whistle | max_forks_repo_head_hexsha: f6df3b67be81fe36f0ecb8b4831bc5dc9cdc4a52 | max_forks_repo_licenses: ["CC0-1.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# Author: o-o
# Date: 8/24/2020
# Description: A Simple Vulnerability Scanner.
import random
import nmap
import os
# Vulnerability Scanner.
# Precondition: None.
# Postcondition: Vulnerable Device.
def scanner():
# Scan IP Addresses.
nm = nmap.PortScannerAsync()
while True:
# Await Results.
def callback_result(host, scan_result):
# Scan Found.
if str(scan_result).find("\'scan\'") != -1:
scan_1 = scan_result["scan"][host]
scan_2 = str(scan_1)
# Alive & Vulnerable.
if scan_1["status"]["state"] == "up" and "CVE" in scan_2:
# Local Variables.
color = "\033[0;33;20m"
default = "\033[0;0;0m"
name = scan_2[len("\'product\':") + scan_2.find("\'product\':"):].split("\'")[1]
# Save Data.
with open("devices.txt","a") as writer:
writer.write("\nIP Address: {}\n".format(host))
writer.write("Device: {}\n".format(name))
# Display Data.
print("{}IP Address{}\n{}\n".format(color,default,host))
print("{}Device{}\n{}\n".format(color,default,name))
print("{}Ports{}\nOpen\n".format(color,default))
print("{}Security{}\nVulnerable\n".format(color,default))
print("{}Scan{}".format(color,default))
os.system("pkill -f vulnerability_scanner.py")
# Scan IP Address.
nm.scan("{}.{}.{}.{}".format(random.randint(1,254),random.randint(1,254),random.randint(1,254),random.randint(1,254)), arguments="-T4 -Pn -F -sV --script=vuln,vulners", callback=callback_result)
# Until Complete.
while nm.still_scanning():
nm.wait(2)
# Start Scan.
scanner()
| avg_line_length: 34.607143 | max_line_length: 202 | alphanum_fraction: 0.509804 |
hexsha: d6ed8246560e9a927b6391f2f98eeb841ae2218a | size: 4090 | ext: py | lang: Python
max_stars_repo_path: dfirtrack_main/tests/taskstatus/test_taskstatus_views.py | max_stars_repo_name: blackhatethicalhacking/dfirtrack | max_stars_repo_head_hexsha: 9c2e13015291f2981d14d63c9683e7c447e91f3a | max_stars_repo_licenses: ["MIT"] | max_stars_count: 4 | max_stars_repo_stars_event_min_datetime: 2020-03-06T17:37:09.000Z | max_stars_repo_stars_event_max_datetime: 2020-03-17T07:50:55.000Z
max_issues_repo_path: dfirtrack_main/tests/taskstatus/test_taskstatus_views.py | max_issues_repo_name: blackhatethicalhacking/dfirtrack | max_issues_repo_head_hexsha: 9c2e13015291f2981d14d63c9683e7c447e91f3a | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: dfirtrack_main/tests/taskstatus/test_taskstatus_views.py | max_forks_repo_name: blackhatethicalhacking/dfirtrack | max_forks_repo_head_hexsha: 9c2e13015291f2981d14d63c9683e7c447e91f3a | max_forks_repo_licenses: ["MIT"] | max_forks_count: 1 | max_forks_repo_forks_event_min_datetime: 2020-03-06T20:54:52.000Z | max_forks_repo_forks_event_max_datetime: 2020-03-06T20:54:52.000Z
content:
from django.contrib.auth.models import User
from django.test import TestCase
from dfirtrack_main.models import Taskstatus
import urllib.parse
class TaskstatusViewTestCase(TestCase):
""" taskstatus view tests """
@classmethod
def setUpTestData(cls):
# create object
Taskstatus.objects.create(taskstatus_name='taskstatus_1')
# create user
test_user = User.objects.create_user(username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9')
def test_taskstatuss_list_not_logged_in(self):
""" test list view """
# create url
destination = '/login/?next=' + urllib.parse.quote('/taskstatuss/', safe='')
# get response
response = self.client.get('/taskstatuss/', follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_taskstatuss_list_logged_in(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9')
# get response
response = self.client.get('/taskstatuss/')
# compare
self.assertEqual(response.status_code, 200)
def test_taskstatuss_list_template(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9')
# get response
response = self.client.get('/taskstatuss/')
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/taskstatus/taskstatuss_list.html')
def test_taskstatuss_list_get_user_context(self):
""" test list view """
# login testuser
login = self.client.login(username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9')
# get response
response = self.client.get('/taskstatuss/')
# compare
self.assertEqual(str(response.context['user']), 'testuser_taskstatus')
def test_taskstatuss_detail_not_logged_in(self):
""" test detail view """
# get object
taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
# create url
destination = '/login/?next=' + urllib.parse.quote('/taskstatuss/' + str(taskstatus_1.taskstatus_id), safe='')
# get response
response = self.client.get('/taskstatuss/' + str(taskstatus_1.taskstatus_id), follow=True)
# compare
self.assertRedirects(response, destination, status_code=302, target_status_code=200)
def test_taskstatuss_detail_logged_in(self):
""" test detail view """
# get object
taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
# login testuser
login = self.client.login(username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9')
# get response
response = self.client.get('/taskstatuss/' + str(taskstatus_1.taskstatus_id))
# compare
self.assertEqual(response.status_code, 200)
def test_taskstatuss_detail_template(self):
""" test detail view """
# get object
taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
# login testuser
login = self.client.login(username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9')
# get response
response = self.client.get('/taskstatuss/' + str(taskstatus_1.taskstatus_id))
# compare
self.assertTemplateUsed(response, 'dfirtrack_main/taskstatus/taskstatuss_detail.html')
def test_taskstatuss_detail_get_user_context(self):
""" test detail view """
# get object
taskstatus_1 = Taskstatus.objects.get(taskstatus_name='taskstatus_1')
# login testuser
login = self.client.login(username='testuser_taskstatus', password='TZjmjiUQviOnIEral6l9')
# get response
response = self.client.get('/taskstatuss/' + str(taskstatus_1.taskstatus_id))
# compare
self.assertEqual(str(response.context['user']), 'testuser_taskstatus')
| avg_line_length: 39.326923 | max_line_length: 118 | alphanum_fraction: 0.675306 |
hexsha: 62841e254fabfa0cca5714a96165dda0f1c43ccb | size: 225 | ext: py | lang: Python
max_stars_repo_path: openinterface/__init__.py | max_stars_repo_name: uibcdf/Pynterpred | max_stars_repo_head_hexsha: 05fc100712b9fa894d604fb82e0a0fcd488dccc7 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2020-06-02T04:02:15.000Z | max_stars_repo_stars_event_max_datetime: 2020-06-02T04:02:15.000Z
max_issues_repo_path: openinterface/__init__.py | max_issues_repo_name: uibcdf/OpenInterface | max_issues_repo_head_hexsha: 05fc100712b9fa894d604fb82e0a0fcd488dccc7 | max_issues_repo_licenses: ["MIT"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2018-05-14T15:27:00.000Z | max_issues_repo_issues_event_max_datetime: 2018-05-14T15:29:23.000Z
max_forks_repo_path: openinterface/__init__.py | max_forks_repo_name: uibcdf/OpenInterface | max_forks_repo_head_hexsha: 05fc100712b9fa894d604fb82e0a0fcd488dccc7 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from .methods import buried_factors
from .methods import contact_map
from .methods import connectivity
from .methods import nearby_connectivity
from .methods import contacting_residues
from .methods import sasa, sasa_buried
| avg_line_length: 28.125 | max_line_length: 40 | alphanum_fraction: 0.853333 |
hexsha: 16ae2dea0fb354931eef2afeb6edbb7c9337a1af | size: 3801 | ext: py | lang: Python
max_stars_repo_path: action/example/linear.py | max_stars_repo_name: JayMiao/MLAction | max_stars_repo_head_hexsha: fec1c08fa33ed1f5d9b0befecc6dac551cc02302 | max_stars_repo_licenses: ["MIT"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2017-02-13T10:25:11.000Z | max_stars_repo_stars_event_max_datetime: 2017-02-13T10:25:11.000Z
max_issues_repo_path: action/example/linear.py | max_issues_repo_name: JayMiao/MLAction | max_issues_repo_head_hexsha: fec1c08fa33ed1f5d9b0befecc6dac551cc02302 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: action/example/linear.py | max_forks_repo_name: JayMiao/MLAction | max_forks_repo_head_hexsha: fec1c08fa33ed1f5d9b0befecc6dac551cc02302 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
# -*- coding: utf-8 -*-
import logging
from autograd import grad
import autograd.numpy as np
from sklearn.model_selection import train_test_split
from sklearn.datasets import make_classification
# Change to DEBUG to see convergence
logging.basicConfig(level=logging.ERROR)
np.random.seed(1000)
EPS = 1e-15
def unhot(function):
"""Convert one-hot representation into one column."""
def wrapper(actual, predicted):
if len(actual.shape) > 1 and actual.shape[1] > 1:
actual = actual.argmax(axis=1)
if len(predicted.shape) > 1 and predicted.shape[1] > 1:
predicted = predicted.argmax(axis=1)
return function(actual, predicted)
return wrapper
@unhot
def classification_error(actual, predicted):
return (actual != predicted).sum() / float(actual.shape[0])
@unhot
def accuracy(actual, predicted):
return 1.0 - classification_error(actual, predicted)
def binary_crossentropy(actual, predicted):
predicted = np.clip(predicted, EPS, 1 - EPS)
return np.mean(-np.sum(actual * np.log(predicted) + (1 - actual) * np.log(1 - predicted)))
class LogisticRegression():
def __init__(self, lr=0.001, C=0.01, tolerance=0.0001, max_iters=1000):
self.C = C
self.tolerance = tolerance
self.lr = lr
self.max_iters = max_iters
self.errors = []
self.theta = []
self.n_samples, self.n_features = None, None
self.cost_func = binary_crossentropy
def _loss(self, w):
prediction = np.dot(self.X, w)
prediction = self.sigmoid(prediction)
loss = self.cost_func(self.y, prediction)
loss += (0.5 * self.C) * np.linalg.norm(w[:-1])
return loss
def _cost(self, theta):
prediction = np.dot(self.X, theta)
error = self.cost_func(self.y, prediction)
return error
def fit(self, X, y=None):
self.X = X
self.y = y
self.n_samples, self.n_features = X.shape
# Initialize weights + bias term
self.theta = np.ones(self.n_features + 1)
# Add an intercept column
self.X = self._add_intercept(self.X)
self.theta, self.errors = self._gradient_descent()
logging.info(' Theta: %s' % self.theta.flatten())
@staticmethod
def _add_intercept(X):
b = np.ones([X.shape[0], 1])
return np.concatenate([b, X], axis=1)
@staticmethod
def sigmoid(x):
return 0.5 * (np.tanh(x) + 1)
def predict(self, X=None):
X = self._add_intercept(X)
return self.sigmoid(X.dot(self.theta))
def _gradient_descent(self):
theta = self.theta
errors = [self._cost(theta)]
for i in range(1, self.max_iters + 1):
# Get derivative of the loss function
cost_d = grad(self._loss)
# Calculate gradient and update theta
delta = cost_d(theta)
theta -= self.lr * delta
errors.append(self._cost(theta))
logging.info('Iteration %s, error %s' % (i, errors[i]))
error_diff = np.linalg.norm(errors[i - 1] - errors[i])
if error_diff < self.tolerance:
logging.info('Convergence has reached.')
break
return theta, errors
if __name__ == '__main__':
logging.basicConfig(level=logging.ERROR)
X, y = make_classification(n_samples=1000, n_features=100, n_informative=75, random_state=1111, n_classes=2,
class_sep=2.5, )
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.1, random_state=1111)
model = LogisticRegression(lr=0.01, max_iters=500, C=0.01)
model.fit(X_train, y_train)
predictions = model.predict(X_test)
print('classification accuracy', accuracy(y_test, predictions))
| avg_line_length: 31.941176 | max_line_length: 112 | alphanum_fraction: 0.624836 |
hexsha: f395127902101da886fe54862d539467df3e8468 | size: 15860 | ext: py | lang: Python
max_stars_repo_path: pandas/io/excel/_openpyxl.py | max_stars_repo_name: ylin00/pandas | max_stars_repo_head_hexsha: dc1c849f0cf4f1b8c5602a54f5231f6b57d1d913 | max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 1 | max_stars_repo_stars_event_min_datetime: 2020-10-08T00:29:41.000Z | max_stars_repo_stars_event_max_datetime: 2020-10-08T00:29:41.000Z
max_issues_repo_path: pandas/io/excel/_openpyxl.py | max_issues_repo_name: ylin00/pandas | max_issues_repo_head_hexsha: dc1c849f0cf4f1b8c5602a54f5231f6b57d1d913 | max_issues_repo_licenses: ["BSD-3-Clause"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: pandas/io/excel/_openpyxl.py | max_forks_repo_name: ylin00/pandas | max_forks_repo_head_hexsha: dc1c849f0cf4f1b8c5602a54f5231f6b57d1d913 | max_forks_repo_licenses: ["BSD-3-Clause"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from typing import TYPE_CHECKING, Dict, List, Optional
import numpy as np
from pandas._typing import FilePathOrBuffer, Scalar, StorageOptions
from pandas.compat._optional import import_optional_dependency
from pandas.io.excel._base import BaseExcelReader, ExcelWriter
from pandas.io.excel._util import validate_freeze_panes
if TYPE_CHECKING:
from openpyxl.descriptors.serialisable import Serialisable
class _OpenpyxlWriter(ExcelWriter):
engine = "openpyxl"
supported_extensions = (".xlsx", ".xlsm")
def __init__(self, path, engine=None, mode="w", **engine_kwargs):
# Use the openpyxl module as the Excel writer.
from openpyxl.workbook import Workbook
super().__init__(path, mode=mode, **engine_kwargs)
if self.mode == "a": # Load from existing workbook
from openpyxl import load_workbook
self.book = load_workbook(self.path)
else:
# Create workbook object with default optimized_write=True.
self.book = Workbook()
if self.book.worksheets:
self.book.remove(self.book.worksheets[0])
def save(self):
"""
Save workbook to disk.
"""
self.book.save(self.path)
@classmethod
def _convert_to_style_kwargs(cls, style_dict: dict) -> Dict[str, "Serialisable"]:
"""
Convert a style_dict to a set of kwargs suitable for initializing
or updating-on-copy an openpyxl v2 style object.
Parameters
----------
style_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'font'
'fill'
'border' ('borders')
'alignment'
'number_format'
'protection'
Returns
-------
style_kwargs : dict
A dict with the same, normalized keys as ``style_dict`` but each
value has been replaced with a native openpyxl style object of the
appropriate class.
"""
_style_key_map = {"borders": "border"}
style_kwargs: Dict[str, Serialisable] = {}
for k, v in style_dict.items():
if k in _style_key_map:
k = _style_key_map[k]
_conv_to_x = getattr(cls, f"_convert_to_{k}", lambda x: None)
new_v = _conv_to_x(v)
if new_v:
style_kwargs[k] = new_v
return style_kwargs
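    # Hedged illustration (not part of pandas): a ``style_dict`` such as the
    # one below is converted into openpyxl keyword arguments by this
    # classmethod.
    #
    #     kwargs = _OpenpyxlWriter._convert_to_style_kwargs(
    #         {"font": {"bold": True}, "borders": {"top": {"style": "thin"}}}
    #     )
    #     # -> {"font": Font(bold=True), "border": Border(top=Side(style="thin"))}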
@classmethod
def _convert_to_color(cls, color_spec):
"""
Convert ``color_spec`` to an openpyxl v2 Color object.
Parameters
----------
color_spec : str, dict
A 32-bit ARGB hex string, or a dict with zero or more of the
following keys.
'rgb'
'indexed'
'auto'
'theme'
'tint'
'index'
'type'
Returns
-------
color : openpyxl.styles.Color
"""
from openpyxl.styles import Color
if isinstance(color_spec, str):
return Color(color_spec)
else:
return Color(**color_spec)
@classmethod
def _convert_to_font(cls, font_dict):
"""
Convert ``font_dict`` to an openpyxl v2 Font object.
Parameters
----------
font_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'name'
'size' ('sz')
'bold' ('b')
'italic' ('i')
'underline' ('u')
'strikethrough' ('strike')
'color'
'vertAlign' ('vertalign')
'charset'
'scheme'
'family'
'outline'
'shadow'
'condense'
Returns
-------
font : openpyxl.styles.Font
"""
from openpyxl.styles import Font
_font_key_map = {
"sz": "size",
"b": "bold",
"i": "italic",
"u": "underline",
"strike": "strikethrough",
"vertalign": "vertAlign",
}
font_kwargs = {}
for k, v in font_dict.items():
if k in _font_key_map:
k = _font_key_map[k]
if k == "color":
v = cls._convert_to_color(v)
font_kwargs[k] = v
return Font(**font_kwargs)
@classmethod
def _convert_to_stop(cls, stop_seq):
"""
Convert ``stop_seq`` to a list of openpyxl v2 Color objects,
suitable for initializing the ``GradientFill`` ``stop`` parameter.
Parameters
----------
stop_seq : iterable
An iterable that yields objects suitable for consumption by
``_convert_to_color``.
Returns
-------
stop : list of openpyxl.styles.Color
"""
return map(cls._convert_to_color, stop_seq)
@classmethod
def _convert_to_fill(cls, fill_dict):
"""
Convert ``fill_dict`` to an openpyxl v2 Fill object.
Parameters
----------
fill_dict : dict
A dict with one or more of the following keys (or their synonyms),
'fill_type' ('patternType', 'patterntype')
'start_color' ('fgColor', 'fgcolor')
'end_color' ('bgColor', 'bgcolor')
or one or more of the following keys (or their synonyms).
'type' ('fill_type')
'degree'
'left'
'right'
'top'
'bottom'
'stop'
Returns
-------
fill : openpyxl.styles.Fill
"""
from openpyxl.styles import GradientFill, PatternFill
_pattern_fill_key_map = {
"patternType": "fill_type",
"patterntype": "fill_type",
"fgColor": "start_color",
"fgcolor": "start_color",
"bgColor": "end_color",
"bgcolor": "end_color",
}
_gradient_fill_key_map = {"fill_type": "type"}
pfill_kwargs = {}
gfill_kwargs = {}
for k, v in fill_dict.items():
pk = gk = None
if k in _pattern_fill_key_map:
pk = _pattern_fill_key_map[k]
if k in _gradient_fill_key_map:
gk = _gradient_fill_key_map[k]
if pk in ["start_color", "end_color"]:
v = cls._convert_to_color(v)
if gk == "stop":
v = cls._convert_to_stop(v)
if pk:
pfill_kwargs[pk] = v
elif gk:
gfill_kwargs[gk] = v
else:
pfill_kwargs[k] = v
gfill_kwargs[k] = v
try:
return PatternFill(**pfill_kwargs)
except TypeError:
return GradientFill(**gfill_kwargs)
@classmethod
def _convert_to_side(cls, side_spec):
"""
Convert ``side_spec`` to an openpyxl v2 Side object.
Parameters
----------
side_spec : str, dict
A string specifying the border style, or a dict with zero or more
of the following keys (or their synonyms).
'style' ('border_style')
'color'
Returns
-------
side : openpyxl.styles.Side
"""
from openpyxl.styles import Side
_side_key_map = {"border_style": "style"}
if isinstance(side_spec, str):
return Side(style=side_spec)
side_kwargs = {}
for k, v in side_spec.items():
if k in _side_key_map:
k = _side_key_map[k]
if k == "color":
v = cls._convert_to_color(v)
side_kwargs[k] = v
return Side(**side_kwargs)
@classmethod
def _convert_to_border(cls, border_dict):
"""
Convert ``border_dict`` to an openpyxl v2 Border object.
Parameters
----------
border_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'left'
'right'
'top'
'bottom'
'diagonal'
'diagonal_direction'
'vertical'
'horizontal'
'diagonalUp' ('diagonalup')
'diagonalDown' ('diagonaldown')
'outline'
Returns
-------
border : openpyxl.styles.Border
"""
from openpyxl.styles import Border
_border_key_map = {"diagonalup": "diagonalUp", "diagonaldown": "diagonalDown"}
border_kwargs = {}
for k, v in border_dict.items():
if k in _border_key_map:
k = _border_key_map[k]
if k == "color":
v = cls._convert_to_color(v)
if k in ["left", "right", "top", "bottom", "diagonal"]:
v = cls._convert_to_side(v)
border_kwargs[k] = v
return Border(**border_kwargs)
@classmethod
def _convert_to_alignment(cls, alignment_dict):
"""
Convert ``alignment_dict`` to an openpyxl v2 Alignment object.
Parameters
----------
alignment_dict : dict
A dict with zero or more of the following keys (or their synonyms).
'horizontal'
'vertical'
'text_rotation'
'wrap_text'
'shrink_to_fit'
'indent'
Returns
-------
alignment : openpyxl.styles.Alignment
"""
from openpyxl.styles import Alignment
return Alignment(**alignment_dict)
@classmethod
def _convert_to_number_format(cls, number_format_dict):
"""
Convert ``number_format_dict`` to an openpyxl v2.1.0 number format
initializer.
Parameters
----------
number_format_dict : dict
A dict with zero or more of the following keys.
'format_code' : str
Returns
-------
number_format : str
"""
return number_format_dict["format_code"]
@classmethod
def _convert_to_protection(cls, protection_dict):
"""
Convert ``protection_dict`` to an openpyxl v2 Protection object.
Parameters
----------
protection_dict : dict
A dict with zero or more of the following keys.
'locked'
'hidden'
Returns
-------
"""
from openpyxl.styles import Protection
return Protection(**protection_dict)
def write_cells(
self, cells, sheet_name=None, startrow=0, startcol=0, freeze_panes=None
):
# Write the frame cells using openpyxl.
sheet_name = self._get_sheet_name(sheet_name)
_style_cache: Dict[str, Dict[str, Serialisable]] = {}
if sheet_name in self.sheets:
wks = self.sheets[sheet_name]
else:
wks = self.book.create_sheet()
wks.title = sheet_name
self.sheets[sheet_name] = wks
if validate_freeze_panes(freeze_panes):
wks.freeze_panes = wks.cell(
row=freeze_panes[0] + 1, column=freeze_panes[1] + 1
)
for cell in cells:
xcell = wks.cell(
row=startrow + cell.row + 1, column=startcol + cell.col + 1
)
xcell.value, fmt = self._value_with_fmt(cell.val)
if fmt:
xcell.number_format = fmt
style_kwargs: Optional[Dict[str, Serialisable]] = {}
if cell.style:
key = str(cell.style)
style_kwargs = _style_cache.get(key)
if style_kwargs is None:
style_kwargs = self._convert_to_style_kwargs(cell.style)
_style_cache[key] = style_kwargs
if style_kwargs:
for k, v in style_kwargs.items():
setattr(xcell, k, v)
if cell.mergestart is not None and cell.mergeend is not None:
wks.merge_cells(
start_row=startrow + cell.row + 1,
start_column=startcol + cell.col + 1,
end_column=startcol + cell.mergeend + 1,
end_row=startrow + cell.mergestart + 1,
)
# When cells are merged only the top-left cell is preserved
# The behaviour of the other cells in a merged range is
# undefined
if style_kwargs:
first_row = startrow + cell.row + 1
last_row = startrow + cell.mergestart + 1
first_col = startcol + cell.col + 1
last_col = startcol + cell.mergeend + 1
for row in range(first_row, last_row + 1):
for col in range(first_col, last_col + 1):
if row == first_row and col == first_col:
# Ignore first cell. It is already handled.
continue
xcell = wks.cell(column=col, row=row)
for k, v in style_kwargs.items():
setattr(xcell, k, v)
class _OpenpyxlReader(BaseExcelReader):
def __init__(
self,
filepath_or_buffer: FilePathOrBuffer,
storage_options: StorageOptions = None,
) -> None:
"""
Reader using openpyxl engine.
Parameters
----------
filepath_or_buffer : string, path object or Workbook
Object to be parsed.
storage_options : dict, optional
passed to fsspec for appropriate URLs (see ``get_filepath_or_buffer``)
"""
import_optional_dependency("openpyxl")
super().__init__(filepath_or_buffer, storage_options=storage_options)
@property
def _workbook_class(self):
from openpyxl import Workbook
return Workbook
def load_workbook(self, filepath_or_buffer: FilePathOrBuffer):
from openpyxl import load_workbook
return load_workbook(
filepath_or_buffer, read_only=True, data_only=True, keep_links=False
)
def close(self):
# https://stackoverflow.com/questions/31416842/
# openpyxl-does-not-close-excel-workbook-in-read-only-mode
self.book.close()
@property
def sheet_names(self) -> List[str]:
return self.book.sheetnames
def get_sheet_by_name(self, name: str):
return self.book[name]
def get_sheet_by_index(self, index: int):
return self.book.worksheets[index]
def _convert_cell(self, cell, convert_float: bool) -> Scalar:
from openpyxl.cell.cell import TYPE_BOOL, TYPE_ERROR, TYPE_NUMERIC
if cell.is_date:
return cell.value
elif cell.data_type == TYPE_ERROR:
return np.nan
elif cell.data_type == TYPE_BOOL:
return bool(cell.value)
elif cell.value is None:
return "" # compat with xlrd
elif cell.data_type == TYPE_NUMERIC:
# GH5394
if convert_float:
val = int(cell.value)
if val == cell.value:
return val
else:
return float(cell.value)
return cell.value
def get_sheet_data(self, sheet, convert_float: bool) -> List[List[Scalar]]:
data: List[List[Scalar]] = []
for row in sheet.rows:
data.append([self._convert_cell(cell, convert_float) for cell in row])
return data
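# Hedged usage note: these classes are normally reached through the public
# pandas API rather than instantiated directly (file names are assumed):
#
#     import pandas as pd
#     df = pd.read_excel("report.xlsx", engine="openpyxl")
#     df.to_excel("out.xlsx", engine="openpyxl")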
| avg_line_length: 30.676983 | max_line_length: 86 | alphanum_fraction: 0.527112 |
hexsha: 4e102321fc7fcc789e03195eee31cd5296b57415 | size: 960 | ext: py | lang: Python
max_stars_repo_path: evolving_NN/task2.py | max_stars_repo_name: gister9000/Combining-fuzzy-logic-neural-networks-and-genetic-algorithm | max_stars_repo_head_hexsha: 918431f54898592efc877af347a780bcfce50835 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: evolving_NN/task2.py | max_issues_repo_name: gister9000/Combining-fuzzy-logic-neural-networks-and-genetic-algorithm | max_issues_repo_head_hexsha: 918431f54898592efc877af347a780bcfce50835 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: evolving_NN/task2.py | max_forks_repo_name: gister9000/Combining-fuzzy-logic-neural-networks-and-genetic-algorithm | max_forks_repo_head_hexsha: 918431f54898592efc877af347a780bcfce50835 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
import numpy as np
import matplotlib.pyplot as plt
# Neural network dataset preparation
# inputs: x, y
# outputs: 100 | 010 | 001
lines = open("zad7-dataset.txt", "r").readlines()
class_100x = []
class_010x = []
class_001x = []
class_100y = []
class_010y = []
class_001y = []
for line in lines:
values = line.replace("\n","").split("\t")
if values[2] == '1':
class_100x.append( float(values[0]) )
class_100y.append( float(values[1]) )
elif values[3] == '1':
class_010x.append( float(values[0]) )
class_010y.append( float(values[1]) )
elif values[4] == '1':
class_001x.append( float(values[0]) )
class_001y.append( float(values[1]) )
else:
raise Exception("invalid example\n" + line)
plt.scatter(class_100x, class_100y, label='class 100')
plt.scatter(class_010x, class_010y, label='class 010')
plt.scatter(class_001x, class_001y, label='class 001')
plt.legend(loc='best')
plt.show()
| avg_line_length: 29.090909 | max_line_length: 54 | alphanum_fraction: 0.644792 |
hexsha: 44cca340b0ba7220fb7815c5d7f0abd73b8a5877 | size: 2352 | ext: py | lang: Python
max_stars_repo_path: virtual/lib/python3.6/site-packages/django_settings_export.py | max_stars_repo_name: Irene-nandy/hello_gallery | max_stars_repo_head_hexsha: 964f85828aba20ef795f8c9d826ba08a8c688462 | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: virtual/lib/python3.6/site-packages/django_settings_export.py | max_issues_repo_name: Irene-nandy/hello_gallery | max_issues_repo_head_hexsha: 964f85828aba20ef795f8c9d826ba08a8c688462 | max_issues_repo_licenses: ["MIT"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: virtual/lib/python3.6/site-packages/django_settings_export.py | max_forks_repo_name: Irene-nandy/hello_gallery | max_forks_repo_head_hexsha: 964f85828aba20ef795f8c9d826ba08a8c688462 | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
"""
Export Django settings to templates
https://github.com/jkbrzt/django-settings-export
"""
from django.conf import settings as django_settings
from django.core.exceptions import ImproperlyConfigured
__version__ = '1.2.1'
VARIABLE_NAME = getattr(django_settings,
'SETTINGS_EXPORT_VARIABLE_NAME',
'settings')
class SettingsExportError(ImproperlyConfigured):
"""Base error indicating misconfiguration."""
class UndefinedSettingError(SettingsExportError):
"""An undefined setting name included in SETTINGS_EXPORT."""
class UnexportedSettingError(SettingsExportError):
"""An unexported setting has been accessed from a template."""
def settings_export(request):
"""
The template context processor that adds settings defined in
`SETTINGS_EXPORT` to the context. If SETTINGS_EXPORT_VARIABLE_NAME is not
set, the context variable will be `settings`.
"""
variable_name = getattr(django_settings,
'SETTINGS_EXPORT_VARIABLE_NAME',
'settings')
return {
variable_name: _get_exported_settings()
}
class ExportedSettings(dict):
def __getitem__(self, item):
"""Fail loudly if accessing a setting that is not exported."""
try:
return super(ExportedSettings, self).__getitem__(item)
except KeyError:
if hasattr(self, item):
# Let the KeyError propagate so that Django templates
# can access the existing attribute (e.g. `items()`).
raise
raise UnexportedSettingError(
'The `{key}` setting key is not accessible'
' from templates: add "{key}" to'
' `settings.SETTINGS_EXPORT` to change that.'
.format(key=item)
)
def _get_exported_settings():
exported_settings = ExportedSettings()
for key in getattr(django_settings, 'SETTINGS_EXPORT', []):
try:
value = getattr(django_settings, key)
except AttributeError:
raise UndefinedSettingError(
'"settings.%s" is included in settings.SETTINGS_EXPORT '
'but it does not exist. '
% key
)
exported_settings[key] = value
return exported_settings
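# Hedged configuration sketch (names other than SETTINGS_EXPORT and the
# context processor path are assumptions): in settings.py, register the
# context processor and list the settings to expose, then use
# ``{{ settings.MY_SETTING }}`` in templates.
#
#     TEMPLATES = [{
#         "BACKEND": "django.template.backends.django.DjangoTemplates",
#         "OPTIONS": {
#             "context_processors": [
#                 "django_settings_export.settings_export",
#             ],
#         },
#     }]
#     MY_SETTING = "exported value"
#     SETTINGS_EXPORT = ["MY_SETTING"]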
| avg_line_length: 29.772152 | max_line_length: 77 | alphanum_fraction: 0.630527 |
hexsha: 8376e4b804b72319a03fcddd8a01bb1e1f17ee13 | size: 9125 | ext: py | lang: Python
max_stars_repo_path: gui/mem_dixy/tag/alphabet.py | max_stars_repo_name: Mem-Dixy/web | max_stars_repo_head_hexsha: e5373ec86e3b7048c353071dc3555a96340b035e | max_stars_repo_licenses: ["MIT"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: gui/mem_dixy/tag/alphabet.py | max_issues_repo_name: Mem-Dixy/web | max_issues_repo_head_hexsha: e5373ec86e3b7048c353071dc3555a96340b035e | max_issues_repo_licenses: ["MIT"] | max_issues_count: 1 | max_issues_repo_issues_event_min_datetime: 2021-05-09T23:36:35.000Z | max_issues_repo_issues_event_max_datetime: 2021-05-09T23:36:35.000Z
max_forks_repo_path: gui/mem_dixy/tag/alphabet.py | max_forks_repo_name: Mem-Dixy/web | max_forks_repo_head_hexsha: e5373ec86e3b7048c353071dc3555a96340b035e | max_forks_repo_licenses: ["MIT"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from mem_dixy.Unicode.U0000 import *
from mem_dixy.Unicode.U0080 import *
from mem_dixy.Unicode_Encoding.U0000 import encoding as Basic_Latin
from mem_dixy.Unicode_Encoding.U0080 import encoding as Latin_1_Supplement
# convert = Basic_Latin | Latin_1_Supplement # NOTE: 3.9+ ONLY
convert = {**Basic_Latin, **Latin_1_Supplement}
lowercase = {
LATIN_CAPITAL_LETTER_A: LATIN_SMALL_LETTER_A,
LATIN_CAPITAL_LETTER_B: LATIN_SMALL_LETTER_B,
LATIN_CAPITAL_LETTER_C: LATIN_SMALL_LETTER_C,
LATIN_CAPITAL_LETTER_D: LATIN_SMALL_LETTER_D,
LATIN_CAPITAL_LETTER_E: LATIN_SMALL_LETTER_E,
LATIN_CAPITAL_LETTER_F: LATIN_SMALL_LETTER_F,
LATIN_CAPITAL_LETTER_G: LATIN_SMALL_LETTER_G,
LATIN_CAPITAL_LETTER_H: LATIN_SMALL_LETTER_H,
LATIN_CAPITAL_LETTER_I: LATIN_SMALL_LETTER_I,
LATIN_CAPITAL_LETTER_J: LATIN_SMALL_LETTER_J,
LATIN_CAPITAL_LETTER_K: LATIN_SMALL_LETTER_K,
LATIN_CAPITAL_LETTER_L: LATIN_SMALL_LETTER_L,
LATIN_CAPITAL_LETTER_M: LATIN_SMALL_LETTER_M,
LATIN_CAPITAL_LETTER_N: LATIN_SMALL_LETTER_N,
LATIN_CAPITAL_LETTER_O: LATIN_SMALL_LETTER_O,
LATIN_CAPITAL_LETTER_P: LATIN_SMALL_LETTER_P,
LATIN_CAPITAL_LETTER_Q: LATIN_SMALL_LETTER_Q,
LATIN_CAPITAL_LETTER_R: LATIN_SMALL_LETTER_R,
LATIN_CAPITAL_LETTER_S: LATIN_SMALL_LETTER_S,
LATIN_CAPITAL_LETTER_T: LATIN_SMALL_LETTER_T,
LATIN_CAPITAL_LETTER_U: LATIN_SMALL_LETTER_U,
LATIN_CAPITAL_LETTER_V: LATIN_SMALL_LETTER_V,
LATIN_CAPITAL_LETTER_W: LATIN_SMALL_LETTER_W,
LATIN_CAPITAL_LETTER_X: LATIN_SMALL_LETTER_X,
LATIN_CAPITAL_LETTER_Y: LATIN_SMALL_LETTER_Y,
LATIN_CAPITAL_LETTER_Z: LATIN_SMALL_LETTER_Z,
LOW_LINE: LOW_LINE,
LATIN_SMALL_LETTER_A: LATIN_SMALL_LETTER_A,
LATIN_SMALL_LETTER_B: LATIN_SMALL_LETTER_B,
LATIN_SMALL_LETTER_C: LATIN_SMALL_LETTER_C,
LATIN_SMALL_LETTER_D: LATIN_SMALL_LETTER_D,
LATIN_SMALL_LETTER_E: LATIN_SMALL_LETTER_E,
LATIN_SMALL_LETTER_F: LATIN_SMALL_LETTER_F,
LATIN_SMALL_LETTER_G: LATIN_SMALL_LETTER_G,
LATIN_SMALL_LETTER_H: LATIN_SMALL_LETTER_H,
LATIN_SMALL_LETTER_I: LATIN_SMALL_LETTER_I,
LATIN_SMALL_LETTER_J: LATIN_SMALL_LETTER_J,
LATIN_SMALL_LETTER_K: LATIN_SMALL_LETTER_K,
LATIN_SMALL_LETTER_L: LATIN_SMALL_LETTER_L,
LATIN_SMALL_LETTER_M: LATIN_SMALL_LETTER_M,
LATIN_SMALL_LETTER_N: LATIN_SMALL_LETTER_N,
LATIN_SMALL_LETTER_O: LATIN_SMALL_LETTER_O,
LATIN_SMALL_LETTER_P: LATIN_SMALL_LETTER_P,
LATIN_SMALL_LETTER_Q: LATIN_SMALL_LETTER_Q,
LATIN_SMALL_LETTER_R: LATIN_SMALL_LETTER_R,
LATIN_SMALL_LETTER_S: LATIN_SMALL_LETTER_S,
LATIN_SMALL_LETTER_T: LATIN_SMALL_LETTER_T,
LATIN_SMALL_LETTER_U: LATIN_SMALL_LETTER_U,
LATIN_SMALL_LETTER_V: LATIN_SMALL_LETTER_V,
LATIN_SMALL_LETTER_W: LATIN_SMALL_LETTER_W,
LATIN_SMALL_LETTER_X: LATIN_SMALL_LETTER_X,
LATIN_SMALL_LETTER_Y: LATIN_SMALL_LETTER_Y,
LATIN_SMALL_LETTER_Z: LATIN_SMALL_LETTER_Z,
}
number = {
DIGIT_ZERO,
DIGIT_ONE,
DIGIT_TWO,
DIGIT_THREE,
DIGIT_FOUR,
DIGIT_FIVE,
DIGIT_SIX,
DIGIT_SEVEN,
DIGIT_EIGHT,
DIGIT_NINE,
LATIN_CAPITAL_LETTER_A,
LATIN_CAPITAL_LETTER_B,
LATIN_CAPITAL_LETTER_C,
LATIN_CAPITAL_LETTER_D,
LATIN_CAPITAL_LETTER_E,
LATIN_CAPITAL_LETTER_F,
}
tag = {
LOW_LINE,
LATIN_SMALL_LETTER_A,
LATIN_SMALL_LETTER_B,
LATIN_SMALL_LETTER_C,
LATIN_SMALL_LETTER_D,
LATIN_SMALL_LETTER_E,
LATIN_SMALL_LETTER_F,
LATIN_SMALL_LETTER_G,
LATIN_SMALL_LETTER_H,
LATIN_SMALL_LETTER_I,
LATIN_SMALL_LETTER_J,
LATIN_SMALL_LETTER_K,
LATIN_SMALL_LETTER_L,
LATIN_SMALL_LETTER_M,
LATIN_SMALL_LETTER_N,
LATIN_SMALL_LETTER_O,
LATIN_SMALL_LETTER_P,
LATIN_SMALL_LETTER_Q,
LATIN_SMALL_LETTER_R,
LATIN_SMALL_LETTER_S,
LATIN_SMALL_LETTER_T,
LATIN_SMALL_LETTER_U,
LATIN_SMALL_LETTER_V,
LATIN_SMALL_LETTER_W,
LATIN_SMALL_LETTER_X,
LATIN_SMALL_LETTER_Y,
LATIN_SMALL_LETTER_Z
}
space = {
SPACE
}
sql_and = {
AMPERSAND
}
sql_is = {
PLUS_SIGN
}
sql_not = {
HYPHEN_MINUS
}
sql_or = {
VERTICAL_LINE
}
logica_operator = sql_and | sql_is | sql_not | sql_or
letter = {
DIGIT_ZERO,
DIGIT_ONE,
DIGIT_TWO,
DIGIT_THREE,
DIGIT_FOUR,
DIGIT_FIVE,
DIGIT_SIX,
DIGIT_SEVEN,
DIGIT_EIGHT,
DIGIT_NINE,
LATIN_CAPITAL_LETTER_A,
LATIN_CAPITAL_LETTER_B,
LATIN_CAPITAL_LETTER_C,
LATIN_CAPITAL_LETTER_D,
LATIN_CAPITAL_LETTER_E,
LATIN_CAPITAL_LETTER_F,
LATIN_CAPITAL_LETTER_G,
LATIN_CAPITAL_LETTER_H,
LATIN_CAPITAL_LETTER_I,
LATIN_CAPITAL_LETTER_J,
LATIN_CAPITAL_LETTER_K,
LATIN_CAPITAL_LETTER_L,
LATIN_CAPITAL_LETTER_M,
LATIN_CAPITAL_LETTER_N,
LATIN_CAPITAL_LETTER_O,
LATIN_CAPITAL_LETTER_P,
LATIN_CAPITAL_LETTER_Q,
LATIN_CAPITAL_LETTER_R,
LATIN_CAPITAL_LETTER_S,
LATIN_CAPITAL_LETTER_T,
LATIN_CAPITAL_LETTER_U,
LATIN_CAPITAL_LETTER_V,
LATIN_CAPITAL_LETTER_W,
LATIN_CAPITAL_LETTER_X,
LATIN_CAPITAL_LETTER_Y,
LATIN_CAPITAL_LETTER_Z,
LOW_LINE,
LATIN_SMALL_LETTER_A,
LATIN_SMALL_LETTER_B,
LATIN_SMALL_LETTER_C,
LATIN_SMALL_LETTER_D,
LATIN_SMALL_LETTER_E,
LATIN_SMALL_LETTER_F,
LATIN_SMALL_LETTER_G,
LATIN_SMALL_LETTER_H,
LATIN_SMALL_LETTER_I,
LATIN_SMALL_LETTER_J,
LATIN_SMALL_LETTER_K,
LATIN_SMALL_LETTER_L,
LATIN_SMALL_LETTER_M,
LATIN_SMALL_LETTER_N,
LATIN_SMALL_LETTER_O,
LATIN_SMALL_LETTER_P,
LATIN_SMALL_LETTER_Q,
LATIN_SMALL_LETTER_R,
LATIN_SMALL_LETTER_S,
LATIN_SMALL_LETTER_T,
LATIN_SMALL_LETTER_U,
LATIN_SMALL_LETTER_V,
LATIN_SMALL_LETTER_W,
LATIN_SMALL_LETTER_X,
LATIN_SMALL_LETTER_Y,
LATIN_SMALL_LETTER_Z
}
one_token = {
QUOTATION_MARK,
NUMBER_SIGN,
DOLLAR_SIGN,
PERCENT_SIGN,
AMPERSAND,
APOSTROPHE,
LEFT_PARENTHESIS,
RIGHT_PARENTHESIS,
ASTERISK,
PLUS_SIGN,
COMMA,
HYPHEN_MINUS,
FULL_STOP,
SOLIDUS,
COLON,
SEMICOLON,
QUESTION_MARK,
COMMERCIA_AT,
LEFT_SQUARE_BRACKET,
REVERSE_SOLIDUS,
RIGHT_SQUARE_BRACKET,
CIRCUMFLEX_ACCENT,
GRAVE_ACCENT,
LEFT_CURLY_BRACKET,
VERTICAL_LINE,
RIGHT_CURLY_BRACKET,
TILDE,
}
comparison = {
EXCLAMATION_MARK,
LESS_THAN_SIGN,
EQUALS_SIGN,
GREATER_THAN_SIGN,
}
logical = {
AMPERSAND,
PLUS_SIGN,
HYPHEN_MINUS,
CIRCUMFLEX_ACCENT,
VERTICAL_LINE,
TILDE
}
digit = {
DIGIT_ZERO,
DIGIT_ONE,
DIGIT_TWO,
DIGIT_THREE,
DIGIT_FOUR,
DIGIT_FIVE,
DIGIT_SIX,
DIGIT_SEVEN,
DIGIT_EIGHT,
DIGIT_NINE,
}
consonant = {
LATIN_SMALL_LETTER_B,
LATIN_SMALL_LETTER_C,
LATIN_SMALL_LETTER_D,
LATIN_SMALL_LETTER_F,
LATIN_SMALL_LETTER_G,
LATIN_SMALL_LETTER_H,
LATIN_SMALL_LETTER_J,
LATIN_SMALL_LETTER_K,
LATIN_SMALL_LETTER_L,
LATIN_SMALL_LETTER_M,
LATIN_SMALL_LETTER_N,
LATIN_SMALL_LETTER_P,
LATIN_SMALL_LETTER_Q,
LATIN_SMALL_LETTER_R,
LATIN_SMALL_LETTER_S,
LATIN_SMALL_LETTER_T,
LATIN_SMALL_LETTER_V,
LATIN_SMALL_LETTER_W,
LATIN_SMALL_LETTER_X,
LATIN_SMALL_LETTER_Z
}
vowel = {
LATIN_SMALL_LETTER_A,
LATIN_SMALL_LETTER_E,
LATIN_SMALL_LETTER_I,
LATIN_SMALL_LETTER_O,
LATIN_SMALL_LETTER_U,
LATIN_SMALL_LETTER_Y
}
wildcard = {
NUMBER_SIGN,
PERCENT_SIGN,
ASTERISK,
COLON,
QUESTION_MARK,
COMMERCIA_AT,
}
# The percent sign % can be used to match a number
# The asterisk (*) matches any number of characters. That means that you can use it as a placeholder for any sequence of letters or symbols. For example, if you enter blueb* you'll get all the terms that start with "blueb"; if you enter *bird you'll get all the terms that end with "bird"; if you enter *lueb* you'll get all the terms that contain the sequence "lueb", and so forth. An asterisk can match zero characters, too.
# The question mark (?) matches exactly one character. That means that you can use it as a placeholder for a single letter or symbol. The query l?b?n?n, for example, will find the word "Lebanon".
# (NEW!) The number-sign (#) matches any English consonant. For example, the query tra#t finds the word "tract" but not "trait".
# (NEW!) The at-sign (@) matches any English vowel. For example, the query abo@t finds the word "about" but not "abort".
# Filter by meaning: Did you know that you can filter your wildcard searches by meaning? Put a colon (:) after your pattern and then type a word or two describing what you're looking for. For example, the query p*:ireland finds terms beginning with "p" that have something to do with Ireland, and the query *:widespread epidemic searches for terms having something to do with "widespread epidemic". The latter case demonstrates how OneLook.com can be used as a means of finding a word (in this case, pandemic) if you only know its definition. See the reverse dictionary page for more details on this feature.
comparison | logical | digit | consonant | vowel | wildcard
all_token = one_token | letter | comparison | logical
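# Hedged sketch (not part of this module): one way the ``consonant`` and
# ``vowel`` sets above could implement the '#' and '@' wildcards described in
# the comments, assuming each imported constant is a one-character string.
#
#     import re
#     def wildcard_to_regex(pattern):
#         out = []
#         for ch in pattern:
#             if ch == "#":
#                 out.append("[" + "".join(sorted(consonant)) + "]")
#             elif ch == "@":
#                 out.append("[" + "".join(sorted(vowel)) + "]")
#             elif ch == "*":
#                 out.append(".*")
#             elif ch == "?":
#                 out.append(".")
#             else:
#                 out.append(re.escape(ch))
#         return "^" + "".join(out) + "$"
#
#     re.match(wildcard_to_regex("tra#t"), "tract")   # matches
#     re.match(wildcard_to_regex("tra#t"), "trait")   # does not match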
| avg_line_length: 28.076923 | max_line_length: 608 | alphanum_fraction: 0.761973 |
hexsha: 6737586dac2520ab5a1c6291aef2b8db4d0dcebf | size: 914 | ext: py | lang: Python
max_stars_repo_path: A5T5SQLite.py | max_stars_repo_name: qasimy123/assignment5-CMPUT291 | max_stars_repo_head_hexsha: 5082dc251cd7ffb484fbde8dce9d2bfe607fa19e | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: A5T5SQLite.py | max_issues_repo_name: qasimy123/assignment5-CMPUT291 | max_issues_repo_head_hexsha: 5082dc251cd7ffb484fbde8dce9d2bfe607fa19e | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: A5T5SQLite.py | max_forks_repo_name: qasimy123/assignment5-CMPUT291 | max_forks_repo_head_hexsha: 5082dc251cd7ffb484fbde8dce9d2bfe607fa19e | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
from util import connect
import time
QUERY_5 = '''
select
round(avg(price), 2)
from
listings
where
neighbourhood = :entry;
'''
def main():
neighbourhood = input("Specify the neighbourhood: ")
task5(neighbourhood)
def task5(neighbourhood: str):
connection = connect()
cursor = connection.cursor()
t_start = time.process_time()
cursor.execute(QUERY_5, {
"entry": neighbourhood
})
t_taken = time.process_time()-t_start
rows = cursor.fetchall()
if len(rows):
print("Average rental cost per night for", neighbourhood+" is:")
for row in rows:
print("$"+"".join(map(str, row)))
else:
print(neighbourhood+" Does not exist in database")
connection.commit()
connection.close()
print("Total time taken: {}s".format(t_taken))
return rows
if __name__ == "__main__":
main()
| avg_line_length: 20.311111 | max_line_length: 72 | alphanum_fraction: 0.610503 |
hexsha: f761c1c9b3c02a9061c67c09ee61ef9728f11fd4 | size: 8889 | ext: py | lang: Python
max_stars_repo_path: apps/pretrained_compound/pretrain_gnns/pretrain_contextpred.py | max_stars_repo_name: WorldEditors/PaddleHelix | max_stars_repo_head_hexsha: 7dbe947417538d7478fbab4438905b30c1d709c3 | max_stars_repo_licenses: ["Apache-2.0"] | max_stars_count: null | max_stars_repo_stars_event_min_datetime: null | max_stars_repo_stars_event_max_datetime: null
max_issues_repo_path: apps/pretrained_compound/pretrain_gnns/pretrain_contextpred.py | max_issues_repo_name: WorldEditors/PaddleHelix | max_issues_repo_head_hexsha: 7dbe947417538d7478fbab4438905b30c1d709c3 | max_issues_repo_licenses: ["Apache-2.0"] | max_issues_count: null | max_issues_repo_issues_event_min_datetime: null | max_issues_repo_issues_event_max_datetime: null
max_forks_repo_path: apps/pretrained_compound/pretrain_gnns/pretrain_contextpred.py | max_forks_repo_name: WorldEditors/PaddleHelix | max_forks_repo_head_hexsha: 7dbe947417538d7478fbab4438905b30c1d709c3 | max_forks_repo_licenses: ["Apache-2.0"] | max_forks_count: null | max_forks_repo_forks_event_min_datetime: null | max_forks_repo_forks_event_max_datetime: null
content:
#!/usr/bin/python
#-*-coding:utf-8-*-
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
pretrain context pred
"""
import os
from os.path import join, exists
import sys
import json
import argparse
import numpy as np
import paddle
import paddle.fluid as fluid
from paddle.fluid.incubate.fleet.collective import fleet
"""
Enable static graph mode.
"""
paddle.enable_static()
from pahelix.model_zoo import PreGNNContextpredModel
from pahelix.datasets import load_zinc_dataset
from pahelix.featurizers import PreGNNContextPredFeaturizer
from pahelix.utils.paddle_utils import load_partial_params, get_distributed_optimizer
from pahelix.utils.splitters import RandomSplitter
from pahelix.utils.compound_tools import CompoundConstants
def train(args, exe, train_prog, model, train_dataset, featurizer):
"""
    Define the training loop according to the given settings and calculate the training loss.
    Args:
        args, exe, train_prog, model, train_dataset, featurizer.
    Returns:
        The mean of the per-batch training losses.
"""
data_gen = train_dataset.iter_batch(
batch_size=args.batch_size,
num_workers=args.num_workers,
shuffle=True,
collate_fn=featurizer.collate_fn)
list_loss = []
for batch_id, feed_dict in enumerate(data_gen):
train_loss, = exe.run(train_prog,
feed=feed_dict, fetch_list=[model.loss], return_numpy=False)
list_loss.append(np.array(train_loss).mean())
return np.mean(list_loss)
def evaluate(args, exe, test_prog, model, test_dataset, featurizer):
"""
Define the evaluate function and calculate the test loss.
In the dataset, a proportion of labels are blank, so a `valid` tensor is used
to eliminate these blank labels in both the training and evaluation phases.
"""
data_gen = test_dataset.iter_batch(
batch_size=args.batch_size,
num_workers=args.num_workers,
shuffle=True,
collate_fn=featurizer.collate_fn)
list_loss = []
for batch_id, feed_dict in enumerate(data_gen):
test_loss, = exe.run(test_prog,
feed=feed_dict, fetch_list=[model.loss], return_numpy=False)
list_loss.append(np.array(test_loss).mean())
return np.mean(list_loss)
def main(args):
"""
Call the configuration function of the model, build the model and load data, then start training.
model_config:
a JSON file with the model configuration, such as dropout rate, learning rate, number of tasks and so on;
context_pooling:
the pooling type used for context prediction;
PreGNNContextpredModel:
an unsupervised pretraining model which uses subgraphs to predict their surrounding graph structures. The goal is to pre-train a GNN so that it maps nodes appearing in similar structural contexts to nearby embeddings.
"""
model_config = json.load(open(args.model_config, 'r'))
if args.dropout_rate is not None:
model_config['dropout_rate'] = args.dropout_rate
model_config['context_pooling'] = args.context_pooling
### build model
train_prog = fluid.Program()
test_prog = fluid.Program()
startup_prog = fluid.Program()
with fluid.program_guard(train_prog, startup_prog):
with fluid.unique_name.guard():
model = PreGNNContextpredModel(model_config)
model.forward()
opt = fluid.optimizer.Adam(learning_rate=args.lr)
if args.distributed:
opt = get_distributed_optimizer(opt)
opt.minimize(model.loss)
with fluid.program_guard(test_prog, fluid.Program()):
with fluid.unique_name.guard():
model = PreGNNContextpredModel(model_config)
model.forward(is_test=True)
"""
Use CUDAPlace for GPU training, or use CPUPlace for CPU training.
"""
place = fluid.CUDAPlace(int(os.environ.get('FLAGS_selected_gpus', 0))) \
if args.use_cuda else fluid.CPUPlace()
exe = fluid.Executor(place)
exe.run(startup_prog)
if args.init_model is not None and args.init_model != "":
load_partial_params(exe, args.init_model, train_prog)
### load data
"""
PreGNNContextPredFeaturizer:
It is used along with `PreGNNContextPredModel`. It inherits from the super class `Featurizer` which is used for feature extractions. The `Featurizer` has two functions: `gen_features` for converting from a single raw smiles to a single graph data, `collate_fn` for aggregating a sublist of graph data into a big batch.
k is the number of layers; l1 and l2 are the two context sizes, usually l1 < l2.
splitter:
split type of the dataset: random, scaffold, or random with scaffold. Here a random split is used.
`ScaffoldSplitter` will firstly order the compounds according to Bemis-Murcko scaffold,
then take the first `frac_train` proportion as the train set, the next `frac_valid` proportion as the valid set
and the rest as the test set. `ScaffoldSplitter` can better evaluate the generalization ability of the model on
out-of-distribution samples. Note that other splitters like `RandomSplitter`, `RandomScaffoldSplitter`
and `IndexSplitter` are also available.
"""
k = model_config['layer_num']
l1 = k - 1
l2 = l1 + args.context_size
featurizer = PreGNNContextPredFeaturizer(
model.substruct_graph_wrapper,
model.context_graph_wrapper,
k, l1, l2)
dataset = load_zinc_dataset(args.data_path, featurizer=featurizer)
splitter = RandomSplitter()
train_dataset, _, test_dataset = splitter.split(
dataset, frac_train=0.9, frac_valid=0, frac_test=0.1)
if args.distributed:
indices = list(range(fleet.worker_index(), len(train_dataset), fleet.worker_num()))
train_dataset = train_dataset[indices]
print("Train/Test num: %s/%s" % (len(train_dataset), len(test_dataset)))
### start train
"""
Run the train function and calculate the train loss and test loss in each epoch.
The loop runs for max_epoch epochs; change this if you want.
The train loss and test loss are computed and printed for each epoch.
Finally, the parameters of the best epoch are saved.
"""
list_test_loss = []
for epoch_id in range(args.max_epoch):
train_loss = train(args, exe, train_prog, model, train_dataset, featurizer)
test_loss = evaluate(args, exe, test_prog, model, test_dataset, featurizer)
if not args.distributed or fleet.worker_index() == 0:
fluid.io.save_params(exe, '%s/epoch%s' % (args.model_dir, epoch_id), train_prog)
list_test_loss.append(test_loss)
print("epoch:%d train/loss:%s" % (epoch_id, train_loss))
print("epoch:%d test/loss:%s" % (epoch_id, test_loss))
if not args.distributed or fleet.worker_index() == 0:
best_epoch_id = np.argmax(list_test_loss)
fluid.io.load_params(exe, '%s/epoch%d' % (args.model_dir, best_epoch_id), train_prog)
fluid.io.save_params(exe, '%s/epoch_best' % (args.model_dir), train_prog)
return list_test_loss[best_epoch_id]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--use_cuda", action='store_true', default=False)
parser.add_argument("--distributed", action='store_true', default=False)
parser.add_argument("--batch_size", type=int, default=256)
parser.add_argument("--num_workers", type=int, default=4)
parser.add_argument("--max_epoch", type=int, default=100)
parser.add_argument("--lr", type=float, default=0.001)
parser.add_argument("--data_path", type=str)
parser.add_argument("--model_config", type=str)
parser.add_argument("--dropout_rate", type=float)
parser.add_argument("--init_model", type=str)
parser.add_argument("--model_dir", type=str)
parser.add_argument("--context_size", type=int, default=3)
parser.add_argument("--context_pooling", type=str, default='average')
args = parser.parse_args()
main(args)
| 40.404545
| 326
| 0.684666
|
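The script above splits ZINC with RandomSplitter using frac_train=0.9, frac_valid=0 and frac_test=0.1. A rough sketch of what such a random split amounts to, written with NumPy only (the real pahelix splitter may differ in its details):

import numpy as np

def random_split(n, frac_train=0.9, frac_test=0.1, seed=0):
    # Shuffle the indices once, then carve out contiguous slices.
    rng = np.random.default_rng(seed)
    idx = rng.permutation(n)
    n_train = int(frac_train * n)
    return idx[:n_train], idx[n_train:n_train + int(frac_test * n)]

train_idx, test_idx = random_split(1000)
print(len(train_idx), len(test_idx))  # 900 100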
be0238b714eca8a49893c84d1c6fda0730388931
| 30,176
|
py
|
Python
|
desktop/core/src/desktop/settings.py
|
yetsun/hue
|
2e48f0cc70e233ee0e1b40733d4b2a18d8836c66
|
[
"Apache-2.0"
] | null | null | null |
desktop/core/src/desktop/settings.py
|
yetsun/hue
|
2e48f0cc70e233ee0e1b40733d4b2a18d8836c66
|
[
"Apache-2.0"
] | null | null | null |
desktop/core/src/desktop/settings.py
|
yetsun/hue
|
2e48f0cc70e233ee0e1b40733d4b2a18d8836c66
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Django settings for Hue.
#
# Local customizations are done by symlinking a file
# as local_settings.py.
from builtins import map, zip
import datetime
import gc
import json
import logging
import os
import pkg_resources
import sys
import uuid
import django_opentracing
import desktop.redaction
from desktop.lib.paths import get_desktop_root, get_run_root
from desktop.lib.python_util import force_dict_to_strings
from aws.conf import is_enabled as is_s3_enabled
from azure.conf import is_abfs_enabled
if sys.version_info[0] > 2:
from django.utils.translation import gettext_lazy as _
else:
from django.utils.translation import ugettext_lazy as _
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(os.path.dirname(__file__)), '..', '..', '..'))
HUE_DESKTOP_VERSION = pkg_resources.get_distribution("desktop").version or "Unknown"
NICE_NAME = "Hue"
ENV_HUE_PROCESS_NAME = "HUE_PROCESS_NAME"
ENV_DESKTOP_DEBUG = "DESKTOP_DEBUG"
LOGGING_CONFIG = None # We're handling our own logging config. Consider upgrading our logging infra to LOGGING_CONFIG
############################################################
# Part 1: Logging and imports.
############################################################
# Configure debug mode
DEBUG = True
GTEMPLATE_DEBUG = DEBUG
# Start basic logging as soon as possible.
if ENV_HUE_PROCESS_NAME not in os.environ:
_proc = os.path.basename(len(sys.argv) > 1 and sys.argv[1] or sys.argv[0])
os.environ[ENV_HUE_PROCESS_NAME] = _proc
desktop.log.basic_logging(os.environ[ENV_HUE_PROCESS_NAME])
logging.info("Welcome to Hue " + HUE_DESKTOP_VERSION)
# Then we can safely import some more stuff
from desktop import appmanager
from desktop.lib import conf
# Add fancy logging
desktop.log.fancy_logging()
############################################################
# Part 2: Generic Configuration
############################################################
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
LANGUAGES = [
('de', _('German')),
('en-us', _('English')),
('es', _('Spanish')),
('fr', _('French')),
('ja', _('Japanese')),
('ko', _('Korean')),
('pt', _('Portuguese')),
('pt-br', _('Brazilian Portuguese')),
('zh-CN', _('Simplified Chinese')),
]
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = False
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
############################################################
# Part 3: Django configuration
############################################################
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'desktop', 'libs', 'indexer', 'src', 'indexer', 'static'),
os.path.join(BASE_DIR, 'desktop', 'libs', 'notebook', 'src', 'notebook', 'static'),
os.path.join(BASE_DIR, 'desktop', 'libs', 'liboauth', 'src', 'liboauth', 'static'),
)
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
# For Django admin interface
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'build', 'static')
# List of callables that know how to import templates from various sources.
GTEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader'
)
MIDDLEWARE = [
# The order matters
'desktop.middleware.MetricsMiddleware',
'desktop.middleware.EnsureSafeMethodMiddleware',
'desktop.middleware.AuditLoggingMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'desktop.middleware.ProxyMiddleware',
'desktop.middleware.SpnegoMiddleware',
'desktop.middleware.HueRemoteUserMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django_babel.middleware.LocaleMiddleware',
'desktop.middleware.AjaxMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'desktop.middleware.ContentSecurityPolicyMiddleware',
# Must be after Session, Auth, and Ajax. Before everything else.
'desktop.middleware.LoginAndPermissionMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'desktop.middleware.NotificationMiddleware',
'desktop.middleware.ExceptionMiddleware',
'desktop.middleware.ClusterMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.http.ConditionalGetMiddleware',
#'axes.middleware.FailedLoginMiddleware',
'desktop.middleware.MimeTypeJSFileFixStreamingMiddleware',
'crequest.middleware.CrequestMiddleware',
]
# if os.environ.get(ENV_DESKTOP_DEBUG):
# MIDDLEWARE.append('desktop.middleware.HtmlValidationMiddleware')
# logging.debug("Will try to validate generated HTML.")
ROOT_URLCONF = 'desktop.urls'
# Hue runs its own wsgi applications
WSGI_APPLICATION = None
GTEMPLATE_DIRS = (
get_desktop_root("core/templates"),
)
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.staticfiles',
'django_extensions',
# 'debug_toolbar',
#'south', # database migration tool
# i18n support
'django_babel',
# Desktop injects all the other installed apps into here magically.
'desktop',
# App that keeps track of failed logins.
'axes',
'webpack_loader',
'django_prometheus',
'crequest',
#'django_celery_results',
]
WEBPACK_LOADER = {
'DEFAULT': {
'BUNDLE_DIR_NAME': 'desktop/js/bundles/hue/',
'STATS_FILE': os.path.join(BASE_DIR, 'webpack-stats.json')
},
'WORKERS': {
'BUNDLE_DIR_NAME': 'desktop/js/bundles/workers/',
'STATS_FILE': os.path.join(BASE_DIR, 'webpack-stats-workers.json')
},
'LOGIN': {
'BUNDLE_DIR_NAME': 'desktop/js/bundles/login/',
'STATS_FILE': os.path.join(BASE_DIR, 'webpack-stats-login.json')
}
}
LOCALE_PATHS = [
get_desktop_root('core/src/desktop/locale')
]
# Keep default values up to date
GTEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.request',
'django.contrib.messages.context_processors.messages',
# Not default
'desktop.context_processors.app_name',
)
TEMPLATES = [
{
'BACKEND': 'djangomako.backends.MakoBackend',
'DIRS': GTEMPLATE_DIRS,
'NAME': 'mako',
'OPTIONS': {
'context_processors': GTEMPLATE_CONTEXT_PROCESSORS,
'loaders': GTEMPLATE_LOADERS,
},
},
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
get_desktop_root("core/templates/debug_toolbar"),
get_desktop_root("core/templates/djangosaml2"),
],
'NAME': 'django',
'APP_DIRS': True,
},
]
# Desktop doesn't use an auth profile module, because
# because it doesn't mesh very well with the notion
# of having multiple apps. If your app needs
# to store data related to users, it should
# manage its own table with an appropriate foreign key.
AUTH_PROFILE_MODULE = None
LOGIN_REDIRECT_URL = "/"
LOGOUT_REDIRECT_URL = "/" # For djangosaml2 bug.
PYLINTRC = get_run_root('.pylintrc')
# Custom CSRF Failure View
CSRF_FAILURE_VIEW = 'desktop.views.csrf_failure'
############################################################
# Part 4: Installation of apps
############################################################
_config_dir = os.getenv("HUE_CONF_DIR", get_desktop_root("conf"))
# Libraries are loaded and configured before the apps
appmanager.load_libs()
_lib_conf_modules = [dict(module=app.conf, config_key=None) for app in appmanager.DESKTOP_LIBS if app.conf is not None]
LOCALE_PATHS.extend([app.locale_path for app in appmanager.DESKTOP_LIBS])
# Load desktop config
_desktop_conf_modules = [dict(module=desktop.conf, config_key=None)]
conf.initialize(_desktop_conf_modules, _config_dir)
# Register the redaction filters into the root logger as soon as possible.
desktop.redaction.register_log_filtering(desktop.conf.get_redaction_policy())
# Activate l10n
# Install apps
appmanager.load_apps(desktop.conf.APP_BLACKLIST.get())
for app in appmanager.DESKTOP_APPS:
INSTALLED_APPS.extend(app.django_apps)
LOCALE_PATHS.append(app.locale_path)
logging.debug("Installed Django modules: %s" % ",".join(map(str, appmanager.DESKTOP_MODULES)))
# Load app configuration
_app_conf_modules = [dict(module=app.conf, config_key=app.config_key) for app in appmanager.DESKTOP_APPS if app.conf is not None]
conf.initialize(_lib_conf_modules, _config_dir)
conf.initialize(_app_conf_modules, _config_dir)
# Now that we've loaded the desktop conf, set the django DEBUG mode based on the conf.
DEBUG = desktop.conf.DJANGO_DEBUG_MODE.get()
GTEMPLATE_DEBUG = DEBUG
if DEBUG: # For simplification, force all DEBUG when django_debug_mode is True and re-apply the loggers
os.environ[ENV_DESKTOP_DEBUG] = 'True'
desktop.log.basic_logging(os.environ[ENV_HUE_PROCESS_NAME])
desktop.log.fancy_logging()
############################################################
# Part 4a: Django configuration that requires bound Desktop
# configs.
############################################################
if desktop.conf.ENABLE_ORGANIZATIONS.get():
AUTH_USER_MODEL = 'useradmin.OrganizationUser'
MIGRATION_MODULES = {
'beeswax': 'beeswax.org_migrations',
'useradmin': 'useradmin.org_migrations',
'desktop': 'desktop.org_migrations',
}
# Configure allowed hosts
ALLOWED_HOSTS = desktop.conf.ALLOWED_HOSTS.get()
X_FRAME_OPTIONS = desktop.conf.X_FRAME_OPTIONS.get()
# Configure admins
ADMINS = []
for admin in desktop.conf.DJANGO_ADMINS.get():
admin_conf = desktop.conf.DJANGO_ADMINS[admin]
if 'name' in admin_conf.bind_to and 'email' in admin_conf.bind_to:
ADMINS.append(((admin_conf.NAME.get(), admin_conf.EMAIL.get())))
ADMINS = tuple(ADMINS)
MANAGERS = ADMINS
SERVER_EMAIL = desktop.conf.DJANGO_SERVER_EMAIL.get()
EMAIL_BACKEND = desktop.conf.DJANGO_EMAIL_BACKEND.get()
EMAIL_SUBJECT_PREFIX = 'Hue %s - ' % desktop.conf.CLUSTER_ID.get()
# Permissive CORS
if desktop.conf.CORS_ENABLED.get():
INSTALLED_APPS.append('corsheaders')
MIDDLEWARE.insert(0, 'corsheaders.middleware.CorsMiddleware')
MIDDLEWARE.remove('django.middleware.csrf.CsrfViewMiddleware')
if sys.version_info[0] > 2:
CORS_ALLOW_ALL_ORIGINS = True
else:
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = True # For when cookie auth
SESSION_COOKIE_SAMESITE = None
# Configure database
if os.getenv('DESKTOP_DB_CONFIG'):
conn_string = os.getenv('DESKTOP_DB_CONFIG')
logging.debug("DESKTOP_DB_CONFIG SET: %s" % (conn_string))
default_db = dict(
list(
zip(["ENGINE", "NAME", "TEST_NAME", "USER", "PASSWORD", "HOST", "PORT"], conn_string.split(':'))
)
)
default_db['NAME'] = default_db['NAME'].replace('#', ':') # For is_db_alive command
else:
test_name = os.environ.get('DESKTOP_DB_TEST_NAME', get_desktop_root('desktop-test.db'))
logging.debug("DESKTOP_DB_TEST_NAME SET: %s" % test_name)
test_user = os.environ.get('DESKTOP_DB_TEST_USER', 'hue_test')
logging.debug("DESKTOP_DB_TEST_USER SET: %s" % test_user)
default_db = {
"ENGINE": desktop.conf.DATABASE.ENGINE.get(),
"NAME": desktop.conf.DATABASE.NAME.get(),
"USER": desktop.conf.DATABASE.USER.get(),
"SCHEMA": desktop.conf.DATABASE.SCHEMA.get(),
"PASSWORD": desktop.conf.get_database_password(),
"HOST": desktop.conf.DATABASE.HOST.get(),
"PORT": str(desktop.conf.DATABASE.PORT.get()),
"OPTIONS": force_dict_to_strings(desktop.conf.DATABASE.OPTIONS.get()),
# DB used for tests
"TEST_NAME": test_name,
"TEST_USER": test_user,
# Wrap each request in a transaction.
"ATOMIC_REQUESTS": True,
"CONN_MAX_AGE": desktop.conf.DATABASE.CONN_MAX_AGE.get(),
}
DATABASES = {
'default': default_db
}
if desktop.conf.QUERY_DATABASE.HOST.get():
DATABASES['query'] = {
'ENGINE': desktop.conf.QUERY_DATABASE.ENGINE.get(),
'HOST': desktop.conf.QUERY_DATABASE.HOST.get(),
'NAME': desktop.conf.QUERY_DATABASE.NAME.get(),
'USER': desktop.conf.QUERY_DATABASE.USER.get(),
'PASSWORD': desktop.conf.QUERY_DATABASE.PASSWORD.get(),
'OPTIONS': desktop.conf.QUERY_DATABASE.OPTIONS.get(),
'PORT': desktop.conf.QUERY_DATABASE.PORT.get(),
"SCHEMA": desktop.conf.QUERY_DATABASE.SCHEMA.get(),
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', # TODO: Parameterize here for all the caches
'LOCATION': 'unique-hue'
},
'axes_cache': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
},
}
AXES_CACHE = 'axes_cache'
CACHES_HIVE_DISCOVERY_KEY = 'hive_discovery'
CACHES[CACHES_HIVE_DISCOVERY_KEY] = {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': CACHES_HIVE_DISCOVERY_KEY
}
CACHES_CELERY_KEY = 'celery'
CACHES_CELERY_QUERY_RESULT_KEY = 'celery_query_results'
if desktop.conf.TASK_SERVER.ENABLED.get():
CACHES[CACHES_CELERY_KEY] = json.loads(desktop.conf.TASK_SERVER.EXECUTION_STORAGE.get())
if desktop.conf.TASK_SERVER.RESULT_CACHE.get():
CACHES[CACHES_CELERY_QUERY_RESULT_KEY] = json.loads(desktop.conf.TASK_SERVER.RESULT_CACHE.get())
# Configure sessions
SESSION_COOKIE_NAME = desktop.conf.SESSION.COOKIE_NAME.get()
SESSION_COOKIE_AGE = desktop.conf.SESSION.TTL.get()
SESSION_COOKIE_SECURE = desktop.conf.SESSION.SECURE.get()
SESSION_EXPIRE_AT_BROWSER_CLOSE = desktop.conf.SESSION.EXPIRE_AT_BROWSER_CLOSE.get()
# HTTP only
SESSION_COOKIE_HTTPONLY = desktop.conf.SESSION.HTTP_ONLY.get()
CSRF_COOKIE_AGE = None if desktop.conf.SESSION.CSRF_COOKIE_AGE.get() == 0 else desktop.conf.SESSION.CSRF_COOKIE_AGE.get()
CSRF_COOKIE_SECURE = desktop.conf.SESSION.SECURE.get()
CSRF_COOKIE_HTTPONLY = desktop.conf.SESSION.HTTP_ONLY.get()
CSRF_COOKIE_NAME = 'csrftoken'
TRUSTED_ORIGINS = []
if desktop.conf.SESSION.TRUSTED_ORIGINS.get():
TRUSTED_ORIGINS += desktop.conf.SESSION.TRUSTED_ORIGINS.get()
# This is required for knox
if desktop.conf.KNOX.KNOX_PROXYHOSTS.get(): # The hosts provided here don't have port. Add default knox port
if desktop.conf.KNOX.KNOX_PORTS.get():
hostport = []
ports = [ # In case the ports are in hostname
host.split(':')[1] for host in desktop.conf.KNOX.KNOX_PROXYHOSTS.get() if len(host.split(':')) > 1
]
for port in ports + desktop.conf.KNOX.KNOX_PORTS.get():
if port == '80':
port = '' # Default port needs to be empty
else:
port = ':' + port
hostport += [host.split(':')[0] + port for host in desktop.conf.KNOX.KNOX_PROXYHOSTS.get()]
TRUSTED_ORIGINS += hostport
else:
TRUSTED_ORIGINS += desktop.conf.KNOX.KNOX_PROXYHOSTS.get()
if TRUSTED_ORIGINS:
CSRF_TRUSTED_ORIGINS = TRUSTED_ORIGINS
SECURE_HSTS_SECONDS = desktop.conf.SECURE_HSTS_SECONDS.get()
SECURE_HSTS_INCLUDE_SUBDOMAINS = desktop.conf.SECURE_HSTS_INCLUDE_SUBDOMAINS.get()
SECURE_CONTENT_TYPE_NOSNIFF = desktop.conf.SECURE_CONTENT_TYPE_NOSNIFF.get()
SECURE_BROWSER_XSS_FILTER = desktop.conf.SECURE_BROWSER_XSS_FILTER.get()
SECURE_SSL_REDIRECT = desktop.conf.SECURE_SSL_REDIRECT.get()
SECURE_SSL_HOST = desktop.conf.SECURE_SSL_HOST.get()
SECURE_REDIRECT_EXEMPT = desktop.conf.SECURE_REDIRECT_EXEMPT.get()
# django-nose test specifics
TEST_RUNNER = 'desktop.lib.test_runners.HueTestRunner'
# Turn off cache middleware
if 'test' in sys.argv:
CACHE_MIDDLEWARE_SECONDS = 0
# Limit Nose coverage to Hue apps
NOSE_ARGS = [
'--cover-package=%s' % ','.join([app.name for app in appmanager.DESKTOP_APPS + appmanager.DESKTOP_LIBS]),
'--no-path-adjustment',
'--traverse-namespace'
]
TIME_ZONE = desktop.conf.TIME_ZONE.get()
if desktop.conf.DEMO_ENABLED.get():
AUTHENTICATION_BACKENDS = ('desktop.auth.backend.DemoBackend',)
else:
AUTHENTICATION_BACKENDS = tuple(desktop.conf.AUTH.BACKEND.get())
EMAIL_HOST = desktop.conf.SMTP.HOST.get()
EMAIL_PORT = desktop.conf.SMTP.PORT.get()
EMAIL_HOST_USER = desktop.conf.SMTP.USER.get()
EMAIL_HOST_PASSWORD = desktop.conf.get_smtp_password()
EMAIL_USE_TLS = desktop.conf.SMTP.USE_TLS.get()
DEFAULT_FROM_EMAIL = desktop.conf.SMTP.DEFAULT_FROM.get()
if EMAIL_BACKEND == 'sendgrid_backend.SendgridBackend':
SENDGRID_API_KEY = desktop.conf.get_smtp_password()
SENDGRID_SANDBOX_MODE_IN_DEBUG = DEBUG
if desktop.conf.has_channels():
INSTALLED_APPS.append('channels')
ASGI_APPLICATION = 'desktop.routing.application'
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
'hosts': [(desktop.conf.WEBSOCKETS.LAYER_HOST.get(), desktop.conf.WEBSOCKETS.LAYER_PORT.get())],
},
},
}
# Used for securely creating sessions. Should be unique and not shared with anybody.
# Changing auth backends will invalidate all open sessions.
SECRET_KEY = desktop.conf.get_secret_key()
if SECRET_KEY:
SECRET_KEY += str(AUTHENTICATION_BACKENDS)
else:
SECRET_KEY = str(uuid.uuid4())
# Axes
AXES_LOGIN_FAILURE_LIMIT = desktop.conf.AUTH.LOGIN_FAILURE_LIMIT.get()
AXES_LOCK_OUT_AT_FAILURE = desktop.conf.AUTH.LOGIN_LOCK_OUT_AT_FAILURE.get()
AXES_COOLOFF_TIME = None
if desktop.conf.AUTH.LOGIN_COOLOFF_TIME.get() and desktop.conf.AUTH.LOGIN_COOLOFF_TIME.get() != 0:
AXES_COOLOFF_TIME = desktop.conf.AUTH.LOGIN_COOLOFF_TIME.get()
AXES_USE_USER_AGENT = desktop.conf.AUTH.LOGIN_LOCK_OUT_USE_USER_AGENT.get()
AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP = desktop.conf.AUTH.LOGIN_LOCK_OUT_BY_COMBINATION_USER_AND_IP.get()
AXES_BEHIND_REVERSE_PROXY = desktop.conf.AUTH.BEHIND_REVERSE_PROXY.get()
AXES_REVERSE_PROXY_HEADER = desktop.conf.AUTH.REVERSE_PROXY_HEADER.get()
LOGIN_URL = '/hue/accounts/login'
# SAML
SAML_AUTHENTICATION = 'libsaml.backend.SAML2Backend' in AUTHENTICATION_BACKENDS
if SAML_AUTHENTICATION:
from libsaml.saml_settings import *
INSTALLED_APPS.append('libsaml')
LOGIN_URL = '/saml2/login/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# Middleware classes.
for middleware in desktop.conf.MIDDLEWARE.get():
MIDDLEWARE.append(middleware)
# OpenID Connect
def is_oidc_configured():
return 'desktop.auth.backend.OIDCBackend' in AUTHENTICATION_BACKENDS
if is_oidc_configured():
INSTALLED_APPS.append('mozilla_django_oidc')
if 'desktop.auth.backend.AllowFirstUserDjangoBackend' not in AUTHENTICATION_BACKENDS:
# when multi-backend auth, standard login URL '/hue/accounts/login' is used.
LOGIN_URL = '/oidc/authenticate/'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
MIDDLEWARE.append('mozilla_django_oidc.middleware.SessionRefresh')
OIDC_RENEW_ID_TOKEN_EXPIRY_SECONDS = 15 * 60
OIDC_RP_SIGN_ALGO = 'RS256'
OIDC_RP_CLIENT_ID = desktop.conf.OIDC.OIDC_RP_CLIENT_ID.get()
OIDC_RP_CLIENT_SECRET = desktop.conf.OIDC.OIDC_RP_CLIENT_SECRET.get()
OIDC_OP_AUTHORIZATION_ENDPOINT = desktop.conf.OIDC.OIDC_OP_AUTHORIZATION_ENDPOINT.get()
OIDC_OP_TOKEN_ENDPOINT = desktop.conf.OIDC.OIDC_OP_TOKEN_ENDPOINT.get()
OIDC_OP_USER_ENDPOINT = desktop.conf.OIDC.OIDC_OP_USER_ENDPOINT.get()
OIDC_RP_IDP_SIGN_KEY = desktop.conf.OIDC.OIDC_RP_IDP_SIGN_KEY.get()
OIDC_OP_JWKS_ENDPOINT = desktop.conf.OIDC.OIDC_OP_JWKS_ENDPOINT.get()
OIDC_VERIFY_SSL = desktop.conf.OIDC.OIDC_VERIFY_SSL.get()
LOGIN_REDIRECT_URL = desktop.conf.OIDC.LOGIN_REDIRECT_URL.get()
LOGOUT_REDIRECT_URL = desktop.conf.OIDC.LOGOUT_REDIRECT_URL.get()
LOGIN_REDIRECT_URL_FAILURE = desktop.conf.OIDC.LOGIN_REDIRECT_URL_FAILURE.get()
OIDC_STORE_ACCESS_TOKEN = True
OIDC_STORE_ID_TOKEN = True
OIDC_STORE_REFRESH_TOKEN = True
OIDC_CREATE_USER = desktop.conf.OIDC.CREATE_USERS_ON_LOGIN.get()
OIDC_USERNAME_ATTRIBUTE = desktop.conf.OIDC.OIDC_USERNAME_ATTRIBUTE.get()
# OAuth
OAUTH_AUTHENTICATION = 'liboauth.backend.OAuthBackend' in AUTHENTICATION_BACKENDS
if OAUTH_AUTHENTICATION:
INSTALLED_APPS.append('liboauth')
LOGIN_URL = '/oauth/accounts/login'
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
# URL Redirection white list.
if desktop.conf.REDIRECT_WHITELIST.get():
MIDDLEWARE.append('desktop.middleware.EnsureSafeRedirectURLMiddleware')
# Enable X-Forwarded-Host header if the load balancer requires it
USE_X_FORWARDED_HOST = desktop.conf.USE_X_FORWARDED_HOST.get()
# Support HTTPS load-balancing
if desktop.conf.SECURE_PROXY_SSL_HEADER.get():
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Add last activity tracking and idle session timeout
if 'useradmin' in [app.name for app in appmanager.DESKTOP_APPS]:
MIDDLEWARE.append('useradmin.middleware.LastActivityMiddleware')
if desktop.conf.SESSION.CONCURRENT_USER_SESSION_LIMIT.get():
MIDDLEWARE.append('useradmin.middleware.ConcurrentUserSessionMiddleware')
LOAD_BALANCER_COOKIE = 'ROUTEID'
################################################################
# Register file upload handlers
# This section must go after the desktop lib modules are loaded
################################################################
# Insert our custom upload handlers
file_upload_handlers = [
'hadoop.fs.upload.HDFSfileUploadHandler',
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]
if is_s3_enabled():
file_upload_handlers.insert(0, 'aws.s3.upload.S3FileUploadHandler')
if is_abfs_enabled():
file_upload_handlers.insert(0, 'azure.abfs.upload.ABFSFileUploadHandler')
FILE_UPLOAD_HANDLERS = tuple(file_upload_handlers)
############################################################
# Necessary for South to not fuzz with tests. Fixed in South 0.7.1
SKIP_SOUTH_TESTS = True
# Set up environment variable so Kerberos libraries look at our private
# ticket cache
os.environ['KRB5CCNAME'] = desktop.conf.KERBEROS.CCACHE_PATH.get()
if not os.getenv('SERVER_SOFTWARE'):
os.environ['SERVER_SOFTWARE'] = 'apache'
# If Hue is configured to use a CACERTS truststore, make sure that the
# REQUESTS_CA_BUNDLE is set so that we can use it when we make external requests.
# This is for the REST calls made by Hue with the requests library.
if desktop.conf.SSL_CACERTS.get() and os.environ.get('REQUESTS_CA_BUNDLE') is None:
os.environ['REQUESTS_CA_BUNDLE'] = desktop.conf.SSL_CACERTS.get()
# Preventing local build failure by not validating the default value of REQUESTS_CA_BUNDLE
if os.environ.get('REQUESTS_CA_BUNDLE') and os.environ.get('REQUESTS_CA_BUNDLE') != desktop.conf.SSL_CACERTS.config.default \
and not os.path.isfile(os.environ['REQUESTS_CA_BUNDLE']):
raise Exception(_('SSL Certificate pointed by REQUESTS_CA_BUNDLE does not exist: %s') % os.environ['REQUESTS_CA_BUNDLE'])
# Instrumentation
if desktop.conf.INSTRUMENTATION.get():
if sys.version_info[0] > 2:
gc.set_debug(gc.DEBUG_UNCOLLECTABLE)
else:
gc.set_debug(gc.DEBUG_UNCOLLECTABLE | gc.DEBUG_OBJECTS)
if not desktop.conf.DATABASE_LOGGING.get():
def disable_database_logging():
from django.db.backends.base.base import BaseDatabaseWrapper
from django.db.backends.utils import CursorWrapper
BaseDatabaseWrapper.make_debug_cursor = lambda self, cursor: CursorWrapper(cursor, self)
disable_database_logging()
############################################################
# Searching saved documents in Oracle returns following error:
# DatabaseError: ORA-06502: PL/SQL: numeric or value error: character string buffer too small
# This is caused by DBMS_LOB.SUBSTR(%s, 4000) in Django framework django/db/backends/oracle/base.py
# Django has a ticket for this issue but unfixed: https://code.djangoproject.com/ticket/11580.
# Buffer size 4000 limit the length of field equals or less than 2000 characters.
#
# For performance reasons and to avoid searching in huge fields, we also truncate to a max length
DOCUMENT2_SEARCH_MAX_LENGTH = 2000
# To avoid performance issues, the config check will display a warning when the number of Document2 entries exceeds this size
DOCUMENT2_MAX_ENTRIES = 100000
DEBUG_TOOLBAR_PATCH_SETTINGS = False
def show_toolbar(request):
# This can be used to decide whether to show the toolbar based on the request object:
# for example, limit by IP address by checking request.META['REMOTE_ADDR'], which avoids having to set INTERNAL_IPS.
list_allowed_users = desktop.conf.DJANGO_DEBUG_TOOL_USERS.get()
is_user_allowed = list_allowed_users[0] == '' or request.user.username in list_allowed_users
return DEBUG and desktop.conf.ENABLE_DJANGO_DEBUG_TOOL.get() and is_user_allowed
if DEBUG and desktop.conf.ENABLE_DJANGO_DEBUG_TOOL.get():
idx = MIDDLEWARE.index('desktop.middleware.ClusterMiddleware')
MIDDLEWARE.insert(idx + 1, 'debug_panel.middleware.DebugPanelMiddleware')
INSTALLED_APPS += (
'debug_toolbar',
'debug_panel',
)
DEBUG_TOOLBAR_PANELS = [
'debug_toolbar.panels.versions.VersionsPanel',
'debug_toolbar.panels.timer.TimerPanel',
'debug_toolbar.panels.settings.SettingsPanel',
'debug_toolbar.panels.headers.HeadersPanel',
'debug_toolbar.panels.request.RequestPanel',
'debug_toolbar.panels.sql.SQLPanel',
'debug_toolbar.panels.staticfiles.StaticFilesPanel',
'debug_toolbar.panels.templates.TemplatesPanel',
'debug_toolbar.panels.cache.CachePanel',
'debug_toolbar.panels.signals.SignalsPanel',
'debug_toolbar.panels.logging.LoggingPanel',
'debug_toolbar.panels.redirects.RedirectsPanel',
]
DEBUG_TOOLBAR_CONFIG = {
'JQUERY_URL': os.path.join(STATIC_ROOT, 'desktop/ext/js/jquery/jquery-2.2.4.min.js'),
'RESULTS_CACHE_SIZE': 200,
'SHOW_TOOLBAR_CALLBACK': show_toolbar
}
CACHES.update({
'debug-panel': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': '/var/tmp/debug-panel-cache',
'OPTIONS': {
'MAX_ENTRIES': 10000
}
}
})
################################################################
# Celery settings
################################################################
if desktop.conf.TASK_SERVER.ENABLED.get() or desktop.conf.TASK_SERVER.BEAT_ENABLED.get():
CELERY_BROKER_URL = desktop.conf.TASK_SERVER.BROKER_URL.get()
CELERY_ACCEPT_CONTENT = ['json']
CELERY_RESULT_BACKEND = desktop.conf.TASK_SERVER.CELERY_RESULT_BACKEND.get()
CELERY_TASK_SERIALIZER = 'json'
CELERYD_OPTS = desktop.conf.TASK_SERVER.RESULT_CELERYD_OPTS.get()
# %n will be replaced with the first part of the nodename.
# CELERYD_LOG_FILE="/var/log/celery/%n%I.log"
# CELERYD_PID_FILE="/var/run/celery/%n.pid"
# CELERY_CREATE_DIRS = 1
# CELERYD_USER = desktop.conf.SERVER_USER.get()
# CELERYD_GROUP = desktop.conf.SERVER_GROUP.get()
if desktop.conf.TASK_SERVER.BEAT_ENABLED.get():
INSTALLED_APPS.append('django_celery_beat')
INSTALLED_APPS.append('timezone_field')
USE_TZ = True
PROMETHEUS_EXPORT_MIGRATIONS = False # Needs to be there even when enable_prometheus is not enabled
if desktop.conf.ENABLE_PROMETHEUS.get():
MIDDLEWARE.insert(0, 'django_prometheus.middleware.PrometheusBeforeMiddleware')
MIDDLEWARE.append('django_prometheus.middleware.PrometheusAfterMiddleware')
if 'mysql' in DATABASES['default']['ENGINE']:
DATABASES['default']['ENGINE'] = DATABASES['default']['ENGINE'].replace('django.db.backends', 'django_prometheus.db.backends')
# enable only when use these metrics: django_cache_get_total, django_cache_hits_total, django_cache_misses_total
# for name, val in list(CACHES.items()):
# val['BACKEND'] = val['BACKEND'].replace('django.core.cache.backends', 'django_prometheus.cache.backends')
################################################################
# OpenTracing settings
################################################################
if desktop.conf.TRACING.ENABLED.get():
OPENTRACING_TRACE_ALL = desktop.conf.TRACING.TRACE_ALL.get()
OPENTRACING_TRACER_CALLABLE = __name__ + '.tracer'
def tracer():
from jaeger_client import Config
config = Config(
config={
'sampler': {
'type': 'const',
'param': 1,
},
},
# metrics_factory=PrometheusMetricsFactory(namespace='hue-api'),
service_name='hue-api',
validate=True,
)
return config.initialize_tracer()
OPENTRACING_TRACED_ATTRIBUTES = ['META'] # Only valid if OPENTRACING_TRACE_ALL == True
if desktop.conf.TRACING.TRACE_ALL.get():
MIDDLEWARE.insert(0, 'django_opentracing.OpenTracingMiddleware')
MODULES_TO_PATCH = (
'django.contrib.staticfiles.storage',
'django.core.cache.backends.filebased',
'django.core.cache.utils',
'django.db.backends.utils',
'django.utils.cache',
)
try:
import hashlib
hashlib.md5()
except ValueError:
from desktop.monkey_patches import monkey_patch_md5
monkey_patch_md5(MODULES_TO_PATCH)
| 36.356627
| 130
| 0.727399
|
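One detail from the settings above worth isolating: DESKTOP_DB_CONFIG is a single colon-separated string that is zipped against a fixed key list, with '#' standing in for ':' inside the database name. A standalone sketch of that parsing step (the sample connection string is invented):

def parse_db_config(conn_string):
    keys = ["ENGINE", "NAME", "TEST_NAME", "USER", "PASSWORD", "HOST", "PORT"]
    cfg = dict(zip(keys, conn_string.split(":")))
    # '#' lets the NAME field itself contain a ':' without breaking the split.
    cfg["NAME"] = cfg["NAME"].replace("#", ":")
    return cfg

print(parse_db_config("mysql:hue#prod:hue_test:hue:secret:db.example.com:3306"))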
c97cd8336f7d7980911d0bdea93137e7bc95ba24
| 2,585
|
py
|
Python
|
script/build.py
|
sam444/electron-nodejs
|
efeabfe3effc4503c0852b5dc47ebfdd6c0326a2
|
[
"MIT"
] | 2
|
2018-09-16T02:38:30.000Z
|
2019-01-21T15:09:48.000Z
|
script/build.py
|
sam444/electron-nodejs
|
efeabfe3effc4503c0852b5dc47ebfdd6c0326a2
|
[
"MIT"
] | null | null | null |
script/build.py
|
sam444/electron-nodejs
|
efeabfe3effc4503c0852b5dc47ebfdd6c0326a2
|
[
"MIT"
] | 1
|
2021-06-22T21:00:15.000Z
|
2021-06-22T21:00:15.000Z
|
#!/usr/bin/env python
import argparse
import os
import subprocess
import sys
from lib.config import MIPS64EL_GCC, get_target_arch, build_env
from lib.util import electron_gyp, import_vs_env
CONFIGURATIONS = ['Release', 'Debug']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
VENDOR_DIR = os.path.join(SOURCE_ROOT, 'vendor')
LIBCC_SOURCE_ROOT = os.path.join(SOURCE_ROOT, 'vendor', 'libchromiumcontent')
LIBCC_DIST_MAIN = os.path.join(LIBCC_SOURCE_ROOT, 'dist', 'main')
GCLIENT_DONE = os.path.join(SOURCE_ROOT, '.gclient_done')
def main():
os.chdir(SOURCE_ROOT)
# Update the VS build env.
import_vs_env(get_target_arch())
ninja = os.path.join('vendor', 'depot_tools', 'ninja')
if sys.platform == 'win32':
ninja += '.exe'
args = parse_args()
if args.libcc:
if ('D' not in args.configuration
or not os.path.exists(GCLIENT_DONE)
or not os.path.exists(os.path.join(LIBCC_DIST_MAIN, 'build.ninja'))):
sys.stderr.write('--libcc should only be used when '
'libchromiumcontent was built with bootstrap.py -d '
'--debug_libchromiumcontent' + os.linesep)
sys.exit(1)
script = os.path.join(LIBCC_SOURCE_ROOT, 'script', 'build')
subprocess.check_call([sys.executable, script, '-D', '-t',
get_target_arch()])
subprocess.check_call([ninja, '-C', LIBCC_DIST_MAIN])
env = build_env()
for config in args.configuration:
build_path = os.path.join('out', config[0])
ret = subprocess.call([ninja, '-C', build_path, args.target], env=env)
if ret != 0:
sys.exit(ret)
def parse_args():
parser = argparse.ArgumentParser(description='Build project')
parser.add_argument('-c', '--configuration',
help='Build with Release or Debug configuration',
nargs='+',
default=CONFIGURATIONS,
required=False)
parser.add_argument('-t', '--target',
help='Build specified target',
default=electron_gyp()['project_name%'],
required=False)
parser.add_argument('--libcc',
help=(
'Build libchromiumcontent first. Should be used only '
'when libchromiumcontent as built with boostrap.py '
'-d --debug_libchromiumcontent.'
),
action='store_true', default=False)
return parser.parse_args()
if __name__ == '__main__':
sys.exit(main())
| 34.466667
| 78
| 0.613926
|
3ac5a4bd857064649f6b9ff7383c04ec9b9b7d8b
| 1,814
|
py
|
Python
|
socialism.py
|
CT-Yao/crypto-tools
|
d6d0ae4d744205bb108d8cddde8bc98e8a37567f
|
[
"CC0-1.0"
] | 1
|
2021-10-08T15:06:30.000Z
|
2021-10-08T15:06:30.000Z
|
socialism.py
|
CT-Yao/crypto-tools
|
d6d0ae4d744205bb108d8cddde8bc98e8a37567f
|
[
"CC0-1.0"
] | null | null | null |
socialism.py
|
CT-Yao/crypto-tools
|
d6d0ae4d744205bb108d8cddde8bc98e8a37567f
|
[
"CC0-1.0"
] | null | null | null |
#!/usr/bin/env python3
# File : socialism.py
# Author : Chengtao Yao
# Email : chengtao.yao@outlook.com
# Created Time : 2021/10/10 0:07
# Description :
import random
key_lst = ['富强', '民主', '文明', '和谐',
'自由', '平等', '公正', '法治',
'爱国', '敬业', '诚信', '友善']
def socialism_encode(plaintext):
ciphertext = ""
letters = [x for x in plaintext.encode('utf8')]
for letter in letters:
letter_lst = [int(x, 16) for x in "{:0>2}".format(hex(letter)[2:])]
n_lst = []
for n in letter_lst:
if n < 10:
n_lst.append(n)
else:
if random.uniform(0, 1) > 0.5:
n_lst.append(10)
n_lst.append(n - 10)
else:
n_lst.append(11)
n_lst.append(n - 6)
ciphertext += ''.join([key_lst[x] for x in n_lst])
return ciphertext
def socialism_decode(ciphertext):
n_lst = [key_lst.index(ciphertext[x:x + 2]) for x in range(0, len(ciphertext), 2)]
hex_lst = []
i = 0
while i < len(n_lst):
if n_lst[i] < 10:
hex_lst.append(n_lst[i])
else:
if n_lst[i] == 10:
hex_lst.append(sum(n_lst[i:i + 2]))
elif n_lst[i] == 11:
hex_lst.append(n_lst[i + 1] + 6)
else:
print("[Error]")
i += 1
i += 1
hex_lst = [hex(x)[2:] for x in hex_lst]
new_hex_lst = [int("".join(hex_lst[x:x + 2]), 16) for x in range(0, len(hex_lst), 2)]
plaintext = bytes(new_hex_lst).decode()
return plaintext
def module_test_api():
c = '比赛'
m = socialism_encode(c)
print(m)
c = socialism_decode(m)
print(c)
if __name__ == "__main__":
module_test_api()
| 25.549296
| 89
| 0.495039
|
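To make the scheme above concrete: each UTF-8 byte is split into two hex nibbles, nibbles 10-15 are re-encoded as an escape pair (10 followed by n-10, or 11 followed by n-6), and every resulting value 0-11 indexes into key_lst. A short round-trip check, assuming the file above is importable as socialism:

from socialism import socialism_encode, socialism_decode

m = socialism_encode("abc")      # 'a' = 0x61 -> nibbles 6, 1 -> 公正 民主, and so on
print(m)                         # 公正民主公正文明公正和谐
print(socialism_decode(m))       # abc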
8ec5e1f39e68f4e59054e241ed4209aa8acc27d4
| 1,487
|
py
|
Python
|
helx/typing.py
|
epignatelli/helx
|
3dbbf228a63b79169e80f890b97db3d7473e956e
|
[
"Apache-2.0"
] | 1
|
2021-07-27T00:03:47.000Z
|
2021-07-27T00:03:47.000Z
|
helx/typing.py
|
epignatelli/helx
|
3dbbf228a63b79169e80f890b97db3d7473e956e
|
[
"Apache-2.0"
] | 10
|
2021-06-16T08:42:25.000Z
|
2021-07-05T08:41:51.000Z
|
helx/typing.py
|
epignatelli/helx
|
3dbbf228a63b79169e80f890b97db3d7473e956e
|
[
"Apache-2.0"
] | null | null | null |
import functools
from typing import Any, Callable, Dict, NamedTuple, Tuple, TypeVar, Union
import jax.numpy as jnp
Key = TypeVar("Key", bound=jnp.ndarray)
Shape = TypeVar("Shape", bound=Tuple[int, ...])
Size = TypeVar("Size", bound=Tuple[int, int])
Params = TypeVar("Params", bound=Any)
# Init = TypeVar("Init", Callable[[Key, Shape], Tuple[Shape, Params]])
# TODO(ep) Error: TypeVar bound type cannot be generic. TODO(ep): add when generics are supported
Init = Callable[[Key, Shape], Tuple[Shape, Params]]
# Apply = TypeVar("Apply", Callable[[Params, jnp.ndarray, Dict], jnp.ndarray])
# TODO(ep) Error: TypeVar bound type cannot be generic.
Apply = Callable[[Params, jnp.ndarray, Dict], jnp.ndarray]
HParams = TypeVar("HParams", bound=NamedTuple)
Batch = Union
State = TypeVar("State", bound=jnp.ndarray)
Observation = TypeVar("Observation", bound=jnp.ndarray)
Action = TypeVar("Action", bound=int)
Reward = TypeVar("Reward", bound=float)
Discount = TypeVar("Discount", bound=float)
TraceDecay = TypeVar("TraceDecay", bound=float)
Logits = TypeVar("Logits", bound=jnp.ndarray)
Value = TypeVar("Value", bound=jnp.ndarray)
Loss = TypeVar("Loss", bound=float)
Return = TypeVar("Return", bound=float)
def factory(cls_maker, T):
"""Type factory decorator"""
@functools.wraps(cls_maker)
def fabricate(*args, **kwargs):
return T(*cls_maker(*args, **kwargs))
return fabricate
def default(type):
return type()
def nameof(type):
return type.__name__
| 30.346939
| 97
| 0.709482
|
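The factory decorator above simply re-wraps whatever tuple a maker function returns into the given NamedTuple type. A small usage sketch (the Point type and make_point maker are illustrative, not part of helx):

from typing import NamedTuple
from helx.typing import factory  # the module shown above

class Point(NamedTuple):
    x: float
    y: float

def make_point(scale):
    # Returns a plain tuple; factory() re-wraps it into a Point.
    return (1.0 * scale, 2.0 * scale)

make_typed_point = factory(make_point, Point)
print(make_typed_point(3))  # Point(x=3.0, y=6.0)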
84902d7c38ab64190805ed0df814a3ddc1b8c371
| 14,616
|
py
|
Python
|
lib/jnpr/junos/factory/factory_loader.py
|
a-v-popov/py-junos-eznc
|
fc5debc6ff181f7a4c83780b5981dd89394f7c92
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 2
|
2016-02-23T09:49:46.000Z
|
2019-06-18T15:59:01.000Z
|
lib/jnpr/junos/factory/factory_loader.py
|
a-v-popov/py-junos-eznc
|
fc5debc6ff181f7a4c83780b5981dd89394f7c92
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 12
|
2017-11-09T09:49:03.000Z
|
2018-01-08T09:50:54.000Z
|
lib/jnpr/junos/factory/factory_loader.py
|
a-v-popov/py-junos-eznc
|
fc5debc6ff181f7a4c83780b5981dd89394f7c92
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 4
|
2015-05-13T11:05:42.000Z
|
2017-11-09T09:32:07.000Z
|
"""
This file contains the FactoryLoader class that is used to dynamically
create Runstat Table and View objects from a <dict> of data. The <dict> can
originate from any kind of source: YAML, JSON, program. For examples of YAML
refer to the .yml files in this jnpr.junos.op directory.
"""
# stdlib
from copy import deepcopy
import re
from jinja2 import Environment
# locally
from jnpr.junos.factory.factory_cls import *
from jnpr.junos.factory.viewfields import *
__all__ = ["FactoryLoader"]
# internally used shortcuts
_VIEW = FactoryView
_CMDVIEW = FactoryCMDView
_FIELDS = ViewFields
_GET = FactoryOpTable
_TABLE = FactoryTable
_CFGTBL = FactoryCfgTable
_CMDTBL = FactoryCMDTable
_CMDCHILDTBL = FactoryCMDChildTable
class FactoryLoader(object):
"""
Used to load a <dict> of data that contains Table and View definitions.
The primary method is :load(): which will return a <dict> of item-name and
item-class definitions.
If you want to import these definitions directly into your namespace,
(like a module) you would do the following:
loader = FactoryLoader()
catalog = loader.load( <catalog_dict> )
globals().update( catalog )
If you did not want to do this, you can access the items as the catalog.
For example, if your <catalog_dict> contained a Table called MyTable, then
you could do something like:
MyTable = catalog['MyTable']
table = MyTable(dev)
table.get()
...
"""
def __init__(self):
self._catalog_dict = None # YAML data
self._item_optables = [] # list of the get/op-tables
self._item_cfgtables = [] # list of get/cfg-tables
self._item_cmdtables = [] # list of commands with unstructured data o/p
self._item_views = [] # list of views to build
self._item_tables = [] # list of tables to build
self.catalog = {} # catalog of built classes
# -----------------------------------------------------------------------
# Create a View class from YAML definition
# -----------------------------------------------------------------------
def _fieldfunc_True(self, value_rhs):
def true_test(x):
if value_rhs.startswith("regex("):
return bool(re.search(value_rhs.strip("regex()"), x))
return x == value_rhs
return true_test
def _fieldfunc_False(self, value_rhs):
def false_test(x):
if value_rhs.startswith("regex("):
return not re.search(value_rhs.strip("regex()"), x)
return x != value_rhs
return false_test
def _fieldfunc_Search(self, regex_pattern):
def search_field(field_text):
""" Returns the first occurrence of regex_pattern within given field_text."""
match = re.search(regex_pattern, field_text)
if match:
return match.groups()[0]
else:
return None
return search_field
def _add_dictfield(self, fields, f_name, f_dict, kvargs):
""" add a field based on its associated dictionary """
# at present if a field is a <dict> then there is **one
# item** - { the xpath value : the option control }. typically
# the option would be a builtin class type like 'int'
# however, as this framework expands in capability, this
# will be enhanced, yo!
xpath, opt = list(f_dict.items())[0] # get first/only key,value
if opt == "group":
fields.group(f_name, xpath)
return
if "flag" == opt:
opt = "bool" # flag is alias for bool
# first check to see if the option is a built-in Python
# type, most commonly would be 'int' for numbers, like counters
if isinstance(opt, dict):
kvargs.update(opt)
fields.str(f_name, xpath, **kvargs)
return
astype = __builtins__.get(opt) or globals().get(opt)
if astype is not None:
kvargs["astype"] = astype
fields.astype(f_name, xpath, **kvargs)
return
# next check to see if this is a "field-function"
# operator in the form "func=value", like "True=enabled"
if isinstance(opt, str) and opt.find("=") > 0:
field_cmd, value_rhs = opt.split("=")
fn_field = "_fieldfunc_" + field_cmd
if not hasattr(self, fn_field):
raise ValueError("Unknown field-func: '%'" % field_cmd)
kvargs["astype"] = getattr(self, fn_field)(value_rhs)
fields.astype(f_name, xpath, **kvargs)
return
raise RuntimeError("Dont know what to do with field: '%s'" % f_name)
# ---[ END: _add_dictfield ] ---------------------------------------------
def _add_view_fields(self, view_dict, fields_name, fields):
""" add a group of fields to the view """
fields_dict = view_dict[fields_name]
try:
# see if this is a 'fields_<group>' collection, and if so
# then we automatically setup using the group mechanism
mark = fields_name.index("_")
group = {"group": fields_name[mark + 1 :]}
except:
# otherwise, no group, just standard 'fields'
group = {}
for f_name, f_data in fields_dict.items():
# each field could have its own unique set of properties
# so create a kvargs <dict> each time. but copy in the
# groups <dict> (single item) generically.
kvargs = {}
kvargs.update(group)
if isinstance(f_data, dict):
self._add_dictfield(fields, f_name, f_data, kvargs)
continue
if f_data in self._catalog_dict:
# f_data is the table name
cls_tbl = self.catalog.get(f_data, self._build_table(f_data))
fields.table(f_name, cls_tbl)
continue
# if we are here, then it means that the field is a string value
xpath = f_name if f_data is True else f_data
fields.str(f_name, xpath, **kvargs)
def _add_cmd_view_fields(self, view_dict, fields_name, fields):
""" add a group of fields to the view """
fields_dict = view_dict[fields_name]
for f_name, f_data in fields_dict.items():
if f_data in self._catalog_dict:
cls_tbl = self.catalog.get(f_data, self._build_cmdtable(f_data))
fields.table(f_name, cls_tbl)
continue
# if we are here, it means we need to filter fields from textfsm
fields._fields.update({f_name: f_data})
# -------------------------------------------------------------------------
def _build_view(self, view_name):
""" build a new View definition """
if view_name in self.catalog:
return self.catalog[view_name]
view_dict = self._catalog_dict[view_name]
kvargs = {"view_name": view_name}
# if there are field groups, then get that now.
if "groups" in view_dict:
kvargs["groups"] = view_dict["groups"]
# if there are eval, then get that now.
if "eval" in view_dict:
kvargs["eval"] = {}
for key, exp in view_dict["eval"].items():
env = Environment()
kvargs["eval"][key] = env.parse(exp)
# if this view extends another ...
if "extends" in view_dict:
base_cls = self.catalog.get(view_dict["extends"])
# @@@ should check for base_cls is None!
kvargs["extends"] = base_cls
fields = _FIELDS()
fg_list = [name for name in view_dict if name.startswith("fields")]
for fg_name in fg_list:
self._add_view_fields(view_dict, fg_name, fields)
cls = _VIEW(fields.end, **kvargs)
self.catalog[view_name] = cls
return cls
# -------------------------------------------------------------------------
def _build_cmdview(self, view_name):
""" build a new View definition """
if view_name in self.catalog:
return self.catalog[view_name]
view_dict = self._catalog_dict[view_name]
kvargs = {"view_name": view_name}
if "columns" in view_dict:
kvargs["columns"] = view_dict["columns"]
elif "title" in view_dict:
kvargs["title"] = view_dict["title"]
if "regex" in view_dict:
kvargs["regex"] = view_dict["regex"]
if "exists" in view_dict:
kvargs["exists"] = view_dict["exists"]
if "filters" in view_dict:
kvargs["filters"] = view_dict["filters"]
if "eval" in view_dict:
kvargs["eval"] = {}
for key, exp in view_dict["eval"].items():
env = Environment()
kvargs["eval"][key] = env.parse(exp)
fields = _FIELDS()
fg_list = [name for name in view_dict if name.startswith("fields")]
for fg_name in fg_list:
self._add_cmd_view_fields(view_dict, fg_name, fields)
cls = _CMDVIEW(fields.end, **kvargs)
self.catalog[view_name] = cls
return cls
# -----------------------------------------------------------------------
# Create a Get-Table from YAML definition
# -----------------------------------------------------------------------
def _build_optable(self, table_name):
""" build a new Get-Table definition """
if table_name in self.catalog:
return self.catalog[table_name]
tbl_dict = self._catalog_dict[table_name]
kvargs = deepcopy(tbl_dict)
rpc = kvargs.pop("rpc")
kvargs["table_name"] = table_name
if "view" in tbl_dict:
view_name = tbl_dict["view"]
cls_view = self.catalog.get(view_name, self._build_view(view_name))
kvargs["view"] = cls_view
cls = _GET(rpc, **kvargs)
self.catalog[table_name] = cls
return cls
# -----------------------------------------------------------------------
# Create a Get-Table from YAML definition
# -----------------------------------------------------------------------
def _build_cmdtable(self, table_name):
""" build a new command-Table definition """
if table_name in self.catalog:
return self.catalog[table_name]
tbl_dict = self._catalog_dict[table_name]
kvargs = deepcopy(tbl_dict)
if "command" in kvargs:
cmd = kvargs.pop("command")
kvargs["table_name"] = table_name
if "view" in tbl_dict:
view_name = tbl_dict["view"]
cls_view = self.catalog.get(view_name, self._build_cmdview(view_name))
kvargs["view"] = cls_view
cls = _CMDTBL(cmd, **kvargs)
self.catalog[table_name] = cls
return cls
elif "title" in kvargs:
cmd = kvargs.pop("title")
kvargs["table_name"] = table_name
if "view" in tbl_dict:
view_name = tbl_dict["view"]
cls_view = self.catalog.get(view_name, self._build_cmdview(view_name))
kvargs["view"] = cls_view
cls = _CMDCHILDTBL(cmd, **kvargs)
self.catalog[table_name] = cls
return cls
else:
kvargs["table_name"] = table_name
if "view" in tbl_dict:
view_name = tbl_dict["view"]
cls_view = self.catalog.get(view_name, self._build_cmdview(view_name))
kvargs["view"] = cls_view
cls = _CMDCHILDTBL(**kvargs)
self.catalog[table_name] = cls
return cls
# -----------------------------------------------------------------------
# Create a Table class from YAML definition
# -----------------------------------------------------------------------
def _build_table(self, table_name):
""" build a new Table definition """
if table_name in self.catalog:
return self.catalog[table_name]
tbl_dict = self._catalog_dict[table_name]
table_item = tbl_dict.pop("item")
kvargs = deepcopy(tbl_dict)
kvargs["table_name"] = table_name
if "view" in tbl_dict:
view_name = tbl_dict["view"]
cls_view = self.catalog.get(view_name, self._build_view(view_name))
kvargs["view"] = cls_view
cls = _TABLE(table_item, **kvargs)
self.catalog[table_name] = cls
return cls
def _build_cfgtable(self, table_name):
""" build a new Config-Table definition """
if table_name in self.catalog:
return self.catalog[table_name]
tbl_dict = deepcopy(self._catalog_dict[table_name])
if "view" in tbl_dict:
# transpose name to class
view_name = tbl_dict["view"]
tbl_dict["view"] = self.catalog.get(view_name, self._build_view(view_name))
cls = _CFGTBL(table_name, tbl_dict)
self.catalog[table_name] = cls
return cls
# -----------------------------------------------------------------------
# Primary builders ...
# -----------------------------------------------------------------------
def _sortitems(self):
for k, v in self._catalog_dict.items():
if "rpc" in v:
self._item_optables.append(k)
elif "get" in v:
self._item_cfgtables.append(k)
elif "set" in v:
self._item_cfgtables.append(k)
elif "command" in v or "title" in v:
self._item_cmdtables.append(k)
elif "view" in v and "item" in v and v["item"] == "*":
self._item_cmdtables.append(k)
elif "view" in v:
self._item_tables.append(k)
else:
self._item_views.append(k)
def load(self, catalog_dict, envrion={}):
# load the yaml data and extract the item names. these names will
# become the new class definitions
self._catalog_dict = catalog_dict
self._sortitems()
list(map(self._build_optable, self._item_optables))
list(map(self._build_cfgtable, self._item_cfgtables))
list(map(self._build_cmdtable, self._item_cmdtables))
list(map(self._build_table, self._item_tables))
list(map(self._build_view, self._item_views))
return self.catalog
| 36
| 89
| 0.552956
|
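As the FactoryLoader docstring above describes, the loader turns a plain dict (typically parsed from YAML) into Table and View classes. A hedged sketch of that flow — the YAML snippet and the table/view names are invented for illustration, not an official jnpr.junos definition:

import yaml
from jnpr.junos.factory.factory_loader import FactoryLoader

yaml_data = """
UserTable:
  rpc: get-system-users-information
  item: uptime-information/user-table/user-entry
  key: user
  view: UserView

UserView:
  fields:
    user: user
    tty: tty
"""

catalog = FactoryLoader().load(yaml.safe_load(yaml_data))
globals().update(catalog)              # UserTable and UserView are now regular names
# table = UserTable(dev); table.get()  # with an open jnpr.junos.Device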
d01ed85920df3b4e23c898a9dbb5c8468e22d5b7
| 302
|
py
|
Python
|
microutil/tests/test_segmentation.py
|
Hekstra-Lab/microutil
|
ab3b7b51754bf90ef35d6eea1c7b35cece638f0e
|
[
"BSD-3-Clause"
] | 2
|
2021-07-06T05:31:51.000Z
|
2021-10-05T13:29:59.000Z
|
microutil/tests/test_segmentation.py
|
Hekstra-Lab/microutil
|
ab3b7b51754bf90ef35d6eea1c7b35cece638f0e
|
[
"BSD-3-Clause"
] | 16
|
2021-02-08T22:36:42.000Z
|
2021-11-01T22:36:02.000Z
|
microutil/tests/test_segmentation.py
|
Hekstra-Lab/microutil
|
ab3b7b51754bf90ef35d6eea1c7b35cece638f0e
|
[
"BSD-3-Clause"
] | null | null | null |
from microutil.segmentation import individualize
from .utils import open_zarr
def test_individualize():
input = open_zarr('test-data/individualize/input')
expected = open_zarr('test-data/individualize/expected')
individualize(input, min_distance=10)
assert expected.identical(input)
| 27.454545
| 60
| 0.778146
|
1fec7dd0b2058483397b0933bf66ba5d4055b8a1
| 1,125
|
py
|
Python
|
day2/day2.py
|
jberends/aoc2021
|
063a0b19bb13b3a084f367e7c279afddc4f39113
|
[
"MIT"
] | null | null | null |
day2/day2.py
|
jberends/aoc2021
|
063a0b19bb13b3a084f367e7c279afddc4f39113
|
[
"MIT"
] | null | null | null |
day2/day2.py
|
jberends/aoc2021
|
063a0b19bb13b3a084f367e7c279afddc4f39113
|
[
"MIT"
] | null | null | null |
# uri: https://adventofcode.com/2021/day/2
from ..base import get_input
inputs_string = """forward 5
down 5
forward 8
up 3
down 8
forward 2
"""
file_path = get_input(2, ".txt")
with file_path.open() as fd:
inputs = [row.split(" ") for row in fd.readlines()]
# inputs = [row.split(" ") for row in inputs_string.splitlines()]
pos = 0
depth = 0
for row in inputs:
command, amount = row
if command == "forward":
pos += int(amount)
elif command == "down":
depth += int(amount)
elif command == "up":
depth -= int(amount)
else:
raise ValueError(f"We fell through the if loop: {command=}, {amount=}")
result = pos * depth
print(f"part 1 {result}")
# part 2
pos = 0
depth = 0
aim = 0
for row in inputs:
command, amount = row
if command == "forward":
pos += int(amount)
depth += aim * int(amount)
elif command == "down":
aim += int(amount)
elif command == "up":
aim -= int(amount)
else:
raise ValueError(f"We fell through the if loop: {command=}, {amount=}")
result = pos * depth
print(f"part 2 {result}")
| 19.396552
| 79
| 0.592889
|
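Running the two loops above on the six sample commands in inputs_string gives a quick sanity check of the aim logic. A compact worked trace (the values follow directly from the code shown):

# part 1: forward 5+8+2 -> pos 15; down 5, up 3, down 8 -> depth 10; answer 15 * 10 = 150
# part 2: aim goes 0 -> 5 -> 2 -> 10, depth accumulates 0 + 40 + 20 = 60; answer 15 * 60 = 900
sample = [("forward", 5), ("down", 5), ("forward", 8), ("up", 3), ("down", 8), ("forward", 2)]
pos = depth = aim = 0
for command, amount in sample:
    if command == "forward":
        pos, depth = pos + amount, depth + aim * amount
    elif command == "down":
        aim += amount
    else:
        aim -= amount
print(pos * depth)  # 900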
d6ffcd5f75ef01f61f45f7cb6a47875f2bfc3e5d
| 919
|
py
|
Python
|
pytorch/autograd.py
|
tianqizhao-louis/nlp-research
|
e513d84a02ecd50a0d48418fca7d33c74cc0c9a4
|
[
"MIT"
] | null | null | null |
pytorch/autograd.py
|
tianqizhao-louis/nlp-research
|
e513d84a02ecd50a0d48418fca7d33c74cc0c9a4
|
[
"MIT"
] | null | null | null |
pytorch/autograd.py
|
tianqizhao-louis/nlp-research
|
e513d84a02ecd50a0d48418fca7d33c74cc0c9a4
|
[
"MIT"
] | null | null | null |
import torch
x = torch.ones(5)
y = torch.zeros(3)
w = torch.randn(5, 3, requires_grad=True)
b = torch.randn(3, requires_grad=True)
z = torch.matmul(x, w)+b
loss = torch.nn.functional.binary_cross_entropy_with_logits(z, y)
print('Gradient function for z =',z.grad_fn)
print('Gradient function for loss =', loss.grad_fn)
loss.backward()
print(w.grad)
print(b.grad)
z = torch.matmul(x, w)+b
print(z.requires_grad)
with torch.no_grad():
z = torch.matmul(x, w)+b
print(z.requires_grad)
z = torch.matmul(x, w)+b
z_det = z.detach()
print(z_det.requires_grad)
inp = torch.eye(5, requires_grad=True)
out = (inp+1).pow(2)
out.backward(torch.ones_like(inp), retain_graph=True)
print("First call\n", inp.grad)
out.backward(torch.ones_like(inp), retain_graph=True)
print("\nSecond call\n", inp.grad)
inp.grad.zero_()
out.backward(torch.ones_like(inp), retain_graph=True)
print("\nCall after zeroing gradients\n", inp.grad)
| 26.257143
| 65
| 0.729053
|
acda14538a13843fcd80cefaf866483fd9420255
| 255
|
py
|
Python
|
DjangoGit/tasks/views.py
|
PlainestAmigo/Web-Programming
|
9e875b32227196e7bea218e0c3e5998b2fc20f19
|
[
"MIT"
] | null | null | null |
DjangoGit/tasks/views.py
|
PlainestAmigo/Web-Programming
|
9e875b32227196e7bea218e0c3e5998b2fc20f19
|
[
"MIT"
] | null | null | null |
DjangoGit/tasks/views.py
|
PlainestAmigo/Web-Programming
|
9e875b32227196e7bea218e0c3e5998b2fc20f19
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
tasks = ["foo", "bar", "baz"]
# Create your views here.
def index(request):
return render(request, "tasks/index.html", {
"tasks": tasks
})
def add(request):
return render(request, "tasks/add.html")
| 23.181818
| 48
| 0.65098
|
c43ca6b0565f368da1fa42ccb5d282b5e70fb396
| 7,399
|
py
|
Python
|
pandas/core/window/expanding.py
|
mtrbean/pandas
|
c0ff67a22df9c18da1172766e313732ed2ab6c30
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2019-02-18T00:47:14.000Z
|
2019-02-18T00:47:14.000Z
|
pandas/core/window/expanding.py
|
mtrbean/pandas
|
c0ff67a22df9c18da1172766e313732ed2ab6c30
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2019-08-18T16:00:45.000Z
|
2019-08-18T16:00:45.000Z
|
pandas/core/window/expanding.py
|
mtrbean/pandas
|
c0ff67a22df9c18da1172766e313732ed2ab6c30
|
[
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2019-12-01T23:32:18.000Z
|
2019-12-01T23:32:18.000Z
|
from textwrap import dedent
from pandas.compat.numpy import function as nv
from pandas.util._decorators import Appender, Substitution
from pandas.core.window.common import _doc_template, _GroupByMixin, _shared_docs
from pandas.core.window.rolling import _Rolling_and_Expanding
class Expanding(_Rolling_and_Expanding):
"""
Provide expanding transformations.
Parameters
----------
min_periods : int, default 1
Minimum number of observations in window required to have a value
(otherwise result is NA).
center : bool, default False
Set the labels at the center of the window.
axis : int or str, default 0
Returns
-------
a Window sub-classed for the particular operation
See Also
--------
rolling : Provides rolling window calculations.
ewm : Provides exponential weighted functions.
Notes
-----
By default, the result is set to the right edge of the window. This can be
changed to the center of the window by setting ``center=True``.
Examples
--------
>>> df = pd.DataFrame({'B': [0, 1, 2, np.nan, 4]})
B
0 0.0
1 1.0
2 2.0
3 NaN
4 4.0
>>> df.expanding(2).sum()
B
0 NaN
1 1.0
2 3.0
3 3.0
4 7.0
"""
_attributes = ["min_periods", "center", "axis"]
def __init__(self, obj, min_periods=1, center=False, axis=0, **kwargs):
super().__init__(obj=obj, min_periods=min_periods, center=center, axis=axis)
@property
def _constructor(self):
return Expanding
def _get_window(self, other=None, **kwargs):
"""
Get the window length over which to perform some operation.
Parameters
----------
other : object, default None
The other object that is involved in the operation.
Such an object is involved for operations like covariance.
Returns
-------
window : int
The window length.
"""
axis = self.obj._get_axis(self.axis)
length = len(axis) + (other is not None) * len(axis)
other = self.min_periods or -1
return max(length, other)
_agg_see_also_doc = dedent(
"""
See Also
--------
DataFrame.expanding.aggregate
DataFrame.rolling.aggregate
DataFrame.aggregate
"""
)
_agg_examples_doc = dedent(
"""
Examples
--------
>>> df = pd.DataFrame(np.random.randn(10, 3), columns=['A', 'B', 'C'])
>>> df
A B C
0 -2.385977 -0.102758 0.438822
1 -1.004295 0.905829 -0.954544
2 0.735167 -0.165272 -1.619346
3 -0.702657 -1.340923 -0.706334
4 -0.246845 0.211596 -0.901819
5 2.463718 3.157577 -1.380906
6 -1.142255 2.340594 -0.039875
7 1.396598 -1.647453 1.677227
8 -0.543425 1.761277 -0.220481
9 -0.640505 0.289374 -1.550670
>>> df.ewm(alpha=0.5).mean()
A B C
0 -2.385977 -0.102758 0.438822
1 -1.464856 0.569633 -0.490089
2 -0.207700 0.149687 -1.135379
3 -0.471677 -0.645305 -0.906555
4 -0.355635 -0.203033 -0.904111
5 1.076417 1.503943 -1.146293
6 -0.041654 1.925562 -0.588728
7 0.680292 0.132049 0.548693
8 0.067236 0.948257 0.163353
9 -0.286980 0.618493 -0.694496
"""
)
@Substitution(
see_also=_agg_see_also_doc,
examples=_agg_examples_doc,
versionadded="",
klass="Series/Dataframe",
axis="",
)
@Appender(_shared_docs["aggregate"])
def aggregate(self, arg, *args, **kwargs):
return super().aggregate(arg, *args, **kwargs)
agg = aggregate
@Substitution(name="expanding")
@Appender(_shared_docs["count"])
def count(self, **kwargs):
return super().count(**kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["apply"])
def apply(self, func, raw=None, args=(), kwargs={}):
return super().apply(func, raw=raw, args=args, kwargs=kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["sum"])
def sum(self, *args, **kwargs):
nv.validate_expanding_func("sum", args, kwargs)
return super().sum(*args, **kwargs)
@Substitution(name="expanding")
@Appender(_doc_template)
@Appender(_shared_docs["max"])
def max(self, *args, **kwargs):
nv.validate_expanding_func("max", args, kwargs)
return super().max(*args, **kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["min"])
def min(self, *args, **kwargs):
nv.validate_expanding_func("min", args, kwargs)
return super().min(*args, **kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["mean"])
def mean(self, *args, **kwargs):
nv.validate_expanding_func("mean", args, kwargs)
return super().mean(*args, **kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["median"])
def median(self, **kwargs):
return super().median(**kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["std"])
def std(self, ddof=1, *args, **kwargs):
nv.validate_expanding_func("std", args, kwargs)
return super().std(ddof=ddof, **kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["var"])
def var(self, ddof=1, *args, **kwargs):
nv.validate_expanding_func("var", args, kwargs)
return super().var(ddof=ddof, **kwargs)
@Substitution(name="expanding")
@Appender(_doc_template)
@Appender(_shared_docs["skew"])
def skew(self, **kwargs):
return super().skew(**kwargs)
_agg_doc = dedent(
"""
Examples
--------
The example below will show an expanding calculation with a window size of
four matching the equivalent function call using `scipy.stats`.
>>> arr = [1, 2, 3, 4, 999]
>>> import scipy.stats
>>> fmt = "{0:.6f}" # limit the printed precision to 6 digits
>>> print(fmt.format(scipy.stats.kurtosis(arr[:-1], bias=False)))
-1.200000
>>> print(fmt.format(scipy.stats.kurtosis(arr, bias=False)))
4.999874
>>> s = pd.Series(arr)
>>> s.expanding(4).kurt()
0 NaN
1 NaN
2 NaN
3 -1.200000
4 4.999874
dtype: float64
"""
)
@Appender(_agg_doc)
@Substitution(name="expanding")
@Appender(_shared_docs["kurt"])
def kurt(self, **kwargs):
return super().kurt(**kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["quantile"])
def quantile(self, quantile, interpolation="linear", **kwargs):
return super().quantile(
quantile=quantile, interpolation=interpolation, **kwargs
)
@Substitution(name="expanding")
@Appender(_doc_template)
@Appender(_shared_docs["cov"])
def cov(self, other=None, pairwise=None, ddof=1, **kwargs):
return super().cov(other=other, pairwise=pairwise, ddof=ddof, **kwargs)
@Substitution(name="expanding")
@Appender(_shared_docs["corr"])
def corr(self, other=None, pairwise=None, **kwargs):
return super().corr(other=other, pairwise=pairwise, **kwargs)
class ExpandingGroupby(_GroupByMixin, Expanding):
"""
Provide a expanding groupby implementation.
"""
@property
def _constructor(self):
return Expanding
| 28.348659
| 84
| 0.605893
|
20e0ef1633e6b97ab69f8d657d87a365bf0f892b
| 8,739
|
py
|
Python
|
minpy/core.py
|
hotpxl/minpy-jit
|
fdf968ebe526f1e3f1d7fd7b5c3595d70a4b851d
|
[
"Apache-2.0"
] | 1
|
2019-09-13T20:23:14.000Z
|
2019-09-13T20:23:14.000Z
|
minpy/core.py
|
hotpxl/minpy-jit
|
fdf968ebe526f1e3f1d7fd7b5c3595d70a4b851d
|
[
"Apache-2.0"
] | null | null | null |
minpy/core.py
|
hotpxl/minpy-jit
|
fdf968ebe526f1e3f1d7fd7b5c3595d70a4b851d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import ast
import copy
import inspect
import textwrap
import functools
import contextlib
def parse_function_definition(func):
source_code = textwrap.dedent(inspect.getsource(func))
function_ast = ast.parse(source_code, mode='exec')
return function_ast
def evaluate_function_definition(function_ast, global_namespace,
closure_parameters, closure_arguments):
function_name = function_ast.body[0].name
evaluation_context = ast.Module(body=[
ast.FunctionDef(
name='evaluation_context',
args=ast.arguments(
args=[
ast.arg(arg=i, annotation=None) for i in closure_parameters
],
vararg=None,
kwonlyargs=[],
kw_defaults=[],
kwarg=None,
defaults=[]),
body=[
function_ast.body[0],
ast.Return(value=ast.Name(id=function_name, ctx=ast.Load()))
],
decorator_list=[],
returns=None)
])
ast.fix_missing_locations(evaluation_context)
local_namespace = {}
exec(
compile(evaluation_context, filename='<ast>', mode='exec'),
global_namespace, local_namespace)
ret = local_namespace['evaluation_context'](*closure_arguments)
return ret
def add_type_tracing(function_ast):
type_traced_function_ast = copy_ast(function_ast)
closure_parameters = []
closure_arguments = []
closure_parameters.append('__type_tracing')
node_cells = []
def type_tracing(expr, node_id):
node_cells[node_id].type = type(expr)
return expr
closure_arguments.append(type_tracing)
class NodeTransformer(ast.NodeTransformer):
def trace_node(self, node):
node = self.generic_visit(node)
# Do not add tracing if it already has type information.
if hasattr(node.stem_node, 'type'):
return node
node_cells.append(node.stem_node)
ret = ast.Call(
func=ast.Name(id='__type_tracing', ctx=ast.Load()),
args=[node, ast.Num(n=len(node_cells) - 1)],
keywords=[])
return ret
# Do not recurse into inner definitions.
def visit_FunctionDef(self, node):
return node
def visit_AsyncFunctionDef(self, node):
return node
def visit_ClassDef(self, node):
return node
def visit_Lambda(self, node):
return node
def visit_BoolOp(self, node):
return self.trace_node(node)
def visit_BinOp(self, node):
return self.trace_node(node)
def visit_UnaryOp(self, node):
return self.trace_node(node)
def visit_IfExp(self, node):
return self.trace_node(node)
def visit_Call(self, node):
return self.trace_node(node)
def visit_Name(self, node):
if not isinstance(node.ctx, ast.Load):
return node
else:
return self.trace_node(node)
type_traced_function_ast.body[0].body = [
NodeTransformer().visit(i)
for i in type_traced_function_ast.body[0].body
]
return (type_traced_function_ast, closure_parameters, closure_arguments)
def add_function_tracing(function_ast):
function_traced_function_ast = copy_ast(function_ast)
closure_parameters = []
closure_arguments = []
closure_parameters.append('__function_tracing')
node_cells = []
def function_tracing(f, node_id):
node_cells[node_id].ref = f
return f
closure_arguments.append(function_tracing)
class NodeTransformer(ast.NodeTransformer):
# Do not recurse into inner definitions.
def visit_FunctionDef(self, node):
return node
def visit_AsyncFunctionDef(self, node):
return node
def visit_ClassDef(self, node):
return node
def visit_Lambda(self, node):
return node
def visit_Call(self, node):
node = self.generic_visit(node)
# Do not add tracing if it already has function information.
if hasattr(node.stem_node, 'ref'):
return node
node_cells.append(node.stem_node)
node.func = ast.Call(
func=ast.Name(id='__function_tracing', ctx=ast.Load()),
args=[node.func, ast.Num(n=len(node_cells) - 1)],
keywords=[])
return node
function_traced_function_ast.body[0].body = [
NodeTransformer().visit(i)
for i in function_traced_function_ast.body[0].body
]
return (function_traced_function_ast, closure_parameters,
closure_arguments)
def pretty_print(node,
annotate_fields=True,
include_attributes=False,
extra_attributes=['type', 'ref'],
indent=' '):
def format(node, level=0):
if isinstance(node, ast.AST):
fields = [(i, format(j, level)) for i, j, in ast.iter_fields(node)]
if include_attributes and node._attributes:
fields.extend([(i, format(getattr(node, i), level))
for i in node._attributes])
for i in extra_attributes:
if hasattr(node, i):
fields.append((i, getattr(node, i).__name__))
return ''.join([
type(node).__name__, '(',
', '.join(('{}={}'.format(*field) for field in fields)
if annotate_fields else (i for _, i in fields)), ')'
])
elif isinstance(node, list):
lines = [indent * (level + 2) + format(i, level + 2) for i in node]
if 0 < len(lines):
return '[\n' + ',\n'.join(lines) + '\n' + indent * (
level + 1) + ']'
else:
return '[]'
else:
return repr(node)
if not isinstance(node, ast.AST):
raise TypeError(
'Expected ast.AST, got {}.'.format(type(node).__name__))
return format(node)
def tree_print(node, extra_attributes=['type', 'ref']):
def tree_print_lines(node):
childs = list(map(tree_print_lines, ast.iter_child_nodes(node)))
fields = list(iter_non_node_children(node))
for i in extra_attributes:
if hasattr(node, i):
fields.append((i, getattr(node, i).__name__))
ret = [
type(node).__name__ + '(' +
', '.join(map(lambda pair: '{}={}'.format(*pair), fields)) + ')'
]
for c in childs[:-1]:
for i, j in enumerate(c):
ret.append(('+--' if i == 0 else '| ') + j)
if 0 < len(childs):
for i, j in enumerate(childs[-1]):
ret.append(('+--' if i == 0 else ' ') + j)
return ret
return '\n'.join(tree_print_lines(node))
def copy_ast(function_ast):
original_nodes = []
class NodeSequencer(ast.NodeVisitor):
def generic_visit(self, node):
original_nodes.append(node)
for i in ast.iter_child_nodes(node):
self.visit(i)
NodeSequencer().visit(function_ast)
new_ast = copy.deepcopy(function_ast)
class NodeTransformer(ast.NodeTransformer):
def generic_visit(self, node):
n = original_nodes.pop(0)
node.original_node = n
node.stem_node = getattr(n, 'stem_node', n)
for i in ast.iter_child_nodes(node):
self.visit(i)
return node
NodeTransformer().visit(new_ast)
return new_ast
def return_on_reentrance(f):
reentry = False
@functools.wraps(f)
def wrapper(arg):
nonlocal reentry
if reentry:
return arg
reentry = True
ret = f(arg)
reentry = False
return ret
@contextlib.contextmanager
def reentrance_guard():
nonlocal reentry
old_reentry, reentry = reentry, True
yield
reentry = False
wrapper.reentrance_guard = reentrance_guard
return wrapper
def iter_non_node_children(node):
for name, field in ast.iter_fields(node):
if not isinstance(field, (ast.AST, list)):
yield name, field
elif isinstance(field, list):
for item in field:
if not isinstance(item, ast.AST):
yield name, item
| 30.771127
| 79
| 0.57581
|
8a2651029dcbc2b4986ae2f14af920e33843f8fe
| 428
|
py
|
Python
|
ics/structures/op_eth_link_mode.py
|
intrepidcs/python_ics
|
7bfa8c2f893763608f9255f9536a2019cfae0c23
|
[
"Unlicense"
] | 45
|
2017-10-17T08:42:08.000Z
|
2022-02-21T16:26:48.000Z
|
ics/structures/op_eth_link_mode.py
|
intrepidcs/python_ics
|
7bfa8c2f893763608f9255f9536a2019cfae0c23
|
[
"Unlicense"
] | 106
|
2017-03-07T21:10:39.000Z
|
2022-03-29T15:32:46.000Z
|
ics/structures/op_eth_link_mode.py
|
intrepidcs/python_ics
|
7bfa8c2f893763608f9255f9536a2019cfae0c23
|
[
"Unlicense"
] | 17
|
2017-04-04T12:30:22.000Z
|
2022-01-28T05:30:25.000Z
|
# This file was auto generated; Do not modify, if you value your sanity!
import ctypes
import enum
class op_eth_link_mode(enum.IntEnum):
"""A ctypes-compatible IntEnum superclass."""
@classmethod
def from_param(cls, obj):
return int(obj)
OPETH_LINK_AUTO = 0
OPETH_LINK_MASTER = enum.auto()
OPETH_LINK_SLAVE = enum.auto()
_opEthLinkMode = op_eth_link_mode
opEthLinkMode = op_eth_link_mode
| 20.380952
| 72
| 0.721963
|
0ce9b49dd83c0f1d0128604b4be41783f71efbaf
| 3,058
|
py
|
Python
|
pyami/projects.py
|
ayush4921/pyami
|
eb296c6dfbbd162239918370cf58d98076b31d5d
|
[
"Apache-2.0"
] | null | null | null |
pyami/projects.py
|
ayush4921/pyami
|
eb296c6dfbbd162239918370cf58d98076b31d5d
|
[
"Apache-2.0"
] | null | null | null |
pyami/projects.py
|
ayush4921/pyami
|
eb296c6dfbbd162239918370cf58d98076b31d5d
|
[
"Apache-2.0"
] | null | null | null |
import logging
logging.warning("loading projects.py")
import os
from util import Util
from constants import PHYSCHEM_RESOURCES, DIAGRAMS_DIR, MINIPROJ, PROJECTS, MINICORPORA
class AmiProjects:
"""project files"""
CCT = "cct"
DIFFPROT = "diffprot"
DISEASE = "disease"
FFML = "ffml"
FFML20 = "ffml20"
LIION10 = "liion10"
OIL186 = "oil186"
OIL26 = "oil26"
WORC_EXPLOSION = "worc_explosion"
WORC_SYNTH = "worc_synth"
# minicorpora
C_ACTIVITY = "activity"
C_INVASIVE = "invasive"
C_PLANT_PART = "plantpart"
C_HYDRODISTIL = "hydrodistil"
logger = logging.getLogger("ami_project")
def __init__(self):
self.create_project_dict()
def create_project_dict(self):
self.project_dict = {}
# in this repo
self.add_with_check(AmiProjects.LIION10, os.path.join(PHYSCHEM_RESOURCES, "liion10"), "Li-ion batteries")
self.add_with_check(AmiProjects.FFML20, os.path.join(DIAGRAMS_DIR, "luke", "ffml20"), "forcefields + ML")
self.add_with_check(AmiProjects.OIL26, os.path.join(PHYSCHEM_RESOURCES, "oil26"), "26 oil plant papers")
self.add_with_check(AmiProjects.CCT, os.path.join(DIAGRAMS_DIR, "satish", "cct"), "steel cooling curves"),
self.add_with_check(AmiProjects.DIFFPROT, os.path.join(DIAGRAMS_DIR, "rahul", "diffprotexp"),
"differential protein expr")
# foreign resources
self.add_with_check(AmiProjects.DISEASE, os.path.join(MINIPROJ, "disease", "1-part"), "disease papers")
self.add_with_check(AmiProjects.OIL186, os.path.join(PROJECTS, "CEVOpen/searches/oil186"), "186 oil plant papers")
self.add_with_check(AmiProjects.WORC_SYNTH, os.path.join(PROJECTS, "worcester", "synthesis"), "chemical syntheses")
self.add_with_check(AmiProjects.WORC_EXPLOSION, os.path.join(PROJECTS, "worcester", "explosion"), "explosion hazards")
# minicorpora
self.add_with_check(AmiProjects.C_ACTIVITY, os.path.join(MINICORPORA, "activity"), "biomedical activities")
self.add_with_check(AmiProjects.C_HYDRODISTIL, os.path.join(MINICORPORA, "hydrodistil"), "hydrodistillation")
self.add_with_check(AmiProjects.C_INVASIVE, os.path.join(MINICORPORA, "invasive"), "invasive plants")
self.add_with_check(AmiProjects.C_PLANT_PART, os.path.join(MINICORPORA, "plantpart"), "plant parts")
def add_with_check(self, key, file, desc=None):
"""checks for existence and adds filename to project_dict
key: unique name for ami_dict , default ami_dict in AmiProjects"""
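        # Example call, mirroring the usage in create_project_dict() above:
        #   self.add_with_check(AmiProjects.OIL26,
        #                       os.path.join(PHYSCHEM_RESOURCES, "oil26"),
        #                       "26 oil plant papers")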
if not os.path.isdir(file):
self.logger.error("project files not available for ", file)
return
Util.check_exists(file)
if key in self.project_dict:
raise Exception (str(key) + " already exists in project_dict, must be unique")
self.project_dict[key] = AmiProject(file, desc)
class AmiProject:
def __init__(self, dir, desc=None):
self.dir = dir
self.description = desc
| 43.685714
| 126
| 0.686069
|
0d7eddcd928352b3c9a111bc6e57796c19256bcf
| 20,407
|
py
|
Python
|
src/biokbase/narrative/tests/test_appmanager.py
|
Tianhao-Gu/narrative-jupyterlab
|
94a4b4a6bbb583f65ce50c8f8343083aceafff05
|
[
"MIT"
] | 2
|
2019-05-03T10:12:56.000Z
|
2020-10-26T05:35:16.000Z
|
src/biokbase/narrative/tests/test_appmanager.py
|
Tianhao-Gu/narrative-jupyterlab
|
94a4b4a6bbb583f65ce50c8f8343083aceafff05
|
[
"MIT"
] | 9
|
2019-05-19T04:13:55.000Z
|
2022-03-23T19:18:44.000Z
|
src/biokbase/narrative/tests/test_appmanager.py
|
Tianhao-Gu/narrative-jupyterlab
|
94a4b4a6bbb583f65ce50c8f8343083aceafff05
|
[
"MIT"
] | 2
|
2019-03-12T17:41:10.000Z
|
2019-04-24T15:33:50.000Z
|
"""
Tests for the app manager.
"""
from biokbase.narrative.jobs.appmanager import AppManager
import biokbase.narrative.jobs.specmanager as specmanager
import biokbase.narrative.app_util as app_util
from biokbase.narrative.jobs.job import Job
from IPython.display import HTML
import unittest
import mock
from .narrative_mock.mockclients import get_mock_client
import os
from .util import TestConfig
def mock_agent_token(*args, **kwargs):
return dict({
"user": "testuser",
"id": "12345",
"token": "abcde"
})
def mock_run_job(*args, **kwargs):
return "new_job_id"
class AppManagerTestCase(unittest.TestCase):
@classmethod
def setUpClass(self):
config = TestConfig()
self.am = AppManager()
self.good_app_id = config.get('app_tests', 'good_app_id')
self.good_tag = config.get('app_tests', 'good_app_tag')
self.bad_app_id = config.get('app_tests', 'bad_app_id')
self.bad_tag = config.get('app_tests', 'bad_app_tag')
self.test_app_id = config.get('app_tests', 'test_app_id')
self.test_app_params = {
"read_library_names": ["rhodo.art.jgi.reads"],
"output_contigset_name": "rhodo_contigs",
"recipe": "auto",
"assembler": "",
"pipeline": "",
"min_contig_len": None
}
self.test_job_id = config.get('app_tests', 'test_job_id')
self.test_tag = config.get('app_tests', 'test_app_tag')
self.public_ws = config.get('app_tests', 'public_ws_name')
self.ws_id = int(config.get('app_tests', 'public_ws_id'))
self.app_input_ref = config.get('app_tests', 'test_input_ref')
self.batch_app_id = config.get('app_tests', 'batch_app_id')
def test_reload(self):
self.am.reload()
info = self.am.app_usage(self.good_app_id, self.good_tag)
self.assertTrue(info)
def test_app_usage(self):
# good id and good tag
usage = self.am.app_usage(self.good_app_id, self.good_tag)
self.assertTrue(usage)
# bad id
with self.assertRaises(ValueError):
self.am.app_usage(self.bad_app_id)
# bad tag
with self.assertRaises(ValueError):
self.am.app_usage(self.good_app_id, self.bad_tag)
def test_app_usage_html(self):
usage = self.am.app_usage(self.good_app_id, self.good_tag)
self.assertTrue(usage._repr_html_())
def test_app_usage_str(self):
usage = self.am.app_usage(self.good_app_id, self.good_tag)
self.assertTrue(str(usage))
def test_available_apps_good(self):
apps = self.am.available_apps(self.good_tag)
self.assertIsInstance(apps, HTML)
def test_available_apps_bad(self):
with self.assertRaises(ValueError):
self.am.available_apps(self.bad_tag)
@mock.patch('biokbase.narrative.jobs.appmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
@mock.patch('biokbase.narrative.jobs.appmanager.auth.get_agent_token', side_effect=mock_agent_token)
def test_dry_run_app(self, m, auth):
os.environ['KB_WORKSPACE_ID'] = self.public_ws
output = self.am.run_app(
self.test_app_id,
self.test_app_params,
tag=self.test_tag,
dry_run=True
)
self.assertIsInstance(output, dict)
self.assertEqual(output['app_id'], self.test_app_id)
self.assertIsInstance(output['params'], list)
self.assertIn('method', output)
self.assertIn('service_ver', output)
self.assertIn('meta', output)
self.assertIn('tag', output['meta'])
self.assertIn('wsid', output)
@mock.patch('biokbase.narrative.jobs.appmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
@mock.patch('biokbase.narrative.jobs.appmanager.auth.get_agent_token', side_effect=mock_agent_token)
def test_run_app_good_inputs(self, m, auth):
m.return_value._send_comm_message.return_value = None
os.environ['KB_WORKSPACE_ID'] = self.public_ws
new_job = self.am.run_app(
self.test_app_id,
self.test_app_params,
tag=self.test_tag
)
self.assertIsInstance(new_job, Job)
self.assertEqual(new_job.job_id, self.test_job_id)
self.assertEqual(new_job.app_id, self.test_app_id)
self.assertEqual(new_job.tag, self.test_tag)
self.assertIsNone(new_job.cell_id)
@mock.patch('biokbase.narrative.jobs.appmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
@mock.patch('biokbase.narrative.jobs.appmanager.auth.get_agent_token', side_effect=mock_agent_token)
def test_run_app_from_gui_cell(self, m, auth):
m.return_value._send_comm_message.return_value = None
os.environ['KB_WORKSPACE_ID'] = self.public_ws
self.assertIsNone(self.am.run_app(
self.test_app_id,
self.test_app_params,
tag=self.test_tag,
cell_id="12345"
))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_bad_id(self, m):
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app(self.bad_app_id, None))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_bad_tag(self, m):
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app(self.good_app_id,
None,
tag=self.bad_tag))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_bad_version_match(self, m):
# fails because a non-release tag can't be versioned
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app(self.good_app_id,
None,
tag=self.good_tag,
version=">0.0.1"))
    # Running an app with missing inputs is now allowed. The app can
    # crash if it wants to; it can leave its process behind.
@mock.patch('biokbase.narrative.jobs.appmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
@mock.patch('biokbase.narrative.jobs.appmanager.auth.get_agent_token', side_effect=mock_agent_token)
def test_run_app_missing_inputs(self, m, auth):
m.return_value._send_comm_message.return_value = None
self.assertIsNotNone(self.am.run_app(self.good_app_id,
None,
tag=self.good_tag))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_bad_version(self, m):
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app(self.good_app_id,
None,
tag="dev",
version="1.0.0"))
@mock.patch('biokbase.narrative.jobs.appmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
@mock.patch('biokbase.narrative.jobs.appmanager.auth.get_agent_token', side_effect=mock_agent_token)
def test_run_app_batch_good_inputs(self, m, auth):
m.return_value._send_comm_message.return_value = None
os.environ['KB_WORKSPACE_ID'] = self.public_ws
new_job = self.am.run_app_batch(
self.test_app_id,
[
self.test_app_params,
self.test_app_params
],
tag=self.test_tag
)
self.assertIsInstance(new_job, Job)
self.assertEqual(new_job.job_id, self.test_job_id)
self.assertEqual(new_job.app_id, self.batch_app_id)
self.assertEqual(new_job.tag, self.test_tag)
self.assertIsNone(new_job.cell_id)
@mock.patch('biokbase.narrative.jobs.appmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
@mock.patch('biokbase.narrative.jobs.appmanager.auth.get_agent_token', side_effect=mock_agent_token)
def test_run_app_batch_gui_cell(self, m, auth):
m.return_value._send_comm_message.return_value = None
os.environ['KB_WORKSPACE_ID'] = self.public_ws
self.assertIsNone(self.am.run_app_batch(
self.test_app_id,
[
self.test_app_params,
self.test_app_params
],
tag=self.test_tag,
cell_id="12345"
))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_batch_bad_id(self, m):
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app_batch(self.bad_app_id, None))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_batch_bad_tag(self, m):
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app_batch(self.good_app_id,
None,
tag=self.bad_tag))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_batch_bad_version_match(self, m):
# fails because a non-release tag can't be versioned
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app_batch(self.good_app_id,
None,
tag=self.good_tag,
version=">0.0.1"))
    # Running an app with missing inputs is now allowed. The app can
    # crash if it wants to; it can leave its process behind.
@mock.patch('biokbase.narrative.jobs.appmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
@mock.patch('biokbase.narrative.jobs.appmanager.auth.get_agent_token', side_effect=mock_agent_token)
    def test_run_app_batch_missing_inputs(self, m, auth):
m.return_value._send_comm_message.return_value = None
self.assertIsNotNone(self.am.run_app_batch(self.good_app_id,
None,
tag=self.good_tag))
@mock.patch('biokbase.narrative.jobs.appmanager.JobManager')
def test_run_app_batch_bad_version(self, m):
m.return_value._send_comm_message.return_value = None
self.assertIsNone(self.am.run_app_batch(self.good_app_id,
None,
tag="dev",
version="1.0.0"))
@mock.patch('biokbase.narrative.jobs.appmanager.specmanager.clients.get', get_mock_client)
def test_app_description(self):
desc = self.am.app_description(self.good_app_id, tag=self.good_tag)
self.assertIsInstance(desc, HTML)
@mock.patch('biokbase.narrative.jobs.appmanager.specmanager.clients.get', get_mock_client)
def test_app_description_bad_tag(self):
with self.assertRaises(ValueError):
self.am.app_description(self.good_app_id, tag=self.bad_tag)
@mock.patch('biokbase.narrative.jobs.appmanager.specmanager.clients.get', get_mock_client)
def test_app_description_bad_name(self):
with self.assertRaises(ValueError):
self.am.app_description(self.bad_app_id)
@mock.patch('biokbase.narrative.jobs.appmanager.specmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.specmanager.clients.get', get_mock_client)
def test_validate_params(self):
inputs = {
"reads_tuple": [
{
"input_reads_label": "reads file 1",
"input_reads_obj": "rhodobacterium.art.q20.int.PE.reads",
"input_reads_metadata": {
"key1": "value1"
}
},
{
"input_reads_label": "reads file 2",
"input_reads_obj": "rhodobacterium.art.q10.PE.reads",
"input_reads_metadata": {
"key2": "value2"
}
}
],
"output_object": "MyReadsSet",
"description": "New Reads Set"
}
app_id = "NarrativeTest/test_create_set"
tag = "dev"
prev_ws_id = os.environ.get('KB_WORKSPACE_ID')
os.environ['KB_WORKSPACE_ID'] = self.public_ws
sm = specmanager.SpecManager()
spec = sm.get_spec(app_id, tag=tag)
(params, ws_inputs) = app_util.validate_parameters(app_id, tag, sm.app_params(spec), inputs)
self.assertDictEqual(params, inputs)
self.assertIn('12345/8/1', ws_inputs)
self.assertIn('12345/7/1', ws_inputs)
if prev_ws_id is None:
del(os.environ['KB_WORKSPACE_ID'])
else:
os.environ['KB_WORKSPACE_ID'] = prev_ws_id
@mock.patch('biokbase.narrative.jobs.appmanager.specmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.jobs.specmanager.clients.get', get_mock_client)
@mock.patch('biokbase.narrative.clients.get', get_mock_client)
def test_input_mapping(self):
self.maxDiff = None
inputs = {
"reads_tuple": [
{
"input_reads_label": "reads file 1",
"input_reads_obj": "rhodobacterium.art.q20.int.PE.reads",
"input_reads_metadata": {
"key1": "value1"
}
},
{
"input_reads_label": "reads file 2",
"input_reads_obj": "rhodobacterium.art.q10.PE.reads",
"input_reads_metadata": {
"key2": "value2"
}
}
],
"output_object": "MyReadsSet",
"description": "New Reads Set"
}
app_id = "NarrativeTest/test_create_set"
tag = "dev"
ws_name = self.public_ws
prev_ws_id = os.environ.get('KB_WORKSPACE_ID', None)
os.environ['KB_WORKSPACE_ID'] = ws_name
sm = specmanager.SpecManager()
spec = sm.get_spec(app_id, tag=tag)
spec_params = sm.app_params(spec)
spec_params_map = dict(
(spec_params[i]['id'], spec_params[i])
for i in range(len(spec_params))
)
mapped_inputs = self.am._map_inputs(
spec['behavior']['kb_service_input_mapping'],
inputs,
spec_params_map
)
expected = [{
'output_object_name': 'MyReadsSet',
'data': {
'items': [{
'label': 'reads file 1',
'metadata': {'key1': 'value1'},
'ref': '12345/7/1'
}, {
'label': 'reads file 2',
'metadata': {'key2': 'value2'},
'ref': '12345/8/1'
}],
'description': 'New Reads Set'
},
'workspace': ws_name
}]
self.assertDictEqual(expected[0], mapped_inputs[0])
ref_path = ws_name + '/MyReadsSet; ' + ws_name + "/rhodobacterium.art.q10.PE.reads"
ret = app_util.transform_param_value("resolved-ref", ref_path, None)
self.assertEqual(ret, ws_name + '/MyReadsSet;18836/5/1')
if prev_ws_id is None:
del(os.environ['KB_WORKSPACE_ID'])
else:
os.environ['KB_WORKSPACE_ID'] = prev_ws_id
@mock.patch('biokbase.narrative.jobs.appmanager.specmanager.clients.get', get_mock_client)
def test_generate_input(self):
prefix = 'pre'
suffix = 'suf'
num_symbols = 8
generator = {
'symbols': num_symbols,
'prefix': prefix,
'suffix': suffix
}
rand_str = self.am._generate_input(generator)
self.assertTrue(rand_str.startswith(prefix))
self.assertTrue(rand_str.endswith(suffix))
self.assertEqual(len(rand_str), len(prefix)+len(suffix)+num_symbols)
def test_generate_input_bad(self):
with self.assertRaises(ValueError):
self.am._generate_input({'symbols': 'foo'})
with self.assertRaises(ValueError):
self.am._generate_input({'symbols': -1})
def test_transform_input_good(self):
ws_name = self.public_ws
os.environ['KB_WORKSPACE_ID'] = ws_name
test_data = [
{
'value': 'input_value',
'type': 'ref',
'expected': ws_name + '/' + 'input_value'
},
{
'value': ws_name + "/input_value",
'type': 'ref',
'expected': ws_name + '/' + 'input_value'
},
{
'value': 'input_value',
'type': 'unresolved-ref',
'expected': ws_name + '/' + 'input_value'
},
{
'value': 'rhodobacterium.art.q20.int.PE.reads',
'type': 'resolved-ref',
'expected': '11635/9/1'
},
{
'value': ws_name + "/rhodobacterium.art.q20.int.PE.reads",
'type': 'resolved-ref',
'expected': '11635/9/1'
},
{
'value': None,
'type': 'int',
'expected': None
},
{
'value': '5',
'type': 'int',
'expected': 5
},
{
'value': ['a', 'b', 'c'],
'type': 'list<ref>',
'expected': [ws_name + '/a', ws_name + '/b', ws_name + '/c']
},
{
'value': ['rhodobacterium.art.q20.int.PE.reads',
'rhodobacterium.art.q10.PE.reads'],
'type': 'list<resolved-ref>',
'expected': ['11635/9/1', '11635/10/1']
},
{
'value': 'foo',
'type': 'list<ref>',
'expected': [ws_name + '/foo']
},
{
'value': ['1', '2', 3],
'type': 'list<int>',
'expected': [1, 2, 3]
},
{
'value': 'bar',
'type': None,
'expected': 'bar'
},
{
'value': 'rhodobacterium.art.q20.int.PE.reads',
'type': 'future-default',
'spec': {'is_output': 0, 'allowed_types': ["Some.KnownType"]},
'expected': '11635/9/1'
},
{
'value': [123, 456],
'type': None,
'expected': [123, 456]
},
{
'value': 123,
'type': 'string',
'expected': '123'
},
{
'value': ['one', 'two'],
'type': None,
'spec': {'type': 'textsubdata'},
'expected': "one,two"
},
{
'value': ['one', 'two'],
'type': "list<string>",
'spec': {'type': 'textsubdata'},
'expected': ['one', 'two']
},
{
'value': {'one': 1},
'type': 'string',
'expected': "one=1"
}
]
for test in test_data:
spec = test.get('spec', None)
ret = app_util.transform_param_value(test['type'], test['value'], spec)
self.assertEqual(ret, test['expected'])
del(os.environ['KB_WORKSPACE_ID'])
def test_transform_input_bad(self):
with self.assertRaises(ValueError):
app_util.transform_param_value('foo', 'bar', None)
if __name__ == "__main__":
unittest.main()
| 40.409901
| 104
| 0.563973
|
9a3ecce6e575b63417dd588e31c0f442bdcfaf5d
| 1,638
|
py
|
Python
|
unchaind/sink.py
|
cdanis/unchaind
|
88973d0bb732ebe4074204091562307e34581576
|
[
"MIT"
] | 2
|
2019-01-02T20:43:50.000Z
|
2019-01-28T10:15:13.000Z
|
unchaind/sink.py
|
supakeen/kaart-killbot
|
88973d0bb732ebe4074204091562307e34581576
|
[
"MIT"
] | 40
|
2018-12-26T16:20:57.000Z
|
2019-03-31T13:47:32.000Z
|
unchaind/sink.py
|
supakeen/kaart-killbot
|
88973d0bb732ebe4074204091562307e34581576
|
[
"MIT"
] | 4
|
2018-12-25T22:53:51.000Z
|
2021-02-20T19:54:51.000Z
|
"""Functions to talk to chat programs such as Slack and Discord."""
import json
import logging
from typing import Dict, Any, Optional
from unchaind.http import HTTPSession
log = logging.getLogger(__name__)
async def discord(
notifier: Dict[str, Any],
message: str,
*,
payload: Optional[Dict[str, Any]] = None,
) -> None:
"""Send a Discord message to the configured channel."""
if payload is not None:
http = HTTPSession()
await http.request(
url=notifier["webhook"], method="POST", body=json.dumps(payload)
)
else:
http = HTTPSession()
await http.request(
url=notifier["webhook"],
method="POST",
body=json.dumps({"content": message}),
)
async def console(
notifier: Dict[str, Any],
message: str,
*,
payload: Optional[Dict[str, Any]] = None,
) -> None:
"""Log a message. Intended for debugging use."""
log.info("NOTIFICATION: " + message)
async def slack(
notifier: Dict[str, Any],
message: str,
*,
payload: Optional[Dict[str, Any]] = None,
) -> None:
"""Send a Slack message to the configured channel. If payload was provided,
it's JSONified and used as the body of the request to Slack. Otherwise, message
will be displayed."""
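    # Both call forms, for illustration (added comment):
    #   await slack(notifier, "fallback text")                    # wrapped as {"text": ...}
    #   await slack(notifier, "ignored", payload={"text": "hi"})  # payload posted as-is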
if payload is not None:
http = HTTPSession()
await http.request(
url=notifier["webhook"], method="POST", body=json.dumps(payload)
)
else:
await slack(notifier, message, payload={"text": message})
sinks = {"discord": discord, "console": console, "slack": slack}
| 24.088235
| 84
| 0.611722
|
eebbb05a57322251f7034718c7a9486054e7503d
| 1,829
|
py
|
Python
|
hc2002/plugin/user_data.py
|
biochimia/hc2000
|
ec12df0326f368dde424bab861fc8fd1c673c010
|
[
"Apache-2.0"
] | null | null | null |
hc2002/plugin/user_data.py
|
biochimia/hc2000
|
ec12df0326f368dde424bab861fc8fd1c673c010
|
[
"Apache-2.0"
] | null | null | null |
hc2002/plugin/user_data.py
|
biochimia/hc2000
|
ec12df0326f368dde424bab861fc8fd1c673c010
|
[
"Apache-2.0"
] | null | null | null |
import hc2002.plugin as plugin
import os.path
import email.mime.base
import email.mime.multipart
import email.mime.text
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_magic_to_mime = {
'#!': ('text', 'x-shellscript'),
'#cloud-boothook': ('text', 'cloud-boothook'),
'#cloud-config': ('text', 'cloud-config'),
'#include': ('text', 'x-include-url'),
'#part-handler': ('text', 'part-handler'),
'#upstart-job': ('text', 'upstart-job'),
# TODO: This plugin should not know about file manifests, maybe?
'#manifest': ('text', 'hc2000-manifest'),
}
def _read_file(filename):
with open(filename, 'rb') as f:
filename = os.path.basename(filename)
return f.read(), filename
def _process_entry(entry, filename=None):
if entry.startswith('file:'):
entry, filename = _read_file(entry[5:])
maintype, subtype = ('application', 'octet-stream')
for magic, mime in _magic_to_mime.iteritems():
if entry.startswith(magic):
maintype, subtype = mime
break
if maintype == 'text':
msg = email.mime.text.MIMEText(entry, subtype)
else:
msg = email.mime.base.MIMEBase(maintype, subtype)
msg.set_payload(entry)
if filename:
msg.add_header('Content-Disposition', 'attachment', filename=filename)
else:
msg.add_header('Content-Disposition', 'attachment')
return msg
def apply(instance):
if 'user-data' not in instance \
or not isinstance(instance['user-data'], list):
return
data = email.mime.multipart.MIMEMultipart()
for entry in instance['user-data']:
data.attach(_process_entry(entry))
# Replace user-data with MIME-ified version.
instance['user-data'] = data.as_string()
| 30.483333
| 78
| 0.630399
|
d1fc6bc34e22dd92a597363d8b28e1dd2044d705
| 2,499
|
py
|
Python
|
test/android_protocol_test/mqtt_communicator.py
|
NCLPhD/FedML
|
ffa15262ee963b9c856f34f0b2202f4dfeb3a76b
|
[
"Apache-2.0"
] | null | null | null |
test/android_protocol_test/mqtt_communicator.py
|
NCLPhD/FedML
|
ffa15262ee963b9c856f34f0b2202f4dfeb3a76b
|
[
"Apache-2.0"
] | null | null | null |
test/android_protocol_test/mqtt_communicator.py
|
NCLPhD/FedML
|
ffa15262ee963b9c856f34f0b2202f4dfeb3a76b
|
[
"Apache-2.0"
] | null | null | null |
import json
import uuid
import paho.mqtt.client as mqtt
import fedml
BROKER_HOST = "mqtt.fedml.ai"
BROKER_PORT = 1883
MQTT_KEEPALIVE = 60
EDGE_ID = "EDGE-%s" % uuid.uuid4().hex
MQTT_USER = "admin"
MQTT_PWD = "password"
class EdgeCommunicator(object):
def __init__(self, host=BROKER_HOST, port=BROKER_PORT, client_id=EDGE_ID):
self._host = host
self._port = port
self._client_id = client_id
self._listeners = dict()
        # Establish the connection
self._client = mqtt.Client(client_id=client_id, clean_session=True)
        # Check the hostname's certificate authentication
# self._client.tls_insecure_set(True)
        # Set the certificate file
# self._client.tls_set(trust)
self._client.on_connect = self.on_connect
self._client.on_publish = self.on_publish
self._client.on_disconnect = self.on_disconnect
self._client.on_message = self.on_message
self._client.enable_logger()
self._client.username_pw_set(MQTT_USER, MQTT_PWD)
        # Definition of the last-will message
_will_msg = {"ID": f"{client_id}", "stat": "Online"}
        # Last-will message: once this client connects to the MQTT broker, the will
        # message is held by the broker; if the client disconnects abnormally, the
        # broker sends the will to clients subscribed to the will topic to announce
        # that this client is offline.
self._client.will_set(
"W/topic", payload=json.dumps(_will_msg), qos=0, retain=True
)
self._client.connect(self._host, self._port, MQTT_KEEPALIVE)
self._client.loop_start()
def send_message(self, topic, message):
self._client.publish(topic, payload=message, qos=0)
def on_connect(self, client, userdata, flags, rc):
fedml.logger.info(f"Connected with result code {rc}")
client.subscribe("EDGE/#")
client.subscribe(f"EDGE/{self._client_id}")
def on_message(self, client, userdata, msg):
fedml.logger.info(f"on_message({msg.topic}, {str(msg.payload)})")
_listener = self._listeners.get(msg.topic, None)
if _listener is not None and callable(_listener):
_listener(msg.topic, str(msg.payload))
@staticmethod
def on_publish(client, obj, mid):
fedml.logger.info(f"on_publish mid={mid}")
@staticmethod
def on_disconnect(client, userdata, rc):
fedml.logger.info(f"on_disconnect code={rc}")
def add_message_listener(self, topic, listener):
fedml.logger.info(f"add_message_listener({topic})")
self._listeners[topic] = listener
def remove_message_listener(self, topic):
fedml.logger.info(f"remove_message_listener({topic})")
del self._listeners[topic]
| 34.232877
| 78
| 0.666267
|
c10358dbe91b094ae78ef0880312fbb011039c73
| 1,527
|
py
|
Python
|
magenta/models/basic_rnn/basic_rnn_generator.py
|
Sprog-gle/Magenta
|
55bfd53f8112cf34952e67efc646b98523837f8f
|
[
"Apache-2.0"
] | null | null | null |
magenta/models/basic_rnn/basic_rnn_generator.py
|
Sprog-gle/Magenta
|
55bfd53f8112cf34952e67efc646b98523837f8f
|
[
"Apache-2.0"
] | null | null | null |
magenta/models/basic_rnn/basic_rnn_generator.py
|
Sprog-gle/Magenta
|
55bfd53f8112cf34952e67efc646b98523837f8f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Build the generator for the basic RNN model."""
# internal imports
from magenta.models.basic_rnn import basic_rnn_encoder_decoder
from magenta.models.basic_rnn import basic_rnn_graph
from magenta.models.shared import melody_rnn_sequence_generator
from magenta.protobuf import generator_pb2
DEFAULT_ID = 'basic_rnn'
def create_generator(checkpoint,
bundle,
steps_per_quarter=4,
hparams=None,
generator_id=DEFAULT_ID):
melody_encoder_decoder = basic_rnn_encoder_decoder.MelodyEncoderDecoder()
details = generator_pb2.GeneratorDetails(
id=generator_id, description='Basic RNN Generator')
return melody_rnn_sequence_generator.MelodyRnnSequenceGenerator(
details,
checkpoint,
bundle,
melody_encoder_decoder,
basic_rnn_graph.build_graph,
steps_per_quarter,
{} if hparams is None else hparams)
| 37.243902
| 75
| 0.740668
|
460c15724f528a2d2428ec575ad8af192bba1250
| 5,531
|
py
|
Python
|
python/test/perf_metric_test.py
|
jayholman/vmaf
|
0bba4faf68ab89e38314cc596e6908b4fb83984d
|
[
"Apache-2.0"
] | 2
|
2019-07-26T03:49:07.000Z
|
2021-06-17T12:16:46.000Z
|
python/test/perf_metric_test.py
|
jayholman/vmaf
|
0bba4faf68ab89e38314cc596e6908b4fb83984d
|
[
"Apache-2.0"
] | null | null | null |
python/test/perf_metric_test.py
|
jayholman/vmaf
|
0bba4faf68ab89e38314cc596e6908b4fb83984d
|
[
"Apache-2.0"
] | null | null | null |
import sys
import unittest
import numpy as np
import scipy.io
from vmaf.config import VmafConfig
from vmaf.core.perf_metric import RmsePerfMetric, SrccPerfMetric, PccPerfMetric, \
KendallPerfMetric, AucPerfMetric, ResolvingPowerPerfMetric
__copyright__ = "Copyright 2016-2019, Netflix, Inc."
__license__ = "Apache, Version 2.0"
class AggrScorePerfMetricTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_rmse_perf_metric(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 3, 4]
metric = RmsePerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 0.0)
def test_rmse_perf_metric_enable_mapping(self):
groundtruths = np.arange(0, 1, 0.0001)
predictions = np.arange(0, 1, 0.0001)
metric = RmsePerfMetric(groundtruths, predictions)
result = metric.evaluate(enable_mapping=True)
self.assertAlmostEqual(result['score'], 0.022753642178052261, places=6)
def test_rmse_perf_metric2(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 3, 5]
metric = RmsePerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 0.5, places=6)
def test_srcc_perf_metric(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 3, 5]
metric = SrccPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 1.0, places=6)
def test_srcc_perf_metric2(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 5, 3]
metric = SrccPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 0.79999999999999993, places=6)
def test_srcc_perf_metric_enable_mapping(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 3, 5]
metric = SrccPerfMetric(groundtruths, predictions)
result = metric.evaluate(enable_mapping=True)
self.assertAlmostEqual(result['score'], 1.0, places=6)
def test_pcc_perf_metric(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 3, 5]
metric = PccPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 0.98270762982399085, places=6)
def test_kendall_perf_metric(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 3, 5]
metric = KendallPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 1.0, places=6)
def test_kendall_perf_metric2(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 5, 3]
metric = KendallPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 0.66666666666666663, places=6)
def test_kendall_perf_metric_enable_mapping(self):
groundtruths = [1, 2, 3, 4]
predictions = [1, 2, 3, 5]
metric = KendallPerfMetric(groundtruths, predictions)
result = metric.evaluate(enable_mapping=True)
self.assertAlmostEqual(result['score'], 1.0, places=6)
@unittest.skipIf(sys.version_info > (3,), reason="TODO python3: check randomness order in py3")
def test_auc_perf_metric(self):
np.random.seed(0)
groundtruths = np.random.normal(0, 1.0, [4, 10]) + np.tile(np.array([1, 2, 3, 4]), [10, 1]).T
predictions = [1, 2, 3, 4]
metric = AucPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 0.95, places=6)
self.assertAlmostEqual(result['AUC_BW'], 0.9166666666666666, places=6)
self.assertAlmostEqual(result['AUC_DS'], 0.95, places=6)
self.assertAlmostEqual(result['CC_0'], 1.0, places=6)
self.assertAlmostEqual(result['THR'], 3.0, places=6)
def test_auc_metrics_performance(self):
mat_filepath = VmafConfig.test_resource_path('data_Toyama.mat')
mat_dict = scipy.io.loadmat(mat_filepath)
results = AucPerfMetric._metrics_performance(mat_dict['objScoDif'], mat_dict['signif'])
self.assertAlmostEqual(np.mean(results['AUC_DS']), 0.69767003960902052, places=6)
self.assertAlmostEqual(np.mean(results['AUC_BW']), 0.94454700301894534, places=6)
self.assertAlmostEqual(np.mean(results['CC_0']), 0.88105386206276415, places=6)
self.assertAlmostEqual(np.mean(results['THR']), 6.2392849606450556, places=6)
def test_respow_perf_metric(self):
np.random.seed(0)
groundtruths = np.random.normal(0, 1.0, [4, 10]) + np.tile(np.array([1, 2, 3, 4]), [10, 1]).T
predictions = [1, 2, 3, 4]
metric = ResolvingPowerPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['resolving_power_95perc'], 1.2176359647113211, places=6)
self.assertAlmostEqual(result['score'], 1.2176359647113211, places=6)
def test_respow_perf_metric2(self):
np.random.seed(0)
groundtruths = np.random.normal(0, 10.0, [100, 30]) + np.tile(np.array(np.arange(100)), [30, 1]).T
predictions = np.arange(100)
metric = ResolvingPowerPerfMetric(groundtruths, predictions)
result = metric.evaluate()
self.assertAlmostEqual(result['score'], 9.0014569671225111, places=6)
| 42.546154
| 106
| 0.661725
|
02defeb64fc7e3b22a4459b787f46e1f65bb010d
| 1,303
|
py
|
Python
|
sections/7.1-comprehensions.py
|
Cygnut/python-tutorial
|
9a8049cf2a8239bfd0fe7e2cb33fdf134b7d7e16
|
[
"MIT"
] | null | null | null |
sections/7.1-comprehensions.py
|
Cygnut/python-tutorial
|
9a8049cf2a8239bfd0fe7e2cb33fdf134b7d7e16
|
[
"MIT"
] | null | null | null |
sections/7.1-comprehensions.py
|
Cygnut/python-tutorial
|
9a8049cf2a8239bfd0fe7e2cb33fdf134b7d7e16
|
[
"MIT"
] | null | null | null |
"""list comprehensions are a very powerful tool, which creates a new list based on another list, in
a single, readable line.
For example, let's say we need to create a list of integers which specify the length of each word in
a certain sentence, but only if the word is not the word "the".
"""
# %%
sentence = "the quick brown fox jumps over the lazy dog"
words = sentence.split()
word_lengths = []
for word in words:
if word != "the":
word_lengths.append(len(word))
print(words)
print(word_lengths)
# %%
# Using a list comprehension, we could simplify this process to this notation:
# %%
sentence = "the quick brown fox jumps over the lazy dog"
words = sentence.split()
word_lengths = [len(word) for word in words if word != "the"]
print(words)
print(word_lengths)
# %%
"""set comprehensions are pretty similar, you just swap [] for {}:
"""
# %%
sentence = "the quick brown fox jumps over the lazy dog"
words = sentence.split()
unique_word_lengths = {len(word) for word in words if word != "the"}
print(unique_word_lengths)
# %%
"""dict comprehensions come along for the ride..
"""
# %%
sentence = "the quick brown fox jumps over the lazy dog"
words = sentence.split()
words_with_lengths = {word: len(word) for word in words if word != "the"}
print(words_with_lengths)
# %%
| 23.267857
| 100
| 0.704528
|
8bc5801a3c2698d299810dbdfd110ddf48e02a55
| 1,044
|
py
|
Python
|
twitoff/db_model.py
|
charlie-may86/twitoff2
|
fbe0bb30d9111d69f91a87be176e60c6c359daec
|
[
"MIT"
] | null | null | null |
twitoff/db_model.py
|
charlie-may86/twitoff2
|
fbe0bb30d9111d69f91a87be176e60c6c359daec
|
[
"MIT"
] | null | null | null |
twitoff/db_model.py
|
charlie-may86/twitoff2
|
fbe0bb30d9111d69f91a87be176e60c6c359daec
|
[
"MIT"
] | null | null | null |
'''SQLAlchemy models for Twitoff'''
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(80), unique=True, nullable=False)
# follower_count = db.Column(db.Integer, nullable=False)
# tweet IDS are ordinal ints, so we can fetch most recent tweets
# newest_tweet_id = db.Column(db.BigInteger, nullable=False)
# called a dunder method prints the user name when you refernce the user
def __repr__(self):
return '<User %r>' % self.username
class Tweet(db.Model):
id = db.Column(db.Integer, primary_key=True)
text = db.Column(db.Unicode(300))
embedding = db.Column(db.PickleType, nullable=False)
user_id = db.Column(db.BigInteger, db.ForeignKey('user.id'), nullable=False)
user = db.relationship('User', backref=db.backref('tweet', lazy=True))
# called a dunder method prints the user name when you refernce the user
def __repr__(self):
return '<Tweet %r>' % self.text
| 37.285714
| 81
| 0.699234
|
41487df334fc555bef564be7e424581fd05c8ad1
| 16,393
|
py
|
Python
|
src/paypal/express/gateway.py
|
aarticianpc/greenpointtrees
|
52afcb20c77fb3ee454ed0d1271a596031cffff6
|
[
"MIT"
] | null | null | null |
src/paypal/express/gateway.py
|
aarticianpc/greenpointtrees
|
52afcb20c77fb3ee454ed0d1271a596031cffff6
|
[
"MIT"
] | null | null | null |
src/paypal/express/gateway.py
|
aarticianpc/greenpointtrees
|
52afcb20c77fb3ee454ed0d1271a596031cffff6
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
import logging
from decimal import Decimal as D
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.http import urlencode
from django.utils import six
from django.utils.translation import ugettext as _
from django.template.defaultfilters import truncatewords, striptags
from localflavor.us import us_states
from . import models, exceptions as express_exceptions
from paypal import gateway
from paypal import exceptions
# PayPal methods
SET_EXPRESS_CHECKOUT = 'SetExpressCheckout'
GET_EXPRESS_CHECKOUT = 'GetExpressCheckoutDetails'
DO_EXPRESS_CHECKOUT = 'DoExpressCheckoutPayment'
DO_CAPTURE = 'DoCapture'
DO_VOID = 'DoVoid'
REFUND_TRANSACTION = 'RefundTransaction'
SALE, AUTHORIZATION, ORDER = 'Sale', 'Authorization', 'Order'
# The latest version of the PayPal Express API can be found here:
# https://developer.paypal.com/docs/classic/release-notes/
API_VERSION = getattr(settings, 'PAYPAL_API_VERSION', '119')
logger = logging.getLogger('paypal.express')
def _format_description(description):
if description:
return truncatewords(striptags(description), 12)
return ''
def _format_currency(amt):
return amt.quantize(D('0.01'))
def _fetch_response(method, extra_params):
"""
Fetch the response from PayPal and return a transaction object
"""
# Build parameter string
params = {
'METHOD': method,
'VERSION': API_VERSION,
'USER': settings.PAYPAL_API_USERNAME,
'PWD': settings.PAYPAL_API_PASSWORD,
'SIGNATURE': settings.PAYPAL_API_SIGNATURE,
}
params.update(extra_params)
if getattr(settings, 'PAYPAL_SANDBOX_MODE', True):
url = 'https://api-3t.sandbox.paypal.com/nvp'
else:
url = 'https://api-3t.paypal.com/nvp'
# Print easy-to-read version of params for debugging
param_str = "\n".join(["%s: %s" % x for x in sorted(params.items())])
logger.debug("Making %s request to %s with params:\n%s", method, url,
param_str)
# Make HTTP request
pairs = gateway.post(url, params)
pairs_str = "\n".join(["%s: %s" % x for x in sorted(pairs.items())
if not x[0].startswith('_')])
logger.debug("Response with params:\n%s", pairs_str)
# Record transaction data - we save this model whether the txn
# was successful or not
txn = models.ExpressTransaction(
method=method,
version=API_VERSION,
ack=pairs['ACK'],
raw_request=pairs['_raw_request'],
raw_response=pairs['_raw_response'],
response_time=pairs['_response_time'],
)
if txn.is_successful:
txn.correlation_id = pairs['CORRELATIONID']
if method == SET_EXPRESS_CHECKOUT:
txn.amount = params['PAYMENTREQUEST_0_AMT']
txn.currency = params['PAYMENTREQUEST_0_CURRENCYCODE']
txn.token = pairs['TOKEN']
elif method == GET_EXPRESS_CHECKOUT:
txn.token = params['TOKEN']
txn.amount = D(pairs['PAYMENTREQUEST_0_AMT'])
txn.currency = pairs['PAYMENTREQUEST_0_CURRENCYCODE']
elif method == DO_EXPRESS_CHECKOUT:
txn.token = params['TOKEN']
txn.amount = D(pairs['PAYMENTINFO_0_AMT'])
txn.currency = pairs['PAYMENTINFO_0_CURRENCYCODE']
else:
# There can be more than one error, each with its own number.
if 'L_ERRORCODE0' in pairs:
txn.error_code = pairs['L_ERRORCODE0']
if 'L_LONGMESSAGE0' in pairs:
txn.error_message = pairs['L_LONGMESSAGE0']
txn.save()
if not txn.is_successful:
msg = "Error %s - %s" % (txn.error_code, txn.error_message)
logger.error(msg)
raise exceptions.PayPalError(msg)
return txn
def set_txn(basket, shipping_methods, currency, return_url, cancel_url, update_url=None,
action=SALE, user=None, user_address=None, shipping_method=None,
shipping_address=None, no_shipping=False, paypal_params=None):
"""
Register the transaction with PayPal to get a token which we use in the
redirect URL. This is the 'SetExpressCheckout' from their documentation.
There are quite a few options that can be passed to PayPal to configure
this request - most are controlled by PAYPAL_* settings.
"""
# Default parameters (taken from global settings). These can be overridden
# and customised using the paypal_params parameter.
_params = {
'CUSTOMERSERVICENUMBER': getattr(
settings, 'PAYPAL_CUSTOMER_SERVICES_NUMBER', None),
'SOLUTIONTYPE': getattr(settings, 'PAYPAL_SOLUTION_TYPE', None),
'LANDINGPAGE': getattr(settings, 'PAYPAL_LANDING_PAGE', None),
'BRANDNAME': getattr(settings, 'PAYPAL_BRAND_NAME', None),
# Display settings
'PAGESTYLE': getattr(settings, 'PAYPAL_PAGESTYLE', None),
'HDRIMG': getattr(settings, 'PAYPAL_HEADER_IMG', None),
'PAYFLOWCOLOR': getattr(settings, 'PAYPAL_PAYFLOW_COLOR', None),
        # These settings may be deprecated in the latest version of PayPal's
        # API
'HDRBACKCOLOR': getattr(settings, 'PAYPAL_HEADER_BACK_COLOR', None),
'HDRBORDERCOLOR': getattr(
settings, 'PAYPAL_HEADER_BORDER_COLOR', None),
'LOCALECODE': getattr(settings, 'PAYPAL_LOCALE', None),
'ALLOWNOTE': getattr(settings, 'PAYPAL_ALLOW_NOTE', True),
'CALLBACKTIMEOUT': getattr(settings, 'PAYPAL_CALLBACK_TIMEOUT', 3)
}
confirm_shipping_addr = getattr(settings, 'PAYPAL_CONFIRM_SHIPPING', None)
if confirm_shipping_addr and not no_shipping:
_params['REQCONFIRMSHIPPING'] = 1
if paypal_params:
_params.update(paypal_params)
locale = _params.get('LOCALECODE', None)
if locale:
valid_choices = ('AU', 'DE', 'FR', 'GB', 'IT', 'ES', 'JP', 'US')
if locale not in valid_choices:
raise ImproperlyConfigured(
"'%s' is not a valid locale code" % locale)
# Boolean values become integers
_params.update((k, int(v)) for k, v in _params.items() if isinstance(v, bool))
# Remove None values
params = dict((k, v) for k, v in _params.items() if v is not None)
    # PayPal have an upper limit on transactions. It's in dollars, which is
    # fiddly to work with. Lazy solution - only check when dollars are used as
# the PayPal currency.
#canhhs
# if (shipping_method):
# print dir(shipping_method.calculate(basket))
# amount = basket.total_incl_tax + shipping_method.calculate(basket).excl_tax
# else:
# amount = basket.total_incl_tax
amount = basket.total_incl_tax
if currency == 'USD' and amount > 10000:
msg = 'PayPal can only be used for orders up to 10000 USD'
logger.error(msg)
raise express_exceptions.InvalidBasket(_(msg))
if amount <= 0:
msg = 'The basket total is zero so no payment is required'
logger.error(msg)
raise express_exceptions.InvalidBasket(_(msg))
# PAYMENTREQUEST_0_AMT should include tax, shipping and handling
params.update({
'PAYMENTREQUEST_0_AMT': amount,
'PAYMENTREQUEST_0_CURRENCYCODE': currency,
'RETURNURL': return_url,
# 'PAYMENTREQUEST_0_SHIPPINGAMT': shipping_method.calculate(basket).excl_tax,
# 'PAYMENTREQUEST_0_ITEMAMT': amount,
'CANCELURL': cancel_url,
'PAYMENTREQUEST_0_PAYMENTACTION': action,
})
# Add item details
index = 0
for index, line in enumerate(basket.all_lines()):
product = line.product
params['L_PAYMENTREQUEST_0_NAME%d' % index] = product.get_title()
params['L_PAYMENTREQUEST_0_NUMBER%d' % index] = (product.upc if
product.upc else '')
desc = ''
if product.description:
desc = _format_description(product.description)
params['L_PAYMENTREQUEST_0_DESC%d' % index] = desc
# Note, we don't include discounts here - they are handled as separate
# lines - see below
params['L_PAYMENTREQUEST_0_AMT%d' % index] = _format_currency(
line.unit_price_incl_tax)
params['L_PAYMENTREQUEST_0_QTY%d' % index] = line.quantity
# If the order has discounts associated with it, the way PayPal suggests
# using the API is to add a separate item for the discount with the value
# as a negative price. See "Integrating Order Details into the Express
# Checkout Flow"
# https://cms.paypal.com/us/cgi-bin/?cmd=_render-content&content_ID=developer/e_howto_api_ECCustomizing
# Iterate over the 3 types of discount that can occur
for discount in basket.offer_discounts:
index += 1
name = _("Special Offer: %s") % discount['name']
params['L_PAYMENTREQUEST_0_NAME%d' % index] = name
params['L_PAYMENTREQUEST_0_DESC%d' % index] = _format_description(name)
params['L_PAYMENTREQUEST_0_AMT%d' % index] = _format_currency(
-discount['discount'])
params['L_PAYMENTREQUEST_0_QTY%d' % index] = 1
for discount in basket.voucher_discounts:
index += 1
name = "%s (%s)" % (discount['voucher'].name,
discount['voucher'].code)
params['L_PAYMENTREQUEST_0_NAME%d' % index] = name
params['L_PAYMENTREQUEST_0_DESC%d' % index] = _format_description(name)
params['L_PAYMENTREQUEST_0_AMT%d' % index] = _format_currency(
-discount['discount'])
params['L_PAYMENTREQUEST_0_QTY%d' % index] = 1
for discount in basket.shipping_discounts:
index += 1
name = _("Shipping Offer: %s") % discount['name']
params['L_PAYMENTREQUEST_0_NAME%d' % index] = name
params['L_PAYMENTREQUEST_0_DESC%d' % index] = _format_description(name)
params['L_PAYMENTREQUEST_0_AMT%d' % index] = _format_currency(
-discount['discount'])
params['L_PAYMENTREQUEST_0_QTY%d' % index] = 1
# We include tax in the prices rather than separately as that's how it's
# done on most British/Australian sites. Will need to refactor in the
# future no doubt.
# Note that the following constraint must be met
#
# PAYMENTREQUEST_0_AMT = (
# PAYMENTREQUEST_0_ITEMAMT +
# PAYMENTREQUEST_0_TAXAMT +
# PAYMENTREQUEST_0_SHIPPINGAMT +
# PAYMENTREQUEST_0_HANDLINGAMT)
#
# Hence, if tax is to be shown then it has to be aggregated up to the order
# level.
params['PAYMENTREQUEST_0_ITEMAMT'] = _format_currency(
basket.total_incl_tax)
params['PAYMENTREQUEST_0_TAXAMT'] = _format_currency(D('0.00'))
# Instant update callback information
if update_url:
params['CALLBACK'] = update_url
# Contact details and address details - we provide these as it would make
    # the PayPal registration process smoother if the user doesn't already have
# an account.
if user:
params['EMAIL'] = user.email
if user_address:
params['SHIPTONAME'] = user_address.name
params['SHIPTOSTREET'] = user_address.line1
params['SHIPTOSTREET2'] = user_address.line2
params['SHIPTOCITY'] = user_address.line4
params['SHIPTOSTATE'] = user_address.state
params['SHIPTOZIP'] = user_address.postcode
params['SHIPTOCOUNTRYCODE'] = user_address.country.iso_3166_1_a2
# Shipping details (if already set) - we override the SHIPTO* fields and
# set a flag to indicate that these can't be altered on the PayPal side.
if shipping_method and shipping_address:
params['ADDROVERRIDE'] = 0
        # It's recommended not to set 'confirmed shipping' if supplying the
# shipping address directly.
params['REQCONFIRMSHIPPING'] = 0
params['SHIPTONAME'] = shipping_address.name
params['SHIPTOSTREET'] = shipping_address.line1
params['SHIPTOSTREET2'] = shipping_address.line2
params['SHIPTOCITY'] = shipping_address.line4
params['SHIPTOSTATE'] = shipping_address.state
params['SHIPTOZIP'] = shipping_address.postcode
params['SHIPTOCOUNTRYCODE'] = shipping_address.country.iso_3166_1_a2
# For US addresses, we need to try and convert the state into 2 letter
# code - otherwise we can get a 10736 error as the shipping address and
# zipcode don't match the state. Very silly really.
if params['SHIPTOCOUNTRYCODE'] == 'US':
key = params['SHIPTOSTATE'].lower().strip()
if key in us_states.STATES_NORMALIZED:
params['SHIPTOSTATE'] = us_states.STATES_NORMALIZED[key]
elif no_shipping:
params['NOSHIPPING'] = 1
# Shipping charges
params['PAYMENTREQUEST_0_SHIPPINGAMT'] = _format_currency(D('0.00'))
max_charge = D('0.00')
for index, method in enumerate(shipping_methods):
is_default = index == 0
params['L_SHIPPINGOPTIONISDEFAULT%d' % index] = 'true' if is_default else 'false'
charge = method.calculate(basket).incl_tax
if charge > max_charge:
max_charge = charge
if is_default:
params['PAYMENTREQUEST_0_SHIPPINGAMT'] = _format_currency(charge)
params['PAYMENTREQUEST_0_AMT'] += charge
params['L_SHIPPINGOPTIONNAME%d' % index] = six.text_type(method.name)
params['L_SHIPPINGOPTIONAMOUNT%d' % index] = _format_currency(charge)
# Set shipping charge explicitly if it has been passed
if shipping_method:
charge = shipping_method.calculate(basket).incl_tax
params['PAYMENTREQUEST_0_SHIPPINGAMT'] = _format_currency(charge)
params['PAYMENTREQUEST_0_AMT'] += charge
# Both the old version (MAXAMT) and the new version (PAYMENT...) are needed
# here - think it's a problem with the API.
params['PAYMENTREQUEST_0_MAXAMT'] = _format_currency(amount + max_charge)
params['MAXAMT'] = _format_currency(amount + max_charge)
# Handling set to zero for now - I've never worked on a site that needed a
# handling charge.
params['PAYMENTREQUEST_0_HANDLINGAMT'] = _format_currency(D('0.00'))
# Ensure that the total is formatted correctly.
params['PAYMENTREQUEST_0_AMT'] = _format_currency(
params['PAYMENTREQUEST_0_AMT'])
txn = _fetch_response(SET_EXPRESS_CHECKOUT, params)
# Construct return URL
if getattr(settings, 'PAYPAL_SANDBOX_MODE', True):
url = 'https://www.sandbox.paypal.com/webscr'
else:
url = 'https://www.paypal.com/webscr'
params = (('cmd', '_express-checkout'),
('token', txn.token),)
return '%s?%s' % (url, urlencode(params))
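# Rough usage sketch for set_txn. The request, basket and shipping methods come
# from the surrounding django-oscar checkout flow, so every name below other
# than set_txn itself is a placeholder/assumption, not part of this gateway.
def _example_redirect_to_paypal(request, basket, shipping_methods):
    from django.http import HttpResponseRedirect
    paypal_url = set_txn(
        basket=basket,
        shipping_methods=shipping_methods,
        currency='USD',
        return_url=request.build_absolute_uri('/checkout/paypal/place-order/'),
        cancel_url=request.build_absolute_uri('/basket/'),
        no_shipping=False)
    return HttpResponseRedirect(paypal_url)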
def get_txn(token):
"""
Fetch details of a transaction from PayPal using the token as
an identifier.
"""
return _fetch_response(GET_EXPRESS_CHECKOUT, {'TOKEN': token})
def do_txn(payer_id, token, amount, currency, action=SALE):
"""
DoExpressCheckoutPayment
"""
params = {
'PAYERID': payer_id,
'TOKEN': token,
'PAYMENTREQUEST_0_AMT': amount,
'PAYMENTREQUEST_0_CURRENCYCODE': currency,
'PAYMENTREQUEST_0_PAYMENTACTION': action,
}
return _fetch_response(DO_EXPRESS_CHECKOUT, params)
def do_capture(txn_id, amount, currency, complete_type='Complete',
note=None):
"""
Capture payment from a previous transaction
See https://cms.paypal.com/uk/cgi-bin/?&cmd=_render-content&content_ID=developer/e_howto_api_soap_r_DoCapture
"""
params = {
'AUTHORIZATIONID': txn_id,
'AMT': amount,
'CURRENCYCODE': currency,
'COMPLETETYPE': complete_type,
}
if note:
params['NOTE'] = note
return _fetch_response(DO_CAPTURE, params)
def do_void(txn_id, note=None):
params = {
'AUTHORIZATIONID': txn_id,
}
if note:
params['NOTE'] = note
return _fetch_response(DO_VOID, params)
FULL_REFUND = 'Full'
PARTIAL_REFUND = 'Partial'
def refund_txn(txn_id, is_partial=False, amount=None, currency=None):
params = {
'TRANSACTIONID': txn_id,
'REFUNDTYPE': PARTIAL_REFUND if is_partial else FULL_REFUND,
}
if is_partial:
params['AMT'] = amount
params['CURRENCYCODE'] = currency
return _fetch_response(REFUND_TRANSACTION, params)
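# Rough end-to-end sketch of the post-redirect flow. The token and payer_id are
# handed back by PayPal on the return URL; the capture/void/refund calls are
# shown as comments because the transaction id comes from PayPal's
# DoExpressCheckoutPayment response and is only a placeholder here.
def _example_complete_payment(token, payer_id):
    details = get_txn(token)
    txn = do_txn(payer_id, token, details.amount, details.currency,
                 action=AUTHORIZATION)
    # Later, settle or undo the authorisation (txn_id is the PayPal id):
    #   do_capture(txn_id, details.amount, details.currency)
    #   do_void(txn_id)
    #   refund_txn(txn_id, is_partial=True, amount=D('5.00'),
    #              currency=details.currency)
    return txn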
| 38.662736
| 113
| 0.666016
|
2d8fd25e267468010456eb172db412dda294a20d
| 1,938
|
py
|
Python
|
douglas/memcache.py
|
willkg/douglas
|
7e46919d0baefecba414f41980cbe9c0529a884e
|
[
"MIT"
] | 1
|
2016-02-12T15:26:24.000Z
|
2016-02-12T15:26:24.000Z
|
douglas/memcache.py
|
willkg/douglas
|
7e46919d0baefecba414f41980cbe9c0529a884e
|
[
"MIT"
] | 1
|
2015-04-20T13:33:39.000Z
|
2015-04-20T13:33:39.000Z
|
douglas/memcache.py
|
willkg/douglas
|
7e46919d0baefecba414f41980cbe9c0529a884e
|
[
"MIT"
] | null | null | null |
# Whether or not to use memcache.
usecache = False
_memcache_cache = {}
def get_cache(scope, key):
return _memcache_cache.setdefault(scope, {})[key]
def set_cache(scope, key, value):
_memcache_cache.setdefault(scope, {})[key] = value
def memcache_decorator(scope, instance=False):
"""Caches function results in memory
This is a pretty classic memoization system for plugins. There's
no expiration of cached data---it just hangs out in memory
forever.
This is great for compiling, but probably not for running as a
CGI/WSGI application.
This is disabled by default. It must be explicitly enabled
to have effect.
Some notes:
1. the function arguments MUST be hashable--no dicts, lists, etc.
2. this probably does not play well with
non-compiling--that should get checked.
3. TODO: the two arguments are poorly named--that should get fixed.
:arg scope: string defining the scope. e.g. 'pycategories'.
:arg instance: whether or not the function being decorated is
bound to an instance (i.e. is the first argument "self" or
"cls"?)
"""
def _memcache(fun):
def _memcache_decorated(*args, **kwargs):
if not usecache:
return fun(*args, **kwargs)
try:
if instance:
mem = args[1:]
else:
mem = args
hash_key = hash((mem, frozenset(sorted(kwargs.items()))))
except TypeError:
                print(repr((args, kwargs)))
hash_key = None
if not hash_key:
return fun(*args, **kwargs)
try:
ret = get_cache(scope, hash_key)
except KeyError:
ret = fun(*args, **kwargs)
set_cache(scope, hash_key, ret)
return ret
return _memcache_decorated
return _memcache
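# Usage sketch (hypothetical function, for illustration only): memoize an
# expensive plugin call under the 'pycategories' scope. Remember that caching
# only kicks in once the module-level `usecache` flag is set to True, and that
# the arguments must be hashable.
@memcache_decorator('pycategories')
def _example_count_entries(category_path):
    # imagine an expensive filesystem walk here
    return len(category_path)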
| 28.5
| 73
| 0.595459
|
235454fcaf4603c01156647864c942b5077d1953
| 707
|
py
|
Python
|
July21/EssentialPython/functions/morefunctions.py
|
pythonbykhaja/intesivepython
|
d3074f35bf36a04d4d1d9b4ff4631733d40b5817
|
[
"Apache-2.0"
] | 2
|
2021-05-29T18:21:50.000Z
|
2021-07-24T13:03:30.000Z
|
July21/EssentialPython/functions/morefunctions.py
|
pythonbykhaja/intesivepython
|
d3074f35bf36a04d4d1d9b4ff4631733d40b5817
|
[
"Apache-2.0"
] | null | null | null |
July21/EssentialPython/functions/morefunctions.py
|
pythonbykhaja/intesivepython
|
d3074f35bf36a04d4d1d9b4ff4631733d40b5817
|
[
"Apache-2.0"
] | 2
|
2021-05-25T10:19:54.000Z
|
2021-09-21T12:20:48.000Z
|
#def sum(number1, number2):
# pass
def sum(*args):
print(type(args))
result = 0
for arg in args:
result += arg
return result
result = sum(1,2)
print(result)
result = sum(1,2,3,4,5,6,7,8,9)
print(result)
result = sum()
print(result)
def explain_kwargs(**kwargs):
print(type(kwargs))
for key, value in kwargs.items():
print(f"{key} = {value}")
explain_kwargs(name='IHub', course='Python', duration='3')
def fun_function(*args, **kwargs):
print(f"args are {args}")
print(f"keyword args are {kwargs}")
fun_function('python.direct', 'learningthoughts.academy', blog='python.direct')
def rule_function(number, debug=True, *args, **kwargs):
pass
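# Extra illustration of how arguments bind for a signature shaped like
# rule_function's (positional, default, *args, **kwargs); show_binding below is
# just a throwaway example.
def show_binding(number, debug=True, *args, **kwargs):
    print(f"number={number}, debug={debug}, args={args}, kwargs={kwargs}")
show_binding(10)                    # number=10, debug=True, args=(), kwargs={}
show_binding(10, False, 1, 2, x=3)  # number=10, debug=False, args=(1, 2), kwargs={'x': 3}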
| 18.128205
| 79
| 0.640736
|
f729bdf98d041be17618159f1292cff87ad80dce
| 23,247
|
py
|
Python
|
Application/ryu-lagopus-ext-lagopus-general-tunnel-ext/ryu/ofproto/nx_actions.py
|
okinawaopenlabs/SD-WAN
|
5d8ed92620f07907b89f373f2d93f41e1a265268
|
[
"Apache-2.0"
] | 9
|
2019-12-12T06:57:51.000Z
|
2022-01-10T04:01:49.000Z
|
Application/ryu-lagopus-ext-lagopus-general-tunnel-ext/ryu/ofproto/nx_actions.py
|
okinawaopenlabs/SD-WAN
|
5d8ed92620f07907b89f373f2d93f41e1a265268
|
[
"Apache-2.0"
] | null | null | null |
Application/ryu-lagopus-ext-lagopus-general-tunnel-ext/ryu/ofproto/nx_actions.py
|
okinawaopenlabs/SD-WAN
|
5d8ed92620f07907b89f373f2d93f41e1a265268
|
[
"Apache-2.0"
] | 1
|
2020-03-26T17:58:28.000Z
|
2020-03-26T17:58:28.000Z
|
# Copyright (C) 2015 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2015 YAMAMOTO Takashi <yamamoto at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from ryu import utils
from ryu.lib import type_desc
from ryu.ofproto import nicira_ext
from ryu.ofproto import ofproto_common
from ryu.lib.pack_utils import msg_pack_into
from ryu.ofproto.ofproto_parser import StringifyMixin
def generate(ofp_name, ofpp_name):
import sys
import string
import functools
ofp = sys.modules[ofp_name]
ofpp = sys.modules[ofpp_name]
class _NXFlowSpec(StringifyMixin):
_hdr_fmt_str = '!H' # 2 bit 0s, 1 bit src, 2 bit dst, 11 bit n_bits
_dst_type = None
_subclasses = {}
_TYPE = {
'nx-flow-spec-field': [
'src',
'dst',
]
}
def __init__(self, src, dst, n_bits):
self.src = src
self.dst = dst
self.n_bits = n_bits
@classmethod
def register(cls, subcls):
assert issubclass(subcls, cls)
assert subcls._dst_type not in cls._subclasses
cls._subclasses[subcls._dst_type] = subcls
@classmethod
def parse(cls, buf):
(hdr,) = struct.unpack_from(cls._hdr_fmt_str, buf, 0)
rest = buf[struct.calcsize(cls._hdr_fmt_str):]
if hdr == 0:
return None, rest # all-0 header is no-op for padding
src_type = (hdr >> 13) & 0x1
dst_type = (hdr >> 11) & 0x3
n_bits = hdr & 0x3ff
subcls = cls._subclasses[dst_type]
if src_type == 0: # subfield
src = cls._parse_subfield(rest)
rest = rest[6:]
elif src_type == 1: # immediate
src_len = (n_bits + 15) // 16 * 2
src_bin = rest[:src_len]
src = type_desc.IntDescr(size=src_len).to_user(src_bin)
rest = rest[src_len:]
if dst_type == 0: # match
dst = cls._parse_subfield(rest)
rest = rest[6:]
elif dst_type == 1: # load
dst = cls._parse_subfield(rest)
rest = rest[6:]
elif dst_type == 2: # output
dst = '' # empty
return subcls(src=src, dst=dst, n_bits=n_bits), rest
def serialize(self):
buf = bytearray()
if isinstance(self.src, tuple):
src_type = 0 # subfield
else:
src_type = 1 # immediate
# header
val = (src_type << 13) | (self._dst_type << 11) | self.n_bits
msg_pack_into(self._hdr_fmt_str, buf, 0, val)
# src
if src_type == 0: # subfield
buf += self._serialize_subfield(self.src)
elif src_type == 1: # immediate
src_len = (self.n_bits + 15) // 16 * 2
buf += type_desc.IntDescr(size=src_len).from_user(self.src)
# dst
if self._dst_type == 0: # match
buf += self._serialize_subfield(self.dst)
elif self._dst_type == 1: # load
buf += self._serialize_subfield(self.dst)
elif self._dst_type == 2: # output
pass # empty
return buf
@staticmethod
def _parse_subfield(buf):
(n, len) = ofp.oxm_parse_header(buf, 0)
assert len == 4 # only 4-bytes NXM/OXM are defined
field = ofp.oxm_to_user_header(n)
rest = buf[len:]
(ofs,) = struct.unpack_from('!H', rest, 0)
return (field, ofs)
@staticmethod
def _serialize_subfield(subfield):
(field, ofs) = subfield
buf = bytearray()
n = ofp.oxm_from_user_header(field)
ofp.oxm_serialize_header(n, buf, 0)
assert len(buf) == 4 # only 4-bytes NXM/OXM are defined
msg_pack_into('!H', buf, 4, ofs)
return buf
class NXFlowSpecMatch(_NXFlowSpec):
# Add a match criteria
# an example of the corresponding ovs-ofctl syntax:
# NXM_OF_VLAN_TCI[0..11]
_dst_type = 0
class NXFlowSpecLoad(_NXFlowSpec):
# Add NXAST_REG_LOAD actions
# an example of the corresponding ovs-ofctl syntax:
# NXM_OF_ETH_DST[]=NXM_OF_ETH_SRC[]
_dst_type = 1
class NXFlowSpecOutput(_NXFlowSpec):
# Add an OFPAT_OUTPUT action
# an example of the corresponding ovs-ofctl syntax:
# output:NXM_OF_IN_PORT[]
_dst_type = 2
def __init__(self, src, n_bits, dst=''):
assert dst == ''
super(NXFlowSpecOutput, self).__init__(src=src, dst=dst,
n_bits=n_bits)
class NXAction(ofpp.OFPActionExperimenter):
_fmt_str = '!H' # subtype
_subtypes = {}
_experimenter = ofproto_common.NX_EXPERIMENTER_ID
def __init__(self):
super(NXAction, self).__init__(experimenter=self._experimenter)
self.subtype = self._subtype
@classmethod
def parse(cls, buf):
fmt_str = NXAction._fmt_str
(subtype,) = struct.unpack_from(fmt_str, buf, 0)
subtype_cls = cls._subtypes.get(subtype)
rest = buf[struct.calcsize(fmt_str):]
if subtype_cls is None:
return NXActionUnknown(subtype, rest)
return subtype_cls.parse(rest)
def serialize(self, buf, offset):
super(NXAction, self).serialize(buf, offset)
msg_pack_into(NXAction._fmt_str,
buf,
offset + ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE,
self.subtype)
@classmethod
def register(cls, subtype_cls):
            assert subtype_cls._subtype not in cls._subtypes
cls._subtypes[subtype_cls._subtype] = subtype_cls
class NXActionUnknown(NXAction):
def __init__(self, subtype, data=None,
type_=None, len_=None, experimenter=None):
self._subtype = subtype
super(NXActionUnknown, self).__init__()
self.data = data
@classmethod
def parse(cls, subtype, buf):
return cls(data=buf)
def serialize(self, buf, offset):
# fixup
data = self.data
if data is None:
data = bytearray()
payload_offset = (
ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE +
struct.calcsize(NXAction._fmt_str)
)
self.len = utils.round_up(payload_offset + len(data), 8)
super(NXActionUnknown, self).serialize(buf, offset)
buf += data
class NXActionRegMove(NXAction):
_subtype = nicira_ext.NXAST_REG_MOVE
_fmt_str = '!HHH' # n_bits, src_ofs, dst_ofs
# Followed by OXM fields (src, dst) and padding to 8 bytes boundary
_TYPE = {
'ascii': [
'src_field',
'dst_field',
]
}
def __init__(self, src_field, dst_field, n_bits, src_ofs=0, dst_ofs=0,
type_=None, len_=None, experimenter=None, subtype=None):
super(NXActionRegMove, self).__init__()
self.n_bits = n_bits
self.src_ofs = src_ofs
self.dst_ofs = dst_ofs
self.src_field = src_field
self.dst_field = dst_field
@classmethod
def parse(cls, buf):
(n_bits, src_ofs, dst_ofs,) = struct.unpack_from(
NXActionRegMove._fmt_str, buf, 0)
rest = buf[struct.calcsize(NXActionRegMove._fmt_str):]
# src field
(n, len) = ofp.oxm_parse_header(rest, 0)
src_field = ofp.oxm_to_user_header(n)
rest = rest[len:]
# dst field
(n, len) = ofp.oxm_parse_header(rest, 0)
dst_field = ofp.oxm_to_user_header(n)
rest = rest[len:]
# ignore padding
return cls(src_field, dst_field=dst_field, n_bits=n_bits,
src_ofs=src_ofs, dst_ofs=dst_ofs)
def serialize(self, buf, offset):
# fixup
data = bytearray()
msg_pack_into(NXActionRegMove._fmt_str, data, 0,
self.n_bits, self.src_ofs, self.dst_ofs)
# src field
n = ofp.oxm_from_user_header(self.src_field)
ofp.oxm_serialize_header(n, data, len(data))
# dst field
n = ofp.oxm_from_user_header(self.dst_field)
ofp.oxm_serialize_header(n, data, len(data))
payload_offset = (
ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE +
struct.calcsize(NXAction._fmt_str)
)
self.len = utils.round_up(payload_offset + len(data), 8)
super(NXActionRegMove, self).serialize(buf, offset)
msg_pack_into('!%ds' % len(data), buf, offset + payload_offset,
bytes(data))
class NXActionLearn(NXAction):
_subtype = nicira_ext.NXAST_LEARN
# idle_timeout, hard_timeout, priority, cookie, flags,
# table_id, pad, fin_idle_timeout, fin_hard_timeout
_fmt_str = '!HHHQHBxHH'
# Followed by flow_mod_specs
def __init__(self,
table_id,
specs,
idle_timeout=0,
hard_timeout=0,
priority=ofp.OFP_DEFAULT_PRIORITY,
cookie=0,
flags=0,
fin_idle_timeout=0,
fin_hard_timeout=0,
type_=None, len_=None, experimenter=None, subtype=None):
super(NXActionLearn, self).__init__()
self.idle_timeout = idle_timeout
self.hard_timeout = hard_timeout
self.priority = priority
self.cookie = cookie
self.flags = flags
self.table_id = table_id
self.fin_idle_timeout = fin_idle_timeout
self.fin_hard_timeout = fin_hard_timeout
self.specs = specs
@classmethod
def parse(cls, buf):
(idle_timeout,
hard_timeout,
priority,
cookie,
flags,
table_id,
fin_idle_timeout,
fin_hard_timeout,) = struct.unpack_from(
NXActionLearn._fmt_str, buf, 0)
rest = buf[struct.calcsize(NXActionLearn._fmt_str):]
# specs
specs = []
while len(rest) > 0:
spec, rest = _NXFlowSpec.parse(rest)
if spec is None:
continue
specs.append(spec)
return cls(idle_timeout=idle_timeout,
hard_timeout=hard_timeout,
priority=priority,
cookie=cookie,
flags=flags,
table_id=table_id,
fin_idle_timeout=fin_idle_timeout,
fin_hard_timeout=fin_hard_timeout,
specs=specs)
def serialize(self, buf, offset):
# fixup
data = bytearray()
msg_pack_into(NXActionLearn._fmt_str, data, 0,
self.idle_timeout,
self.hard_timeout,
self.priority,
self.cookie,
self.flags,
self.table_id,
self.fin_idle_timeout,
self.fin_hard_timeout)
for spec in self.specs:
data += spec.serialize()
payload_offset = (
ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE +
struct.calcsize(NXAction._fmt_str)
)
self.len = utils.round_up(payload_offset + len(data), 8)
super(NXActionLearn, self).serialize(buf, offset)
msg_pack_into('!%ds' % len(data), buf, offset + payload_offset,
bytes(data))
class NXActionConjunction(NXAction):
_subtype = nicira_ext.NXAST_CONJUNCTION
# clause, n_clauses, id
_fmt_str = '!BBI'
def __init__(self,
clause,
n_clauses,
id_,
type_=None, len_=None, experimenter=None, subtype=None):
super(NXActionConjunction, self).__init__()
self.clause = clause
self.n_clauses = n_clauses
self.id = id_
@classmethod
def parse(cls, buf):
(clause,
n_clauses,
id_,) = struct.unpack_from(
NXActionConjunction._fmt_str, buf, 0)
return cls(clause, n_clauses, id_)
def serialize(self, buf, offset):
data = bytearray()
msg_pack_into(NXActionConjunction._fmt_str, data, 0,
self.clause,
self.n_clauses,
self.id)
payload_offset = (
ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE +
struct.calcsize(NXAction._fmt_str)
)
self.len = utils.round_up(payload_offset + len(data), 8)
super(NXActionConjunction, self).serialize(buf, offset)
msg_pack_into('!%ds' % len(data), buf, offset + payload_offset,
bytes(data))
class NXActionResubmitTable(NXAction):
_subtype = nicira_ext.NXAST_RESUBMIT_TABLE
# in_port, table_id
_fmt_str = '!HB3x'
def __init__(self,
in_port,
table_id,
type_=None, len_=None, experimenter=None, subtype=None):
super(NXActionResubmitTable, self).__init__()
self.in_port = in_port
self.table_id = table_id
@classmethod
def parse(cls, buf):
(in_port,
table_id) = struct.unpack_from(
NXActionResubmitTable._fmt_str, buf, 0)
return cls(in_port, table_id)
def serialize(self, buf, offset):
data = bytearray()
msg_pack_into(NXActionResubmitTable._fmt_str, data, 0,
self.in_port,
self.table_id)
payload_offset = (
ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE +
struct.calcsize(NXAction._fmt_str)
)
self.len = utils.round_up(payload_offset + len(data), 8)
super(NXActionResubmitTable, self).serialize(buf, offset)
msg_pack_into('!%ds' % len(data), buf, offset + payload_offset,
bytes(data))
class NXActionCT(NXAction):
_subtype = nicira_ext.NXAST_CT
# flags, zone_src, zone_ofs_nbits (zone_imm), recirc_table,
# pad, alg
_fmt_str = '!HIHB3xH'
# Followed by actions
def __init__(self,
flags,
zone_src,
zone_ofs_nbits, # is zone_imm if zone_src == 0
recirc_table,
alg,
actions,
type_=None, len_=None, experimenter=None, subtype=None):
super(NXActionCT, self).__init__()
self.flags = flags
self.zone_src = zone_src
self.zone_ofs_nbits = zone_ofs_nbits
self.recirc_table = recirc_table
self.alg = alg
self.actions = actions
@classmethod
def parse(cls, buf):
(flags,
zone_src,
zone_ofs_nbits,
recirc_table,
alg,) = struct.unpack_from(
NXActionCT._fmt_str, buf, 0)
rest = buf[struct.calcsize(NXActionCT._fmt_str):]
# actions
actions = []
while len(rest) > 0:
action = ofpp.OFPAction.parser(rest, 0)
actions.append(action)
rest = rest[action.len:]
return cls(flags, zone_src, zone_ofs_nbits, recirc_table,
alg, actions)
def serialize(self, buf, offset):
data = bytearray()
msg_pack_into(NXActionCT._fmt_str, data, 0,
self.flags,
self.zone_src,
self.zone_ofs_nbits,
self.recirc_table,
self.alg)
for a in self.actions:
a.serialize(data, len(data))
payload_offset = (
ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE +
struct.calcsize(NXAction._fmt_str)
)
self.len = utils.round_up(payload_offset + len(data), 8)
super(NXActionCT, self).serialize(buf, offset)
msg_pack_into('!%ds' % len(data), buf, offset + payload_offset,
bytes(data))
class NXActionNAT(NXAction):
_subtype = nicira_ext.NXAST_NAT
# pad, flags, range_present
_fmt_str = '!2xHH'
# Followed by optional parameters
_TYPE = {
'ascii': [
'range_ipv4_max',
'range_ipv4_min',
'range_ipv6_max',
'range_ipv6_min',
]
}
def __init__(self,
flags,
range_ipv4_min='',
range_ipv4_max='',
range_ipv6_min='',
range_ipv6_max='',
range_proto_min=None,
range_proto_max=None,
type_=None, len_=None, experimenter=None, subtype=None):
super(NXActionNAT, self).__init__()
self.flags = flags
self.range_ipv4_min = range_ipv4_min
self.range_ipv4_max = range_ipv4_max
self.range_ipv6_min = range_ipv6_min
self.range_ipv6_max = range_ipv6_max
self.range_proto_min = range_proto_min
self.range_proto_max = range_proto_max
@classmethod
def parse(cls, buf):
(flags,
range_present) = struct.unpack_from(
NXActionNAT._fmt_str, buf, 0)
rest = buf[struct.calcsize(NXActionNAT._fmt_str):]
# optional parameters
kwargs = dict()
if range_present & nicira_ext.NX_NAT_RANGE_IPV4_MIN:
kwargs['range_ipv4_min'] = type_desc.IPv4Addr.to_user(rest[:4])
rest = rest[4:]
if range_present & nicira_ext.NX_NAT_RANGE_IPV4_MAX:
kwargs['range_ipv4_max'] = type_desc.IPv4Addr.to_user(rest[:4])
rest = rest[4:]
if range_present & nicira_ext.NX_NAT_RANGE_IPV6_MIN:
kwargs['range_ipv6_min'] = (
type_desc.IPv6Addr.to_user(rest[:16]))
rest = rest[16:]
if range_present & nicira_ext.NX_NAT_RANGE_IPV6_MAX:
kwargs['range_ipv6_max'] = (
type_desc.IPv6Addr.to_user(rest[:16]))
rest = rest[16:]
            if range_present & nicira_ext.NX_NAT_RANGE_PROTO_MIN:
kwargs['range_proto_min'] = type_desc.Int2.to_user(rest[:2])
rest = rest[2:]
            if range_present & nicira_ext.NX_NAT_RANGE_PROTO_MAX:
kwargs['range_proto_max'] = type_desc.Int2.to_user(rest[:2])
return cls(flags, **kwargs)
def serialize(self, buf, offset):
# Pack optional parameters first, as range_present needs
# to be calculated.
optional_data = b''
range_present = 0
if self.range_ipv4_min != '':
range_present |= nicira_ext.NX_NAT_RANGE_IPV4_MIN
optional_data += type_desc.IPv4Addr.from_user(
self.range_ipv4_min)
if self.range_ipv4_max != '':
range_present |= nicira_ext.NX_NAT_RANGE_IPV4_MAX
optional_data += type_desc.IPv4Addr.from_user(
self.range_ipv4_max)
if self.range_ipv6_min != '':
range_present |= nicira_ext.NX_NAT_RANGE_IPV6_MIN
optional_data += type_desc.IPv6Addr.from_user(
self.range_ipv6_min)
if self.range_ipv6_max != '':
range_present |= nicira_ext.NX_NAT_RANGE_IPV6_MAX
optional_data += type_desc.IPv6Addr.from_user(
self.range_ipv6_max)
if self.range_proto_min is not None:
range_present |= nicira_ext.NX_NAT_RANGE_PROTO_MIN
optional_data += type_desc.Int2.from_user(
self.range_proto_min)
if self.range_proto_max is not None:
range_present |= nicira_ext.NX_NAT_RANGE_PROTO_MAX
optional_data += type_desc.Int2.from_user(
self.range_proto_max)
data = bytearray()
msg_pack_into(NXActionNAT._fmt_str, data, 0,
self.flags,
range_present)
msg_pack_into('!%ds' % len(optional_data), data, len(data),
optional_data)
payload_offset = (
ofp.OFP_ACTION_EXPERIMENTER_HEADER_SIZE +
struct.calcsize(NXAction._fmt_str)
)
self.len = utils.round_up(payload_offset + len(data), 8)
super(NXActionNAT, self).serialize(buf, offset)
msg_pack_into('!%ds' % len(data), buf, offset + payload_offset,
bytes(data))
def add_attr(k, v):
v.__module__ = ofpp.__name__ # Necessary for stringify stuff
setattr(ofpp, k, v)
add_attr('NXAction', NXAction)
add_attr('NXActionUnknown', NXActionUnknown)
classes = [
'NXActionRegMove',
'NXActionLearn',
'NXActionConjunction',
'NXActionResubmitTable',
'NXActionCT',
'NXActionNAT',
'_NXFlowSpec', # exported for testing
'NXFlowSpecMatch',
'NXFlowSpecLoad',
'NXFlowSpecOutput',
]
vars = locals()
for name in classes:
cls = vars[name]
add_attr(name, cls)
if issubclass(cls, NXAction):
NXAction.register(cls)
if issubclass(cls, _NXFlowSpec):
_NXFlowSpec.register(cls)
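# Rough usage sketch. In practice the versioned ofproto parser modules are
# expected to call generate() themselves at import time; that wiring, the OF1.3
# module choice and the port/table numbers below are assumptions.
def _example_nx_resubmit_action():
    from ryu.ofproto import ofproto_v1_3_parser as parser
    # resubmit the packet, keeping its original in_port, to table 1
    return parser.NXActionResubmitTable(in_port=0xfff8, table_id=1)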
| 37.616505
| 79
| 0.534392
|
bc24ee6e2180e68e5d3a04d27d633522342fcaad
| 1,926
|
py
|
Python
|
mother_map.py
|
dimelab-public/MTConnect-Testbed-Simulator-Public
|
4b42052953d042418ddecbd5ed8608ccbdbaa189
|
[
"MIT"
] | null | null | null |
mother_map.py
|
dimelab-public/MTConnect-Testbed-Simulator-Public
|
4b42052953d042418ddecbd5ed8608ccbdbaa189
|
[
"MIT"
] | null | null | null |
mother_map.py
|
dimelab-public/MTConnect-Testbed-Simulator-Public
|
4b42052953d042418ddecbd5ed8608ccbdbaa189
|
[
"MIT"
] | 1
|
2019-12-09T14:52:18.000Z
|
2019-12-09T14:52:18.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 17 13:32:51 2019
@author: smehdi@ncsu.edu
"""
import folium
import pandas as pd
master = pd.read_csv(r'dbs\master.csv')
lat=list(master['latitude'])
lon=list(master['longitude'])
orgs = list(master['names'])
machs = list(master['machines'])
cu = list(master['CapacityUtil'])
ports =list(master['agentPorts'])
#These coordinates are to position the map to the best view.
US_COORDINATES = (39.0902,-98.7129)
NumOrg = len(orgs)
# create empty map zoomed in on US_Coordinates
mape = folium.Map(location=US_COORDINATES,control_scale=False, zoom_start=5)
# add a marker for every record in the filtered data, use a clustered view
for i in range(NumOrg):
P=ports[i]
M=machs[i]
C=cu[i]
if C==0:
COL='red'
if (1<=C<=30):
COL='orange'
if (C>30):
COL='green'
info = folium.Html(f'''<h4><span style="color: #3366ff;">{orgs[i]} - port : {P}</span></h4>
<table style="height: 30px;" width="284">
<tbody>
<tr>
<td style="width: 169px;">
<h4>MTConnect :</h4>
</td>
<td style="width: 99px;"><h6><a href="http://localhost:{P}/current" target="_blank" rel="noopener">Current |</a><a href="http://localhost:{P}/probe" target="_blank" rel="noopener"> Probe</a></h6></td>
</tr>
<tr>
<td style="width: 169px;">
<h4>Total Machines:</h4>
</td>
<td style="width: 99px;"><b>{M}</b></td>
</tr>
<tr>
<td style="width: 169px;">
<h4>Capacity Utilization:</h4>
</td>
<td style="width: 99px;"><b>{C}%</b></td>
</tr>
</tbody>
</table>''',width=300, script=True)
if (C==100):
I='star'
else:
I='info-sign'
popup = folium.Popup(info, max_width=2650)
#put custom text whenever the marker of machine clicked on map.
folium.Marker(location=[lat[i],lon[i]],icon=folium.Icon(icon=I,color=COL), popup=popup).add_to(mape)
#save map in the view html file to be rendered by flask.
mape.save("templates/view.html")
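#A minimal Flask view that could serve the generated map (sketch; the app object
#and route below are assumptions, not part of this script):
#
#  from flask import Flask, render_template
#  app = Flask(__name__)
#
#  @app.route('/')
#  def show_map():
#      return render_template('view.html')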
| 25.012987
| 200
| 0.636033
|
78ffa878e6c22d0e8fcd0280583f1711954e1ef5
| 3,418
|
py
|
Python
|
apache/setup.py
|
chotiwat/integrations-core
|
57a4437790cafcb769e120aa64d84aaecbf6a414
|
[
"BSD-3-Clause"
] | 2
|
2019-04-03T17:21:38.000Z
|
2020-02-04T16:28:05.000Z
|
apache/setup.py
|
chotiwat/integrations-core
|
57a4437790cafcb769e120aa64d84aaecbf6a414
|
[
"BSD-3-Clause"
] | 10
|
2018-02-27T19:06:07.000Z
|
2021-08-30T03:23:26.000Z
|
apache/setup.py
|
chotiwat/integrations-core
|
57a4437790cafcb769e120aa64d84aaecbf6a414
|
[
"BSD-3-Clause"
] | 6
|
2018-01-09T21:37:20.000Z
|
2020-05-26T09:28:09.000Z
|
# Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
import json
import re
here = path.abspath(path.dirname(__file__))
def parse_req_line(line):
line = line.strip()
if not line or line.startswith('--hash') or line[0] == '#':
return None
req = line.rpartition('#')
if len(req[1]) == 0:
line = req[2].strip()
else:
        line = req[0].strip()
if '--hash=' in line:
line = line[:line.find('--hash=')].strip()
if ';' in line:
line = line[:line.find(';')].strip()
if '\\' in line:
line = line[:line.find('\\')].strip()
return line
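# Rough examples of what parse_req_line is expected to hand back (the inputs
# are made up, not taken from the real requirements.txt):
#   parse_req_line('requests==2.18.4 \\')             -> 'requests==2.18.4'
#   parse_req_line('  --hash=sha256:deadbeef')        -> None
#   parse_req_line('foo==1.0; python_version < "3"')  -> 'foo==1.0'
#   parse_req_line('')                                -> None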
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
# Parse requirements
runtime_reqs = ['datadog-checks-base']
with open(path.join(here, 'requirements.txt'), encoding='utf-8') as f:
for line in f.readlines():
req = parse_req_line(line)
if req:
runtime_reqs.append(req)
def read(*parts):
with open(path.join(here, *parts), 'r') as fp:
return fp.read()
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
# https://packaging.python.org/guides/single-sourcing-package-version/
version = find_version("datadog_checks", "apache", "__init__.py")
manifest_version = None
with open(path.join(here, 'manifest.json'), encoding='utf-8') as f:
manifest = json.load(f)
manifest_version = manifest.get('version')
if version != manifest_version:
raise Exception("Inconsistent versioning in module and manifest - aborting wheel build")
setup(
name='datadog-apache',
version=version,
description='The Apache check',
long_description=long_description,
keywords='datadog agent apache check',
# The project's main homepage.
url='https://github.com/DataDog/integrations-core',
# Author details
author='Datadog',
author_email='packages@datadoghq.com',
# License
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# The package we're going to ship
packages=['datadog_checks.apache'],
# Run-time dependencies
install_requires=list(set(runtime_reqs)),
# Development dependencies, run with:
# $ pip install -e .[dev]
extras_require={
'dev': [
'check-manifest',
'datadog_agent_tk>=5.15',
],
},
# Testing setup and dependencies
tests_require=[
'nose',
'coverage',
'datadog_agent_tk>=5.15',
],
test_suite='nose.collector',
# Extra files to ship with the wheel package
package_data={b'datadog_checks.apache': ['conf.yaml.example']},
include_package_data=True,
)
| 28.247934
| 92
| 0.630778
|
6a13667456a5bd369871d09c0dd2ca844aaadd85
| 5,987
|
py
|
Python
|
github3_utils/check_labels.py
|
domdfcoding/github3-utils
|
f71797293606d09f7fc7b052045cf95efde2fc3e
|
[
"MIT"
] | 1
|
2021-08-16T18:39:12.000Z
|
2021-08-16T18:39:12.000Z
|
github3_utils/check_labels.py
|
domdfcoding/github3-utils
|
f71797293606d09f7fc7b052045cf95efde2fc3e
|
[
"MIT"
] | 15
|
2021-01-04T18:29:30.000Z
|
2021-03-29T19:03:58.000Z
|
github3_utils/check_labels.py
|
domdfcoding/github3-utils
|
f71797293606d09f7fc7b052045cf95efde2fc3e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
#
# check_labels.py
"""
Helpers for creating labels to mark pull requests with which tests are failing.
.. versionadded:: 0.4.0
"""
#
# Copyright © 2021 Dominic Davis-Foster <dominic@davis-foster.co.uk>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
# stdlib
import re
from typing import Dict, NamedTuple, Set, Union
# 3rd party
import attr
import github3.issues.label
from domdf_python_tools.doctools import prettify_docstrings
from github3.checks import CheckRun
from github3.issues import Issue
from github3.pulls import PullRequest, ShortPullRequest
from github3.repos import Repository
from github3.repos.commit import ShortCommit
__all__ = ["Label", "check_status_labels", "Checks", "get_checks_for_pr", "label_pr_failures"]
@prettify_docstrings
@attr.s(frozen=True, slots=True)
class Label:
"""
Represents an issue or pull request label.
"""
#: The text of the label.
name: str = attr.ib(converter=str)
#: The background colour of the label.
color: str = attr.ib(converter=str)
#: A short description of the label.
description: str = attr.ib(default=None)
def __str__(self) -> str:
return self.name
def to_dict(self) -> Dict[str, str]:
"""
Return the :class:`~.Label` as a dictionary.
"""
return {
"name": self.name,
"color": self.color,
"description": self.description,
}
def create(self, repo: Repository) -> github3.issues.label.Label:
"""
Create this label on the given repository.
:param repo:
"""
return repo.create_label(**self.to_dict())
check_status_labels: Dict[str, Label] = {
label.name: label
for label in [
Label("failure: flake8", "#B60205", "The Flake8 check is failing."),
Label("failure: mypy", "#DC1C13", "The mypy check is failing."),
Label("failure: docs", "#EA4C46", "The docs check is failing."),
Label("failure: Windows", "#F07470", "The Windows tests are failing."),
Label("failure: Linux", "#F6BDC0", "The Linux tests are failing."),
# Label("failure: Multiple", "#D93F0B", "Multiple checks are failing."),
]
}
"""
Labels corresponding to failing pull request checks.
"""
# The ``failure: Multiple`` label is used if three or more categories are failing.
class Checks(NamedTuple):
"""
Represents the sets of status checks returned by :func:`~.get_checks_for_pr`.
"""
successful: Set[str]
failing: Set[str]
running: Set[str]
skipped: Set[str]
neutral: Set[str]
def get_checks_for_pr(pull: Union[PullRequest, ShortPullRequest]) -> Checks:
"""
Returns a :class:`~.Checks` object containing sets of check names grouped by their status.
:param pull: The pull request to obtain checks for.
"""
head_commit: ShortCommit = list(pull.commits())[-1]
failing = set()
running = set()
successful = set()
skipped = set()
neutral = set()
check_run: CheckRun
for check_run in head_commit.check_runs():
if check_run.status in {"queued", "running", "in_progress"}:
running.add(check_run.name)
elif check_run.conclusion in {"failure", "cancelled", "timed_out", "action_required"}:
failing.add(check_run.name)
elif check_run.conclusion == "success":
successful.add(check_run.name)
elif check_run.conclusion == "skipped":
skipped.add(check_run.name)
elif check_run.conclusion == "neutral":
neutral.add(check_run.name)
# Remove failing checks from successful etc. (as all checks appear twice for PRs)
successful = successful - failing - running
running = running - failing
skipped = skipped - running - failing - successful
neutral = neutral - running - failing - successful
return Checks(
successful=successful,
failing=failing,
running=running,
skipped=skipped,
neutral=neutral,
)
_python_dev_re = re.compile(r".*Python\s*\d+\.\d+.*(dev|alpha|beta|rc).*", flags=re.IGNORECASE)
def label_pr_failures(pull: Union[PullRequest, ShortPullRequest]) -> Set[str]:
"""
Labels the given pull request to indicate which checks are failing.
:param pull:
:return: The new labels set for the pull request.
"""
pr_checks = get_checks_for_pr(pull)
failure_labels: Set[str] = set()
success_labels: Set[str] = set()
def determine_labels(from_, to):
for check in from_:
if _python_dev_re.match(check):
continue
if check in {"Flake8", "docs"}:
to.add(f"failure: {check.lower()}")
elif check.startswith("mypy"):
to.add("failure: mypy")
elif check.startswith("ubuntu"):
to.add("failure: Linux")
elif check.startswith("windows"):
to.add("failure: Windows")
determine_labels(pr_checks.failing, failure_labels)
determine_labels(pr_checks.successful, success_labels)
issue: Issue = pull.issue()
current_labels = {label.name for label in issue.labels()}
for label in success_labels:
if label in current_labels and label not in failure_labels:
issue.remove_label(label)
current_labels -= success_labels
current_labels.update(failure_labels)
issue.add_labels(*current_labels)
return current_labels
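# Rough end-to-end sketch: fetch a pull request with github3.py and apply the
# failure labels defined above. The token and repository coordinates are
# placeholders, and creating the labels up front is left as a comment because
# it fails once they already exist on the repository.
def _example_label_failures(token: str, owner: str, name: str, number: int) -> Set[str]:
	gh = github3.login(token=token)
	repo = gh.repository(owner, name)
	# for label in check_status_labels.values():
	#     label.create(repo)
	pull = repo.pull_request(number)
	return label_pr_failures(pull)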
| 28.509524
| 95
| 0.72173
|
74b5ad65af191c7caf0e33ba56ff18385148ba6b
| 91,792
|
py
|
Python
|
lib/wallet.py
|
zakurai/electrum-xsh3
|
cec82adcfa3acacc1af806f5eec8aeb97592e919
|
[
"MIT"
] | 1
|
2021-05-29T02:35:02.000Z
|
2021-05-29T02:35:02.000Z
|
lib/wallet.py
|
zakurai/electrum-xsh3
|
cec82adcfa3acacc1af806f5eec8aeb97592e919
|
[
"MIT"
] | 1
|
2021-11-15T17:48:00.000Z
|
2021-11-15T17:48:00.000Z
|
lib/wallet.py
|
zakurai/electrum-xsh3
|
cec82adcfa3acacc1af806f5eec8aeb97592e919
|
[
"MIT"
] | 1
|
2019-02-27T20:56:22.000Z
|
2019-02-27T20:56:22.000Z
|
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Wallet classes:
# - Imported_Wallet: imported address, no keystore
# - Standard_Wallet: one keystore, P2PKH
# - Multisig_Wallet: several keystores, P2SH
import os
import threading
import random
import time
import json
import copy
import errno
import traceback
from functools import partial
from collections import defaultdict
from numbers import Number
from decimal import Decimal
import itertools
import sys
from .i18n import _
from .util import (NotEnoughFunds, PrintError, UserCancelled, profiler,
format_satoshis, format_fee_satoshis, NoDynamicFeeEstimates,
TimeoutException, WalletFileException, BitcoinException,
InvalidPassword)
from .bitcoin import *
from .version import *
from .keystore import load_keystore, Hardware_KeyStore
from .storage import multisig_type, STO_EV_PLAINTEXT, STO_EV_USER_PW, STO_EV_XPUB_PW
from . import transaction
from .transaction import Transaction
from .plugins import run_hook
from . import bitcoin
from . import coinchooser
from .synchronizer import Synchronizer
from .verifier import SPV
from . import paymentrequest
from .paymentrequest import PR_PAID, PR_UNPAID, PR_UNKNOWN, PR_EXPIRED
from .paymentrequest import InvoiceStore
from .contacts import Contacts
TX_STATUS = [
_('Unconfirmed'),
_('Unconfirmed parent'),
_('Not Verified'),
_('Local'),
]
TX_HEIGHT_LOCAL = -2
TX_HEIGHT_UNCONF_PARENT = -1
TX_HEIGHT_UNCONFIRMED = 0
def relayfee(network):
from .simple_config import FEERATE_DEFAULT_RELAY
MAX_RELAY_FEE = 10 * FEERATE_DEFAULT_RELAY
f = network.relay_fee if network and network.relay_fee else FEERATE_DEFAULT_RELAY
return min(f, MAX_RELAY_FEE)
def dust_threshold(network):
# Change <= dust threshold is added to the tx fee
return 182 * 3 * relayfee(network) / 1000
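# Worked example (sketch): with a relay fee of 1000 satoshis/kB -- which is what
# FEERATE_DEFAULT_RELAY is believed to default to -- dust_threshold() comes out
# at 182 * 3 * 1000 / 1000 = 546 satoshis, the classic dust limit for a P2PKH
# output.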
def append_utxos_to_inputs(inputs, network, pubkey, txin_type, imax):
if txin_type != 'p2pk':
address = bitcoin.pubkey_to_address(txin_type, pubkey)
scripthash = bitcoin.address_to_scripthash(address)
else:
script = bitcoin.public_key_to_p2pk_script(pubkey)
scripthash = bitcoin.script_to_scripthash(script)
address = '(pubkey)'
u = network.listunspent_for_scripthash(scripthash)
for item in u:
if len(inputs) >= imax:
break
item['address'] = address
item['type'] = txin_type
item['prevout_hash'] = item['tx_hash']
item['prevout_n'] = int(item['tx_pos'])
item['pubkeys'] = [pubkey]
item['x_pubkeys'] = [pubkey]
item['signatures'] = [None]
item['num_sig'] = 1
inputs.append(item)
def sweep_preparations(privkeys, network, imax=100):
def find_utxos_for_privkey(txin_type, privkey, compressed):
pubkey = ecc.ECPrivkey(privkey).get_public_key_hex(compressed=compressed)
append_utxos_to_inputs(inputs, network, pubkey, txin_type, imax)
keypairs[pubkey] = privkey, compressed
inputs = []
keypairs = {}
for sec in privkeys:
txin_type, privkey, compressed = bitcoin.deserialize_privkey(sec)
find_utxos_for_privkey(txin_type, privkey, compressed)
# do other lookups to increase support coverage
if is_minikey(sec):
# minikeys don't have a compressed byte
# we lookup both compressed and uncompressed pubkeys
find_utxos_for_privkey(txin_type, privkey, not compressed)
elif txin_type == 'p2pkh':
# WIF serialization does not distinguish p2pkh and p2pk
# we also search for pay-to-pubkey outputs
find_utxos_for_privkey('p2pk', privkey, compressed)
if not inputs:
raise Exception(_('No inputs found. (Note that inputs need to be confirmed)'))
# FIXME actually inputs need not be confirmed now, see https://github.com/kyuupichan/electrumx/issues/365
return inputs, keypairs
def sweep(privkeys, network, config, recipient, fee=None, imax=100):
inputs, keypairs = sweep_preparations(privkeys, network, imax)
total = sum(i.get('value') for i in inputs)
if fee is None:
outputs = [(TYPE_ADDRESS, recipient, total)]
tx = Transaction.from_io(inputs, outputs)
fee = config.estimate_fee(tx.estimated_size())
if total - fee < 0:
raise Exception(_('Not enough funds on address.') + '\nTotal: %d satoshis\nFee: %d'%(total, fee))
if total - fee < dust_threshold(network):
raise Exception(_('Not enough funds on address.') + '\nTotal: %d satoshis\nFee: %d\nDust Threshold: %d'%(total, fee, dust_threshold(network)))
outputs = [(TYPE_ADDRESS, recipient, total - fee)]
locktime = network.get_local_height()
tx = Transaction.from_io(inputs, outputs, locktime=locktime)
tx.BIP_LI01_sort()
tx.set_rbf(True)
tx.sign(keypairs)
return tx
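# Rough usage sketch for sweep(). It needs a connected Network instance and a
# SimpleConfig; the WIF key and destination address below are placeholders. The
# returned transaction is already signed and ready to be broadcast.
#
#   tx = sweep(['<WIF private key>'], network, config,
#              recipient='<destination address>', imax=100)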
class AddTransactionException(Exception):
pass
class UnrelatedTransactionException(AddTransactionException):
def __str__(self):
return _("Transaction is unrelated to this wallet.")
class CannotBumpFee(Exception): pass
class Abstract_Wallet(PrintError):
"""
Wallet classes are created to handle various address generation methods.
Completion states (watching-only, single account, no seed, etc) are handled inside classes.
"""
max_change_outputs = 3
def __init__(self, storage):
self.electrum_version = ELECTRUM_VERSION
self.storage = storage
self.network = None
# verifier (SPV) and synchronizer are started in start_threads
self.synchronizer = None
self.verifier = None
self.gap_limit_for_change = 6 # constant
# locks: if you need to take multiple ones, acquire them in the order they are defined here!
self.lock = threading.RLock()
self.transaction_lock = threading.RLock()
# saved fields
self.use_change = storage.get('use_change', True)
self.multiple_change = storage.get('multiple_change', False)
self.labels = storage.get('labels', {})
self.frozen_addresses = set(storage.get('frozen_addresses',[]))
self.history = storage.get('addr_history',{}) # address -> list(txid, height)
self.fiat_value = storage.get('fiat_value', {})
self.receive_requests = storage.get('payment_requests', {})
# Verified transactions. txid -> (height, timestamp, block_pos). Access with self.lock.
self.verified_tx = storage.get('verified_tx3', {})
# Transactions pending verification. txid -> tx_height. Access with self.lock.
self.unverified_tx = defaultdict(int)
self.load_keystore()
self.load_addresses()
self.test_addresses_sanity()
self.load_transactions()
self.load_local_history()
self.check_history()
self.load_unverified_transactions()
self.remove_local_transactions_we_dont_have()
# There is a difference between wallet.up_to_date and network.is_up_to_date().
# network.is_up_to_date() returns true when all requests have been answered and processed
# wallet.up_to_date is true when the wallet is synchronized (stronger requirement)
# Neither of them considers the verifier.
self.up_to_date = False
# save wallet type the first time
if self.storage.get('wallet_type') is None:
self.storage.put('wallet_type', self.wallet_type)
# invoices and contacts
self.invoices = InvoiceStore(self.storage)
self.contacts = Contacts(self.storage)
self.coin_price_cache = {}
def diagnostic_name(self):
return self.basename()
def __str__(self):
return self.basename()
def get_master_public_key(self):
return None
@profiler
def load_transactions(self):
# load txi, txo, tx_fees
self.txi = self.storage.get('txi', {})
for txid, d in list(self.txi.items()):
for addr, lst in d.items():
self.txi[txid][addr] = set([tuple(x) for x in lst])
self.txo = self.storage.get('txo', {})
self.tx_fees = self.storage.get('tx_fees', {})
tx_list = self.storage.get('transactions', {})
# load transactions
self.transactions = {}
for tx_hash, raw in tx_list.items():
tx = Transaction(raw)
self.transactions[tx_hash] = tx
if self.txi.get(tx_hash) is None and self.txo.get(tx_hash) is None:
self.print_error("removing unreferenced tx", tx_hash)
self.transactions.pop(tx_hash)
# load spent_outpoints
_spent_outpoints = self.storage.get('spent_outpoints', {})
self.spent_outpoints = defaultdict(dict)
for prevout_hash, d in _spent_outpoints.items():
for prevout_n_str, spending_txid in d.items():
prevout_n = int(prevout_n_str)
self.spent_outpoints[prevout_hash][prevout_n] = spending_txid
@profiler
def load_local_history(self):
self._history_local = {} # address -> set(txid)
for txid in itertools.chain(self.txi, self.txo):
self._add_tx_to_local_history(txid)
def remove_local_transactions_we_dont_have(self):
txid_set = set(self.txi) | set(self.txo)
for txid in txid_set:
tx_height = self.get_tx_height(txid)[0]
if tx_height == TX_HEIGHT_LOCAL and txid not in self.transactions:
self.remove_transaction(txid)
@profiler
def save_transactions(self, write=False):
with self.transaction_lock:
tx = {}
for k,v in self.transactions.items():
tx[k] = str(v)
self.storage.put('transactions', tx)
self.storage.put('txi', self.txi)
self.storage.put('txo', self.txo)
self.storage.put('tx_fees', self.tx_fees)
self.storage.put('addr_history', self.history)
self.storage.put('spent_outpoints', self.spent_outpoints)
if write:
self.storage.write()
def save_verified_tx(self, write=False):
with self.lock:
self.storage.put('verified_tx3', self.verified_tx)
if write:
self.storage.write()
def clear_history(self):
with self.lock:
with self.transaction_lock:
self.txi = {}
self.txo = {}
self.tx_fees = {}
self.spent_outpoints = defaultdict(dict)
self.history = {}
self.verified_tx = {}
self.transactions = {}
self.save_transactions()
@profiler
def check_history(self):
save = False
hist_addrs_mine = list(filter(lambda k: self.is_mine(k), self.history.keys()))
hist_addrs_not_mine = list(filter(lambda k: not self.is_mine(k), self.history.keys()))
for addr in hist_addrs_not_mine:
self.history.pop(addr)
save = True
for addr in hist_addrs_mine:
hist = self.history[addr]
for tx_hash, tx_height in hist:
if self.txi.get(tx_hash) or self.txo.get(tx_hash):
continue
tx = self.transactions.get(tx_hash)
if tx is not None:
self.add_transaction(tx_hash, tx, allow_unrelated=True)
save = True
if save:
self.save_transactions()
def basename(self):
return os.path.basename(self.storage.path)
def save_addresses(self):
self.storage.put('addresses', {'receiving':self.receiving_addresses, 'change':self.change_addresses})
def load_addresses(self):
d = self.storage.get('addresses', {})
if type(d) != dict: d={}
self.receiving_addresses = d.get('receiving', [])
self.change_addresses = d.get('change', [])
def test_addresses_sanity(self):
addrs = self.get_receiving_addresses()
if len(addrs) > 0:
if not bitcoin.is_address(addrs[0]):
raise WalletFileException('The addresses in this wallet are not SHIELD addresses.')
def synchronize(self):
pass
def is_deterministic(self):
return self.keystore.is_deterministic()
def set_up_to_date(self, up_to_date):
with self.lock:
self.up_to_date = up_to_date
if up_to_date:
self.save_transactions(write=True)
# if the verifier is also up to date, persist that too;
# otherwise it will persist its results when it finishes
if self.verifier and self.verifier.is_up_to_date():
self.save_verified_tx(write=True)
def is_up_to_date(self):
with self.lock: return self.up_to_date
def set_label(self, name, text = None):
changed = False
old_text = self.labels.get(name)
if text:
text = text.replace("\n", " ")
if old_text != text:
self.labels[name] = text
changed = True
else:
if old_text:
self.labels.pop(name)
changed = True
if changed:
run_hook('set_label', self, name, text)
self.storage.put('labels', self.labels)
return changed
def set_fiat_value(self, txid, ccy, text):
if txid not in self.transactions:
return
if not text:
d = self.fiat_value.get(ccy, {})
if d and txid in d:
d.pop(txid)
else:
return
else:
try:
Decimal(text)
except:
return
if ccy not in self.fiat_value:
self.fiat_value[ccy] = {}
self.fiat_value[ccy][txid] = text
self.storage.put('fiat_value', self.fiat_value)
def get_fiat_value(self, txid, ccy):
fiat_value = self.fiat_value.get(ccy, {}).get(txid)
try:
return Decimal(fiat_value)
except:
return
def is_mine(self, address):
return address in self.get_addresses()
def is_change(self, address):
if not self.is_mine(address):
return False
return self.get_address_index(address)[0]
def get_address_index(self, address):
raise NotImplementedError()
def get_redeem_script(self, address):
return None
def export_private_key(self, address, password):
if self.is_watching_only():
return []
index = self.get_address_index(address)
pk, compressed = self.keystore.get_private_key(index, password)
txin_type = self.get_txin_type(address)
redeem_script = self.get_redeem_script(address)
serialized_privkey = bitcoin.serialize_privkey(pk, compressed, txin_type)
return serialized_privkey, redeem_script
def get_public_keys(self, address):
return [self.get_public_key(address)]
def add_unverified_tx(self, tx_hash, tx_height):
if tx_height in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT) \
and tx_hash in self.verified_tx:
with self.lock:
self.verified_tx.pop(tx_hash)
if self.verifier:
self.verifier.remove_spv_proof_for_tx(tx_hash)
# tx will be verified only if height > 0
if tx_hash not in self.verified_tx:
with self.lock:
self.unverified_tx[tx_hash] = tx_height
def add_verified_tx(self, tx_hash, info):
# Remove from the unverified map and add to the verified map
with self.lock:
self.unverified_tx.pop(tx_hash, None)
self.verified_tx[tx_hash] = info # (tx_height, timestamp, pos)
height, conf, timestamp = self.get_tx_height(tx_hash)
self.network.trigger_callback('verified', tx_hash, height, conf, timestamp)
def get_unverified_txs(self):
'''Returns a map from tx hash to transaction height'''
with self.lock:
return dict(self.unverified_tx) # copy
def undo_verifications(self, blockchain, height):
'''Used by the verifier when a reorg has happened'''
txs = set()
with self.lock:
for tx_hash, item in list(self.verified_tx.items()):
tx_height, timestamp, pos = item
if tx_height >= height:
header = blockchain.read_header(tx_height)
# fixme: use block hash, not timestamp
if not header or header.get('timestamp') != timestamp:
self.verified_tx.pop(tx_hash, None)
txs.add(tx_hash)
return txs
def get_local_height(self):
""" return last known height if we are offline """
return self.network.get_local_height() if self.network else self.storage.get('stored_height', 0)
def get_tx_height(self, tx_hash):
""" Given a transaction, returns (height, conf, timestamp) """
with self.lock:
if tx_hash in self.verified_tx:
height, timestamp, pos = self.verified_tx[tx_hash]
conf = max(self.get_local_height() - height + 1, 0)
return height, conf, timestamp
elif tx_hash in self.unverified_tx:
height = self.unverified_tx[tx_hash]
return height, 0, None
else:
# local transaction
return TX_HEIGHT_LOCAL, 0, None
def get_txpos(self, tx_hash):
"return position, even if the tx is unverified"
with self.lock:
if tx_hash in self.verified_tx:
height, timestamp, pos = self.verified_tx[tx_hash]
return height, pos
elif tx_hash in self.unverified_tx:
height = self.unverified_tx[tx_hash]
return (height, 0) if height > 0 else ((1e9 - height), 0)
else:
return (1e9+1, 0)
def is_found(self):
        # note: dict_values never compares equal to a list in Python 3,
        # so check explicitly whether any address has a non-empty history
        return any(self.history.values())
def get_num_tx(self, address):
""" return number of transactions where address is involved """
return len(self.history.get(address, []))
def get_tx_delta(self, tx_hash, address):
"effect of tx on address"
delta = 0
        # subtract the value of coins sent from address
d = self.txi.get(tx_hash, {}).get(address, [])
for n, v in d:
delta -= v
# add the value of the coins received at address
d = self.txo.get(tx_hash, {}).get(address, [])
for n, v, cb in d:
delta += v
return delta
def get_tx_value(self, txid):
" effect of tx on the entire domain"
delta = 0
for addr, d in self.txi.get(txid, {}).items():
for n, v in d:
delta -= v
for addr, d in self.txo.get(txid, {}).items():
for n, v, cb in d:
delta += v
return delta
def get_wallet_delta(self, tx):
""" effect of tx on wallet """
is_relevant = False # "related to wallet?"
is_mine = False
is_pruned = False
is_partial = False
v_in = v_out = v_out_mine = 0
for txin in tx.inputs():
addr = self.get_txin_address(txin)
if self.is_mine(addr):
is_mine = True
is_relevant = True
d = self.txo.get(txin['prevout_hash'], {}).get(addr, [])
for n, v, cb in d:
if n == txin['prevout_n']:
value = v
break
else:
value = None
if value is None:
is_pruned = True
else:
v_in += value
else:
is_partial = True
if not is_mine:
is_partial = False
for addr, value in tx.get_outputs():
v_out += value
if self.is_mine(addr):
v_out_mine += value
is_relevant = True
if is_pruned:
# some inputs are mine:
fee = None
if is_mine:
v = v_out_mine - v_out
else:
# no input is mine
v = v_out_mine
else:
v = v_out_mine - v_in
if is_partial:
# some inputs are mine, but not all
fee = None
else:
# all inputs are mine
fee = v_in - v_out
if not is_mine:
fee = None
return is_relevant, is_mine, v, fee
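    # Worked example (hypothetical figures, all inputs ours): with inputs worth
    # 10 and outputs worth 9.4 of which 4 come back to our addresses,
    #   v   = v_out_mine - v_in = 4 - 10  = -6   (net effect on the wallet)
    #   fee = v_in - v_out      = 10 - 9.4 = 0.6
    # If some inputs are not ours (is_partial / is_pruned) the fee cannot be
    # determined from wallet data alone and is reported as None.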
def get_tx_info(self, tx):
is_relevant, is_mine, v, fee = self.get_wallet_delta(tx)
exp_n = None
can_broadcast = False
can_bump = False
label = ''
height = conf = timestamp = None
tx_hash = tx.txid()
if tx.is_complete():
if tx_hash in self.transactions.keys():
label = self.get_label(tx_hash)
height, conf, timestamp = self.get_tx_height(tx_hash)
if height > 0:
if conf:
status = _("{} confirmations").format(conf)
else:
status = _('Not verified')
elif height in (TX_HEIGHT_UNCONF_PARENT, TX_HEIGHT_UNCONFIRMED):
status = _('Unconfirmed')
if fee is None:
fee = self.tx_fees.get(tx_hash)
if fee and self.network and self.network.config.has_fee_mempool():
size = tx.estimated_size()
fee_per_byte = fee / size
exp_n = self.network.config.fee_to_depth(fee_per_byte)
can_bump = is_mine and not tx.is_final()
else:
status = _('Local')
can_broadcast = self.network is not None
else:
status = _("Signed")
can_broadcast = self.network is not None
else:
s, r = tx.signature_count()
status = _("Unsigned") if s == 0 else _('Partially signed') + ' (%d/%d)'%(s,r)
if is_relevant:
if is_mine:
if fee is not None:
amount = v + fee
else:
amount = v
else:
amount = v
else:
amount = None
return tx_hash, status, label, can_broadcast, can_bump, amount, fee, height, conf, timestamp, exp_n
def get_addr_io(self, address):
h = self.get_address_history(address)
received = {}
sent = {}
for tx_hash, height in h:
l = self.txo.get(tx_hash, {}).get(address, [])
for n, v, is_cb in l:
received[tx_hash + ':%d'%n] = (height, v, is_cb)
for tx_hash, height in h:
l = self.txi.get(tx_hash, {}).get(address, [])
for txi, v in l:
sent[txi] = height
return received, sent
def get_addr_utxo(self, address):
coins, spent = self.get_addr_io(address)
for txi in spent:
coins.pop(txi)
out = {}
for txo, v in coins.items():
tx_height, value, is_cb = v
prevout_hash, prevout_n = txo.split(':')
x = {
'address':address,
'value':value,
'prevout_n':int(prevout_n),
'prevout_hash':prevout_hash,
'height':tx_height,
'coinbase':is_cb
}
out[txo] = x
return out
# return the total amount ever received by an address
def get_addr_received(self, address):
received, sent = self.get_addr_io(address)
return sum([v for height, v, is_cb in received.values()])
# return the balance of a bitcoin address: confirmed and matured, unconfirmed, unmatured
def get_addr_balance(self, address):
received, sent = self.get_addr_io(address)
c = u = x = 0
local_height = self.get_local_height()
for txo, (tx_height, v, is_cb) in received.items():
if is_cb and tx_height + COINBASE_MATURITY > local_height:
x += v
elif tx_height > 0:
c += v
else:
u += v
if txo in sent:
if sent[txo] > 0:
c -= v
else:
u -= v
return c, u, x
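    # Worked example (hypothetical coins): an address holding a confirmed coin
    # of 3, an unconfirmed coin of 1 and an immature coinbase of 2 yields
    # get_addr_balance() == (3, 1, 2); once the confirmed coin is spent at a
    # confirmed height it is subtracted from c again, giving (0, 1, 2).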
def get_spendable_coins(self, domain, config):
confirmed_only = config.get('confirmed_only', False)
return self.get_utxos(domain, exclude_frozen=True, mature=True, confirmed_only=confirmed_only)
def get_utxos(self, domain = None, exclude_frozen = False, mature = False, confirmed_only = False):
coins = []
if domain is None:
domain = self.get_addresses()
domain = set(domain)
if exclude_frozen:
domain = set(domain) - self.frozen_addresses
for addr in domain:
utxos = self.get_addr_utxo(addr)
for x in utxos.values():
if confirmed_only and x['height'] <= 0:
continue
if mature and x['coinbase'] and x['height'] + COINBASE_MATURITY > self.get_local_height():
continue
coins.append(x)
continue
return coins
def dummy_address(self):
return self.get_receiving_addresses()[0]
def get_addresses(self):
out = []
out += self.get_receiving_addresses()
out += self.get_change_addresses()
return out
def get_frozen_balance(self):
return self.get_balance(self.frozen_addresses)
def get_balance(self, domain=None):
if domain is None:
domain = self.get_addresses()
domain = set(domain)
cc = uu = xx = 0
for addr in domain:
c, u, x = self.get_addr_balance(addr)
cc += c
uu += u
xx += x
return cc, uu, xx
def get_address_history(self, addr):
h = []
# we need self.transaction_lock but get_tx_height will take self.lock
# so we need to take that too here, to enforce order of locks
with self.lock, self.transaction_lock:
related_txns = self._history_local.get(addr, set())
for tx_hash in related_txns:
tx_height = self.get_tx_height(tx_hash)[0]
h.append((tx_hash, tx_height))
return h
def _add_tx_to_local_history(self, txid):
with self.transaction_lock:
for addr in itertools.chain(self.txi.get(txid, []), self.txo.get(txid, [])):
cur_hist = self._history_local.get(addr, set())
cur_hist.add(txid)
self._history_local[addr] = cur_hist
def _remove_tx_from_local_history(self, txid):
with self.transaction_lock:
for addr in itertools.chain(self.txi.get(txid, []), self.txo.get(txid, [])):
cur_hist = self._history_local.get(addr, set())
try:
cur_hist.remove(txid)
except KeyError:
pass
else:
self._history_local[addr] = cur_hist
def get_txin_address(self, txi):
addr = txi.get('address')
if addr and addr != "(pubkey)":
return addr
prevout_hash = txi.get('prevout_hash')
prevout_n = txi.get('prevout_n')
dd = self.txo.get(prevout_hash, {})
for addr, l in dd.items():
for n, v, is_cb in l:
if n == prevout_n:
return addr
return None
def get_txout_address(self, txo):
_type, x, v = txo
if _type == TYPE_ADDRESS:
addr = x
elif _type == TYPE_PUBKEY:
addr = bitcoin.public_key_to_p2pkh(bfh(x))
else:
addr = None
return addr
def get_conflicting_transactions(self, tx):
"""Returns a set of transaction hashes from the wallet history that are
directly conflicting with tx, i.e. they have common outpoints being
spent with tx. If the tx is already in wallet history, that will not be
reported as a conflict.
"""
conflicting_txns = set()
with self.transaction_lock:
for txin in tx.inputs():
if txin['type'] == 'coinbase':
continue
prevout_hash = txin['prevout_hash']
prevout_n = txin['prevout_n']
spending_tx_hash = self.spent_outpoints[prevout_hash].get(prevout_n)
if spending_tx_hash is None:
continue
# this outpoint has already been spent, by spending_tx
assert spending_tx_hash in self.transactions
conflicting_txns |= {spending_tx_hash}
txid = tx.txid()
if txid in conflicting_txns:
# this tx is already in history, so it conflicts with itself
if len(conflicting_txns) > 1:
raise Exception('Found conflicting transactions already in wallet history.')
conflicting_txns -= {txid}
return conflicting_txns
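    # Illustrative sketch (hypothetical txids): if tx_a in the wallet history
    # spends outpoint "abcd:0" and a new tx_b spends that same outpoint, then
    # get_conflicting_transactions(tx_b) returns {txid_of_tx_a}. A tx that is
    # already in the history never reports itself as its own conflict.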
def add_transaction(self, tx_hash, tx, allow_unrelated=False):
assert tx_hash, tx_hash
assert tx, tx
assert tx.is_complete()
# we need self.transaction_lock but get_tx_height will take self.lock
# so we need to take that too here, to enforce order of locks
with self.lock, self.transaction_lock:
            # NOTE: returning early if tx is already in self.transactions might seem
            # like a good idea, BUT we track is_mine inputs in a txn, and during
            # subsequent calls of add_transaction for the same tx we might learn of
            # more and more inputs being is_mine, as we roll the gap_limit forward
is_coinbase = tx.inputs()[0]['type'] == 'coinbase'
tx_height = self.get_tx_height(tx_hash)[0]
if not allow_unrelated:
# note that during sync, if the transactions are not properly sorted,
# it could happen that we think tx is unrelated but actually one of the inputs is is_mine.
# this is the main motivation for allow_unrelated
is_mine = any([self.is_mine(self.get_txin_address(txin)) for txin in tx.inputs()])
is_for_me = any([self.is_mine(self.get_txout_address(txo)) for txo in tx.outputs()])
if not is_mine and not is_for_me:
raise UnrelatedTransactionException()
# Find all conflicting transactions.
# In case of a conflict,
# 1. confirmed > mempool > local
# 2. this new txn has priority over existing ones
# When this method exits, there must NOT be any conflict, so
# either keep this txn and remove all conflicting (along with dependencies)
# or drop this txn
conflicting_txns = self.get_conflicting_transactions(tx)
if conflicting_txns:
existing_mempool_txn = any(
self.get_tx_height(tx_hash2)[0] in (TX_HEIGHT_UNCONFIRMED, TX_HEIGHT_UNCONF_PARENT)
for tx_hash2 in conflicting_txns)
existing_confirmed_txn = any(
self.get_tx_height(tx_hash2)[0] > 0
for tx_hash2 in conflicting_txns)
if existing_confirmed_txn and tx_height <= 0:
# this is a non-confirmed tx that conflicts with confirmed txns; drop.
return False
if existing_mempool_txn and tx_height == TX_HEIGHT_LOCAL:
# this is a local tx that conflicts with non-local txns; drop.
return False
# keep this txn and remove all conflicting
to_remove = set()
to_remove |= conflicting_txns
for conflicting_tx_hash in conflicting_txns:
to_remove |= self.get_depending_transactions(conflicting_tx_hash)
for tx_hash2 in to_remove:
self.remove_transaction(tx_hash2)
# add inputs
def add_value_from_prev_output():
dd = self.txo.get(prevout_hash, {})
# note: this nested loop takes linear time in num is_mine outputs of prev_tx
for addr, outputs in dd.items():
# note: instead of [(n, v, is_cb), ...]; we could store: {n -> (v, is_cb)}
for n, v, is_cb in outputs:
if n == prevout_n:
if addr and self.is_mine(addr):
if d.get(addr) is None:
d[addr] = set()
d[addr].add((ser, v))
return
self.txi[tx_hash] = d = {}
for txi in tx.inputs():
if txi['type'] == 'coinbase':
continue
prevout_hash = txi['prevout_hash']
prevout_n = txi['prevout_n']
ser = prevout_hash + ':%d' % prevout_n
self.spent_outpoints[prevout_hash][prevout_n] = tx_hash
add_value_from_prev_output()
# add outputs
self.txo[tx_hash] = d = {}
for n, txo in enumerate(tx.outputs()):
v = txo[2]
ser = tx_hash + ':%d'%n
addr = self.get_txout_address(txo)
if addr and self.is_mine(addr):
if d.get(addr) is None:
d[addr] = []
d[addr].append((n, v, is_coinbase))
# give v to txi that spends me
next_tx = self.spent_outpoints[tx_hash].get(n)
if next_tx is not None:
dd = self.txi.get(next_tx, {})
if dd.get(addr) is None:
dd[addr] = set()
if (ser, v) not in dd[addr]:
dd[addr].add((ser, v))
self._add_tx_to_local_history(next_tx)
# add to local history
self._add_tx_to_local_history(tx_hash)
# save
self.transactions[tx_hash] = tx
return True
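    # Illustrative note on the conflict rules above (a sketch, not authoritative):
    # adding an unconfirmed tx that conflicts with a confirmed one is rejected
    # (returns False); adding a mempool tx that conflicts with a merely local tx
    # wins, and the local tx plus all of its descendants are removed first.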
def remove_transaction(self, tx_hash):
def remove_from_spent_outpoints():
# undo spends in spent_outpoints
if tx is not None: # if we have the tx, this branch is faster
for txin in tx.inputs():
if txin['type'] == 'coinbase':
continue
prevout_hash = txin['prevout_hash']
prevout_n = txin['prevout_n']
self.spent_outpoints[prevout_hash].pop(prevout_n, None)
if not self.spent_outpoints[prevout_hash]:
self.spent_outpoints.pop(prevout_hash)
else: # expensive but always works
for prevout_hash, d in list(self.spent_outpoints.items()):
for prevout_n, spending_txid in d.items():
if spending_txid == tx_hash:
self.spent_outpoints[prevout_hash].pop(prevout_n, None)
if not self.spent_outpoints[prevout_hash]:
self.spent_outpoints.pop(prevout_hash)
# Remove this tx itself; if nothing spends from it.
# It is not so clear what to do if other txns spend from it, but it will be
# removed when those other txns are removed.
if not self.spent_outpoints[tx_hash]:
self.spent_outpoints.pop(tx_hash)
with self.transaction_lock:
self.print_error("removing tx from history", tx_hash)
tx = self.transactions.pop(tx_hash, None)
remove_from_spent_outpoints()
self._remove_tx_from_local_history(tx_hash)
self.txi.pop(tx_hash, None)
self.txo.pop(tx_hash, None)
def receive_tx_callback(self, tx_hash, tx, tx_height):
self.add_unverified_tx(tx_hash, tx_height)
self.add_transaction(tx_hash, tx, allow_unrelated=True)
def receive_history_callback(self, addr, hist, tx_fees):
with self.lock:
old_hist = self.get_address_history(addr)
for tx_hash, height in old_hist:
if (tx_hash, height) not in hist:
# make tx local
self.unverified_tx.pop(tx_hash, None)
self.verified_tx.pop(tx_hash, None)
if self.verifier:
self.verifier.remove_spv_proof_for_tx(tx_hash)
self.history[addr] = hist
for tx_hash, tx_height in hist:
# add it in case it was previously unconfirmed
self.add_unverified_tx(tx_hash, tx_height)
# if addr is new, we have to recompute txi and txo
tx = self.transactions.get(tx_hash)
if tx is None:
continue
self.add_transaction(tx_hash, tx, allow_unrelated=True)
# Store fees
self.tx_fees.update(tx_fees)
def get_history(self, domain=None):
# get domain
if domain is None:
domain = self.get_addresses()
domain = set(domain)
# 1. Get the history of each address in the domain, maintain the
# delta of a tx as the sum of its deltas on domain addresses
tx_deltas = defaultdict(int)
for addr in domain:
h = self.get_address_history(addr)
for tx_hash, height in h:
delta = self.get_tx_delta(tx_hash, addr)
if delta is None or tx_deltas[tx_hash] is None:
tx_deltas[tx_hash] = None
else:
tx_deltas[tx_hash] += delta
# 2. create sorted history
history = []
for tx_hash in tx_deltas:
delta = tx_deltas[tx_hash]
height, conf, timestamp = self.get_tx_height(tx_hash)
history.append((tx_hash, height, conf, timestamp, delta))
history.sort(key = lambda x: self.get_txpos(x[0]))
history.reverse()
# 3. add balance
c, u, x = self.get_balance(domain)
balance = c + u + x
h2 = []
for tx_hash, height, conf, timestamp, delta in history:
h2.append((tx_hash, height, conf, timestamp, delta, balance))
if balance is None or delta is None:
balance = None
else:
balance -= delta
h2.reverse()
# fixme: this may happen if history is incomplete
if balance not in [None, 0]:
self.print_error("Error: history not synchronized")
return []
return h2
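    # Illustrative sketch of the running balance (hypothetical deltas): with a
    # total balance of 5 and two txs with deltas +3 (older) and +2 (newer), the
    # returned oldest-first list pairs each delta with the balance right after
    # that tx: [(.., +3, 3), (.., +2, 5)].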
def balance_at_timestamp(self, domain, target_timestamp):
h = self.get_history(domain)
for tx_hash, height, conf, timestamp, value, balance in h:
if timestamp > target_timestamp:
return balance - value
# return last balance
return balance
@profiler
def get_full_history(self, domain=None, from_timestamp=None, to_timestamp=None, fx=None, show_addresses=False):
from .util import timestamp_to_datetime, Satoshis, Fiat
out = []
income = 0
expenditures = 0
capital_gains = Decimal(0)
fiat_income = Decimal(0)
fiat_expenditures = Decimal(0)
h = self.get_history(domain)
for tx_hash, height, conf, timestamp, value, balance in h:
if from_timestamp and (timestamp or time.time()) < from_timestamp:
continue
if to_timestamp and (timestamp or time.time()) >= to_timestamp:
continue
item = {
'txid':tx_hash,
'height':height,
'confirmations':conf,
'timestamp':timestamp,
'value': Satoshis(value),
'balance': Satoshis(balance)
}
item['date'] = timestamp_to_datetime(timestamp)
item['label'] = self.get_label(tx_hash)
if show_addresses:
tx = self.transactions.get(tx_hash)
item['inputs'] = list(map(lambda x: dict((k, x[k]) for k in ('prevout_hash', 'prevout_n')), tx.inputs()))
item['outputs'] = list(map(lambda x:{'address':x[0], 'value':Satoshis(x[1])}, tx.get_outputs()))
# value may be None if wallet is not fully synchronized
if value is None:
continue
# fixme: use in and out values
if value < 0:
expenditures += -value
else:
income += value
# fiat computations
if fx and fx.is_enabled():
date = timestamp_to_datetime(timestamp)
fiat_value = self.get_fiat_value(tx_hash, fx.ccy)
fiat_default = fiat_value is None
fiat_value = fiat_value if fiat_value is not None else value / Decimal(COIN) * self.price_at_timestamp(tx_hash, fx.timestamp_rate)
item['fiat_value'] = Fiat(fiat_value, fx.ccy)
item['fiat_default'] = fiat_default
if value < 0:
acquisition_price = - value / Decimal(COIN) * self.average_price(tx_hash, fx.timestamp_rate, fx.ccy)
liquidation_price = - fiat_value
item['acquisition_price'] = Fiat(acquisition_price, fx.ccy)
cg = liquidation_price - acquisition_price
item['capital_gain'] = Fiat(cg, fx.ccy)
capital_gains += cg
fiat_expenditures += -fiat_value
else:
fiat_income += fiat_value
out.append(item)
# add summary
if out:
b, v = out[0]['balance'].value, out[0]['value'].value
start_balance = None if b is None or v is None else b - v
end_balance = out[-1]['balance'].value
if from_timestamp is not None and to_timestamp is not None:
start_date = timestamp_to_datetime(from_timestamp)
end_date = timestamp_to_datetime(to_timestamp)
else:
start_date = None
end_date = None
summary = {
'start_date': start_date,
'end_date': end_date,
'start_balance': Satoshis(start_balance),
'end_balance': Satoshis(end_balance),
'income': Satoshis(income),
'expenditures': Satoshis(expenditures)
}
if fx and fx.is_enabled():
unrealized = self.unrealized_gains(domain, fx.timestamp_rate, fx.ccy)
summary['capital_gains'] = Fiat(capital_gains, fx.ccy)
summary['fiat_income'] = Fiat(fiat_income, fx.ccy)
summary['fiat_expenditures'] = Fiat(fiat_expenditures, fx.ccy)
summary['unrealized_gains'] = Fiat(unrealized, fx.ccy)
summary['start_fiat_balance'] = Fiat(fx.historical_value(start_balance, start_date), fx.ccy)
summary['end_fiat_balance'] = Fiat(fx.historical_value(end_balance, end_date), fx.ccy)
summary['start_fiat_value'] = Fiat(fx.historical_value(COIN, start_date), fx.ccy)
summary['end_fiat_value'] = Fiat(fx.historical_value(COIN, end_date), fx.ccy)
else:
summary = {}
return {
'transactions': out,
'summary': summary
}
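    # Illustrative sketch of the fiat summary (assumes fx is enabled): for each
    # outgoing entry, capital_gain = liquidation_price - acquisition_price, where
    # the acquisition price comes from average_price() over the spent inputs.
    # E.g. coins acquired for 10 units of fiat and spent at a 15-unit valuation
    # (hypothetical numbers) add Fiat(5, fx.ccy) to summary['capital_gains'].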
def get_label(self, tx_hash):
label = self.labels.get(tx_hash, '')
        if label == '':
label = self.get_default_label(tx_hash)
return label
def get_default_label(self, tx_hash):
if self.txi.get(tx_hash) == {}:
d = self.txo.get(tx_hash, {})
labels = []
for addr in d.keys():
label = self.labels.get(addr)
if label:
labels.append(label)
return ', '.join(labels)
return ''
def get_tx_status(self, tx_hash, height, conf, timestamp):
from .util import format_time
extra = []
if conf == 0:
tx = self.transactions.get(tx_hash)
if not tx:
return 2, 'unknown'
is_final = tx and tx.is_final()
if not is_final:
extra.append('rbf')
fee = self.get_wallet_delta(tx)[3]
if fee is None:
fee = self.tx_fees.get(tx_hash)
if fee is not None:
size = tx.estimated_size()
fee_per_byte = fee / size
extra.append(format_fee_satoshis(fee_per_byte) + ' shell/b')
if fee is not None and height in (TX_HEIGHT_UNCONF_PARENT, TX_HEIGHT_UNCONFIRMED) \
and self.network and self.network.config.has_fee_mempool():
exp_n = self.network.config.fee_to_depth(fee_per_byte)
if exp_n:
extra.append('%.2f MB'%(exp_n/1000000))
if height == TX_HEIGHT_LOCAL:
status = 3
elif height == TX_HEIGHT_UNCONF_PARENT:
status = 1
elif height == TX_HEIGHT_UNCONFIRMED:
status = 0
else:
status = 2
else:
status = 3 + min(conf, 6)
time_str = format_time(timestamp) if timestamp else _("unknown")
status_str = TX_STATUS[status] if status < 4 else time_str
if extra:
status_str += ' [%s]'%(', '.join(extra))
return status, status_str
def relayfee(self):
return relayfee(self.network)
def dust_threshold(self):
return dust_threshold(self.network)
def make_unsigned_transaction(self, inputs, outputs, config, fixed_fee=None,
change_addr=None, is_sweep=False):
# check outputs
i_max = None
for i, o in enumerate(outputs):
_type, data, value = o
if _type == TYPE_ADDRESS:
if not is_address(data):
raise Exception("Invalid SHIELD address: {}".format(data))
if value == '!':
if i_max is not None:
raise Exception("More than one output set to spend max")
i_max = i
# Avoid index-out-of-range with inputs[0] below
if not inputs:
raise NotEnoughFunds()
if fixed_fee is None and config.fee_per_kb() is None:
raise NoDynamicFeeEstimates()
for item in inputs:
self.add_input_info(item)
# change address
if change_addr:
change_addrs = [change_addr]
else:
addrs = self.get_change_addresses()[-self.gap_limit_for_change:]
if self.use_change and addrs:
                # New change addresses are created only after a few
                # confirmations.  Select the unused addresses within the
                # gap limit; if there are none, take one at random.
change_addrs = [addr for addr in addrs if
self.get_num_tx(addr) == 0]
if not change_addrs:
change_addrs = [random.choice(addrs)]
else:
# coin_chooser will set change address
change_addrs = []
# Fee estimator
if fixed_fee is None:
fee_estimator = config.estimate_fee
elif isinstance(fixed_fee, Number):
fee_estimator = lambda size: fixed_fee
elif callable(fixed_fee):
fee_estimator = fixed_fee
else:
raise Exception('Invalid argument fixed_fee: %s' % fixed_fee)
if i_max is None:
# Let the coin chooser select the coins to spend
max_change = self.max_change_outputs if self.multiple_change else 1
coin_chooser = coinchooser.get_coin_chooser(config)
tx = coin_chooser.make_tx(inputs, outputs, change_addrs[:max_change],
fee_estimator, self.dust_threshold())
else:
# FIXME?? this might spend inputs with negative effective value...
sendable = sum(map(lambda x:x['value'], inputs))
_type, data, value = outputs[i_max]
outputs[i_max] = (_type, data, 0)
tx = Transaction.from_io(inputs, outputs[:])
fee = fee_estimator(tx.estimated_size())
amount = sendable - tx.output_value() - fee
if amount < 0:
raise NotEnoughFunds()
outputs[i_max] = (_type, data, amount)
tx = Transaction.from_io(inputs, outputs[:])
# Sort the inputs and outputs deterministically
tx.BIP_LI01_sort()
# Timelock tx to current height.
tx.locktime = self.get_local_height()
run_hook('make_unsigned_transaction', self, tx)
return tx
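    # Illustrative usage sketch (hypothetical names, not documented API):
    #   coins = wallet.get_spendable_coins(None, config)
    #   outputs = [(TYPE_ADDRESS, destination_addr, '!')]   # '!' = spend max
    #   tx = wallet.make_unsigned_transaction(coins, outputs, config)
    # With '!', the output amount is first set to 0, the fee is estimated from
    # the resulting size, and the output is then set to sendable - fee.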
def mktx(self, outputs, password, config, fee=None, change_addr=None, domain=None):
coins = self.get_spendable_coins(domain, config)
tx = self.make_unsigned_transaction(coins, outputs, config, fee, change_addr)
self.sign_transaction(tx, password)
return tx
def is_frozen(self, addr):
return addr in self.frozen_addresses
def set_frozen_state(self, addrs, freeze):
'''Set frozen state of the addresses to FREEZE, True or False'''
if all(self.is_mine(addr) for addr in addrs):
if freeze:
self.frozen_addresses |= set(addrs)
else:
self.frozen_addresses -= set(addrs)
self.storage.put('frozen_addresses', list(self.frozen_addresses))
return True
return False
def load_unverified_transactions(self):
# review transactions that are in the history
for addr, hist in self.history.items():
for tx_hash, tx_height in hist:
# add it in case it was previously unconfirmed
self.add_unverified_tx(tx_hash, tx_height)
def start_threads(self, network):
self.network = network
if self.network is not None:
self.verifier = SPV(self.network, self)
self.synchronizer = Synchronizer(self, network)
network.add_jobs([self.verifier, self.synchronizer])
else:
self.verifier = None
self.synchronizer = None
def stop_threads(self):
if self.network:
self.network.remove_jobs([self.synchronizer, self.verifier])
self.synchronizer.release()
self.synchronizer = None
self.verifier = None
# Now no references to the synchronizer or verifier
# remain so they will be GC-ed
self.storage.put('stored_height', self.get_local_height())
self.save_transactions()
self.save_verified_tx()
self.storage.write()
def wait_until_synchronized(self, callback=None):
def wait_for_wallet():
self.set_up_to_date(False)
while not self.is_up_to_date():
if callback:
msg = "%s\n%s %d"%(
_("Please wait..."),
_("Addresses generated:"),
len(self.addresses(True)))
callback(msg)
time.sleep(0.1)
def wait_for_network():
while not self.network.is_connected():
if callback:
msg = "%s \n" % (_("Connecting..."))
callback(msg)
time.sleep(0.1)
# wait until we are connected, because the user
# might have selected another server
if self.network:
wait_for_network()
wait_for_wallet()
else:
self.synchronize()
def can_export(self):
return not self.is_watching_only() and hasattr(self.keystore, 'get_private_key')
def is_used(self, address):
h = self.history.get(address,[])
if len(h) == 0:
return False
c, u, x = self.get_addr_balance(address)
return c + u + x == 0
def is_empty(self, address):
c, u, x = self.get_addr_balance(address)
return c+u+x == 0
def address_is_old(self, address, age_limit=2):
age = -1
h = self.history.get(address, [])
for tx_hash, tx_height in h:
if tx_height <= 0:
tx_age = 0
else:
tx_age = self.get_local_height() - tx_height + 1
if tx_age > age:
age = tx_age
return age > age_limit
def bump_fee(self, tx, delta):
if tx.is_final():
raise CannotBumpFee(_('Cannot bump fee') + ': ' + _('transaction is final'))
tx = Transaction(tx.serialize())
tx.deserialize(force_full_parse=True) # need to parse inputs
inputs = copy.deepcopy(tx.inputs())
outputs = copy.deepcopy(tx.outputs())
for txin in inputs:
txin['signatures'] = [None] * len(txin['signatures'])
self.add_input_info(txin)
# use own outputs
s = list(filter(lambda x: self.is_mine(x[1]), outputs))
# ... unless there is none
if not s:
s = outputs
x_fee = run_hook('get_tx_extra_fee', self, tx)
if x_fee:
x_fee_address, x_fee_amount = x_fee
s = filter(lambda x: x[1]!=x_fee_address, s)
# prioritize low value outputs, to get rid of dust
s = sorted(s, key=lambda x: x[2])
for o in s:
i = outputs.index(o)
otype, address, value = o
if value - delta >= self.dust_threshold():
outputs[i] = otype, address, value - delta
delta = 0
break
else:
del outputs[i]
delta -= value
if delta > 0:
continue
if delta > 0:
raise CannotBumpFee(_('Cannot bump fee') + ': ' + _('could not find suitable outputs'))
locktime = self.get_local_height()
tx_new = Transaction.from_io(inputs, outputs, locktime=locktime)
tx_new.BIP_LI01_sort()
return tx_new
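    # Illustrative sketch (hypothetical numbers): bump_fee pays the extra
    # `delta` out of our own outputs. With a 10000-satoshi change output and
    # delta=1000 the change shrinks to 9000; if the reduced value would fall
    # below the dust threshold the whole output is dropped instead, and any
    # shortfall is taken from the next eligible output.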
def cpfp(self, tx, fee):
txid = tx.txid()
for i, o in enumerate(tx.outputs()):
otype, address, value = o
if otype == TYPE_ADDRESS and self.is_mine(address):
break
else:
return
coins = self.get_addr_utxo(address)
item = coins.get(txid+':%d'%i)
if not item:
return
self.add_input_info(item)
inputs = [item]
outputs = [(TYPE_ADDRESS, address, value - fee)]
locktime = self.get_local_height()
# note: no need to call tx.BIP_LI01_sort() here - single input/output
return Transaction.from_io(inputs, outputs, locktime=locktime)
def add_input_sig_info(self, txin, address):
raise NotImplementedError() # implemented by subclasses
def add_input_info(self, txin):
address = txin['address']
if self.is_mine(address):
txin['type'] = self.get_txin_type(address)
# segwit needs value to sign
if txin.get('value') is None and Transaction.is_input_value_needed(txin):
received, spent = self.get_addr_io(address)
item = received.get(txin['prevout_hash']+':%d'%txin['prevout_n'])
tx_height, value, is_cb = item
txin['value'] = value
self.add_input_sig_info(txin, address)
def add_input_info_to_all_inputs(self, tx):
if tx.is_complete():
return
for txin in tx.inputs():
self.add_input_info(txin)
def can_sign(self, tx):
if tx.is_complete():
return False
# add info to inputs if we can; otherwise we might return a false negative:
self.add_input_info_to_all_inputs(tx) # though note that this is a side-effect
for k in self.get_keystores():
if k.can_sign(tx):
return True
return False
def get_input_tx(self, tx_hash, ignore_timeout=False):
# First look up an input transaction in the wallet where it
# will likely be. If co-signing a transaction it may not have
# all the input txs, in which case we ask the network.
tx = self.transactions.get(tx_hash, None)
if not tx and self.network:
try:
tx = Transaction(self.network.get_transaction(tx_hash))
except TimeoutException as e:
self.print_error('getting input txn from network timed out for {}'.format(tx_hash))
if not ignore_timeout:
raise e
return tx
def add_hw_info(self, tx):
# add previous tx for hw wallets
for txin in tx.inputs():
tx_hash = txin['prevout_hash']
# segwit inputs might not be needed for some hw wallets
ignore_timeout = Transaction.is_segwit_input(txin)
txin['prev_tx'] = self.get_input_tx(tx_hash, ignore_timeout)
# add output info for hw wallets
info = {}
xpubs = self.get_master_public_keys()
for txout in tx.outputs():
_type, addr, amount = txout
if self.is_mine(addr):
index = self.get_address_index(addr)
pubkeys = self.get_public_keys(addr)
# sort xpubs using the order of pubkeys
sorted_pubkeys, sorted_xpubs = zip(*sorted(zip(pubkeys, xpubs)))
info[addr] = index, sorted_xpubs, self.m if isinstance(self, Multisig_Wallet) else None
tx.output_info = info
def sign_transaction(self, tx, password):
if self.is_watching_only():
return
self.add_input_info_to_all_inputs(tx)
# hardware wallets require extra info
if any([(isinstance(k, Hardware_KeyStore) and k.can_sign(tx)) for k in self.get_keystores()]):
self.add_hw_info(tx)
# sign. start with ready keystores.
for k in sorted(self.get_keystores(), key=lambda ks: ks.ready_to_sign(), reverse=True):
try:
if k.can_sign(tx):
k.sign_transaction(tx, password)
except UserCancelled:
continue
return tx
def get_unused_addresses(self):
# fixme: use slots from expired requests
domain = self.get_receiving_addresses()
return [addr for addr in domain if not self.history.get(addr)
and addr not in self.receive_requests.keys()]
def get_unused_address(self):
addrs = self.get_unused_addresses()
if addrs:
return addrs[0]
def get_receiving_address(self):
# always return an address
domain = self.get_receiving_addresses()
if not domain:
return
choice = domain[0]
for addr in domain:
if not self.history.get(addr):
if addr not in self.receive_requests.keys():
return addr
else:
choice = addr
return choice
def get_payment_status(self, address, amount):
local_height = self.get_local_height()
received, sent = self.get_addr_io(address)
l = []
for txo, x in received.items():
h, v, is_cb = x
txid, n = txo.split(':')
info = self.verified_tx.get(txid)
if info:
tx_height, timestamp, pos = info
conf = local_height - tx_height
else:
conf = 0
l.append((conf, v))
vsum = 0
for conf, v in reversed(sorted(l)):
vsum += v
if vsum >= amount:
return True, conf
return False, None
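    # Illustrative sketch: coins received at the request address are summed in
    # order of decreasing confirmations; once the running sum reaches `amount`
    # the request is paid. E.g. (hypothetical) UTXOs of 2 and 3 against a
    # 4-coin request give (True, <confirmations of the last coin counted>).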
def get_payment_request(self, addr, config):
r = self.receive_requests.get(addr)
if not r:
return
out = copy.copy(r)
out['URI'] = 'shield:' + addr + '?amount=' + format_satoshis(out.get('amount'))
status, conf = self.get_request_status(addr)
out['status'] = status
if conf is not None:
out['confirmations'] = conf
# check if bip70 file exists
rdir = config.get('requests_dir')
if rdir:
key = out.get('id', addr)
path = os.path.join(rdir, 'req', key[0], key[1], key)
if os.path.exists(path):
baseurl = 'file://' + rdir
rewrite = config.get('url_rewrite')
if rewrite:
try:
baseurl = baseurl.replace(*rewrite)
except BaseException as e:
self.print_stderr('Invalid config setting for "url_rewrite". err:', e)
out['request_url'] = os.path.join(baseurl, 'req', key[0], key[1], key, key)
out['URI'] += '&r=' + out['request_url']
out['index_url'] = os.path.join(baseurl, 'index.html') + '?id=' + key
websocket_server_announce = config.get('websocket_server_announce')
if websocket_server_announce:
out['websocket_server'] = websocket_server_announce
else:
out['websocket_server'] = config.get('websocket_server', 'localhost')
websocket_port_announce = config.get('websocket_port_announce')
if websocket_port_announce:
out['websocket_port'] = websocket_port_announce
else:
out['websocket_port'] = config.get('websocket_port', 9999)
return out
def get_request_status(self, key):
r = self.receive_requests.get(key)
if r is None:
return PR_UNKNOWN
address = r['address']
amount = r.get('amount')
timestamp = r.get('time', 0)
if timestamp and type(timestamp) != int:
timestamp = 0
expiration = r.get('exp')
if expiration and type(expiration) != int:
expiration = 0
conf = None
if amount:
if self.up_to_date:
paid, conf = self.get_payment_status(address, amount)
status = PR_PAID if paid else PR_UNPAID
if status == PR_UNPAID and expiration is not None and time.time() > timestamp + expiration:
status = PR_EXPIRED
else:
status = PR_UNKNOWN
else:
status = PR_UNKNOWN
return status, conf
def make_payment_request(self, addr, amount, message, expiration):
timestamp = int(time.time())
_id = bh2u(Hash(addr + "%d"%timestamp))[0:10]
r = {'time':timestamp, 'amount':amount, 'exp':expiration, 'address':addr, 'memo':message, 'id':_id}
return r
def sign_payment_request(self, key, alias, alias_addr, password):
req = self.receive_requests.get(key)
alias_privkey = self.export_private_key(alias_addr, password)[0]
pr = paymentrequest.make_unsigned_request(req)
paymentrequest.sign_request_with_alias(pr, alias, alias_privkey)
req['name'] = pr.pki_data
req['sig'] = bh2u(pr.signature)
self.receive_requests[key] = req
self.storage.put('payment_requests', self.receive_requests)
def add_payment_request(self, req, config):
addr = req['address']
if not bitcoin.is_address(addr):
raise Exception(_('Invalid SHIELD address.'))
if not self.is_mine(addr):
raise Exception(_('Address not in wallet.'))
amount = req.get('amount')
message = req.get('memo')
self.receive_requests[addr] = req
self.storage.put('payment_requests', self.receive_requests)
self.set_label(addr, message) # should be a default label
rdir = config.get('requests_dir')
if rdir and amount is not None:
key = req.get('id', addr)
pr = paymentrequest.make_request(config, req)
path = os.path.join(rdir, 'req', key[0], key[1], key)
if not os.path.exists(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
with open(os.path.join(path, key), 'wb') as f:
f.write(pr.SerializeToString())
# reload
req = self.get_payment_request(addr, config)
with open(os.path.join(path, key + '.json'), 'w', encoding='utf-8') as f:
f.write(json.dumps(req))
return req
def remove_payment_request(self, addr, config):
if addr not in self.receive_requests:
return False
r = self.receive_requests.pop(addr)
rdir = config.get('requests_dir')
if rdir:
key = r.get('id', addr)
for s in ['.json', '']:
n = os.path.join(rdir, 'req', key[0], key[1], key, key + s)
if os.path.exists(n):
os.unlink(n)
self.storage.put('payment_requests', self.receive_requests)
return True
def get_sorted_requests(self, config):
def f(addr):
try:
return self.get_address_index(addr)
except:
return
keys = map(lambda x: (f(x), x), self.receive_requests.keys())
sorted_keys = sorted(filter(lambda x: x[0] is not None, keys))
return [self.get_payment_request(x[1], config) for x in sorted_keys]
def get_fingerprint(self):
raise NotImplementedError()
def can_import_privkey(self):
return False
def can_import_address(self):
return False
def can_delete_address(self):
return False
def add_address(self, address):
if address not in self.history:
self.history[address] = []
if self.synchronizer:
self.synchronizer.add(address)
def has_password(self):
return self.has_keystore_encryption() or self.has_storage_encryption()
def can_have_keystore_encryption(self):
return self.keystore and self.keystore.may_have_password()
def get_available_storage_encryption_version(self):
"""Returns the type of storage encryption offered to the user.
A wallet file (storage) is either encrypted with this version
or is stored in plaintext.
"""
if isinstance(self.keystore, Hardware_KeyStore):
return STO_EV_XPUB_PW
else:
return STO_EV_USER_PW
def has_keystore_encryption(self):
"""Returns whether encryption is enabled for the keystore.
If True, e.g. signing a transaction will require a password.
"""
if self.can_have_keystore_encryption():
return self.storage.get('use_encryption', False)
return False
def has_storage_encryption(self):
"""Returns whether encryption is enabled for the wallet file on disk."""
return self.storage.is_encrypted()
@classmethod
def may_have_password(cls):
return True
def check_password(self, password):
if self.has_keystore_encryption():
self.keystore.check_password(password)
self.storage.check_password(password)
def update_password(self, old_pw, new_pw, encrypt_storage=False):
if old_pw is None and self.has_password():
raise InvalidPassword()
self.check_password(old_pw)
if encrypt_storage:
enc_version = self.get_available_storage_encryption_version()
else:
enc_version = STO_EV_PLAINTEXT
self.storage.set_password(new_pw, enc_version)
# note: Encrypting storage with a hw device is currently only
# allowed for non-multisig wallets. Further,
# Hardware_KeyStore.may_have_password() == False.
# If these were not the case,
# extra care would need to be taken when encrypting keystores.
self._update_password_for_keystore(old_pw, new_pw)
encrypt_keystore = self.can_have_keystore_encryption()
self.storage.set_keystore_encryption(bool(new_pw) and encrypt_keystore)
self.storage.write()
def sign_message(self, address, message, password):
index = self.get_address_index(address)
return self.keystore.sign_message(index, message, password)
def decrypt_message(self, pubkey, message, password):
addr = self.pubkeys_to_address(pubkey)
index = self.get_address_index(addr)
return self.keystore.decrypt_message(index, message, password)
def get_depending_transactions(self, tx_hash):
"""Returns all (grand-)children of tx_hash in this wallet."""
children = set()
# TODO rewrite this to use self.spent_outpoints
for other_hash, tx in self.transactions.items():
for input in (tx.inputs()):
if input["prevout_hash"] == tx_hash:
children.add(other_hash)
children |= self.get_depending_transactions(other_hash)
return children
def txin_value(self, txin):
txid = txin['prevout_hash']
prev_n = txin['prevout_n']
for address, d in self.txo.get(txid, {}).items():
for n, v, cb in d:
if n == prev_n:
return v
# may occur if wallet is not synchronized
return None
def price_at_timestamp(self, txid, price_func):
"""Returns fiat price of SHIELD at the time tx got confirmed."""
height, conf, timestamp = self.get_tx_height(txid)
return price_func(timestamp if timestamp else time.time())
def unrealized_gains(self, domain, price_func, ccy):
coins = self.get_utxos(domain)
now = time.time()
p = price_func(now)
ap = sum(self.coin_price(coin['prevout_hash'], price_func, ccy, self.txin_value(coin)) for coin in coins)
lp = sum([coin['value'] for coin in coins]) * p / Decimal(COIN)
return lp - ap
def average_price(self, txid, price_func, ccy):
""" Average acquisition price of the inputs of a transaction """
input_value = 0
total_price = 0
for addr, d in self.txi.get(txid, {}).items():
for ser, v in d:
input_value += v
total_price += self.coin_price(ser.split(':')[0], price_func, ccy, v)
return total_price / (input_value/Decimal(COIN))
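    # Worked example (hypothetical figures): a tx spending 1 COIN acquired at
    # 100 ccy and 1 COIN acquired at 200 ccy gives total_price = 300 and
    # input_value = 2 COIN, so the average acquisition price is 150 ccy/COIN.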
def coin_price(self, txid, price_func, ccy, txin_value):
"""
Acquisition price of a coin.
This assumes that either all inputs are mine, or no input is mine.
"""
if txin_value is None:
return Decimal('NaN')
cache_key = "{}:{}:{}".format(str(txid), str(ccy), str(txin_value))
result = self.coin_price_cache.get(cache_key, None)
if result is not None:
return result
if self.txi.get(txid, {}) != {}:
result = self.average_price(txid, price_func, ccy) * txin_value/Decimal(COIN)
self.coin_price_cache[cache_key] = result
return result
else:
fiat_value = self.get_fiat_value(txid, ccy)
if fiat_value is not None:
return fiat_value
else:
p = self.price_at_timestamp(txid, price_func)
return p * txin_value/Decimal(COIN)
def is_billing_address(self, addr):
# overloaded for TrustedCoin wallets
return False
class Simple_Wallet(Abstract_Wallet):
# wallet with a single keystore
def get_keystore(self):
return self.keystore
def get_keystores(self):
return [self.keystore]
def is_watching_only(self):
return self.keystore.is_watching_only()
def _update_password_for_keystore(self, old_pw, new_pw):
if self.keystore and self.keystore.may_have_password():
self.keystore.update_password(old_pw, new_pw)
self.save_keystore()
def save_keystore(self):
self.storage.put('keystore', self.keystore.dump())
class Imported_Wallet(Simple_Wallet):
# wallet made of imported addresses
wallet_type = 'imported'
txin_type = 'address'
def __init__(self, storage):
Abstract_Wallet.__init__(self, storage)
def is_watching_only(self):
return self.keystore is None
def get_keystores(self):
return [self.keystore] if self.keystore else []
def can_import_privkey(self):
return bool(self.keystore)
def load_keystore(self):
self.keystore = load_keystore(self.storage, 'keystore') if self.storage.get('keystore') else None
def save_keystore(self):
self.storage.put('keystore', self.keystore.dump())
def load_addresses(self):
self.addresses = self.storage.get('addresses', {})
# fixme: a reference to addresses is needed
if self.keystore:
self.keystore.addresses = self.addresses
def save_addresses(self):
self.storage.put('addresses', self.addresses)
def can_import_address(self):
return self.is_watching_only()
def can_delete_address(self):
return True
def has_seed(self):
return False
def is_deterministic(self):
return False
def is_change(self, address):
return False
def get_master_public_keys(self):
return []
def is_beyond_limit(self, address):
return False
def is_mine(self, address):
return address in self.addresses
def get_fingerprint(self):
return ''
def get_addresses(self, include_change=False):
return sorted(self.addresses.keys())
def get_receiving_addresses(self):
return self.get_addresses()
def get_change_addresses(self):
return []
def import_address(self, address):
if not bitcoin.is_address(address):
return ''
if address in self.addresses:
return ''
self.addresses[address] = {}
self.storage.put('addresses', self.addresses)
self.storage.write()
self.add_address(address)
return address
def delete_address(self, address):
if address not in self.addresses:
return
transactions_to_remove = set() # only referred to by this address
transactions_new = set() # txs that are not only referred to by address
with self.lock:
for addr, details in self.history.items():
if addr == address:
for tx_hash, height in details:
transactions_to_remove.add(tx_hash)
else:
for tx_hash, height in details:
transactions_new.add(tx_hash)
transactions_to_remove -= transactions_new
self.history.pop(address, None)
for tx_hash in transactions_to_remove:
self.remove_transaction(tx_hash)
self.tx_fees.pop(tx_hash, None)
self.verified_tx.pop(tx_hash, None)
self.unverified_tx.pop(tx_hash, None)
self.transactions.pop(tx_hash, None)
self.storage.put('verified_tx3', self.verified_tx)
self.save_transactions()
self.set_label(address, None)
self.remove_payment_request(address, {})
self.set_frozen_state([address], False)
pubkey = self.get_public_key(address)
self.addresses.pop(address)
if pubkey:
# delete key iff no other address uses it (e.g. p2pkh and p2wpkh for same key)
for txin_type in bitcoin.WIF_SCRIPT_TYPES.keys():
try:
addr2 = bitcoin.pubkey_to_address(txin_type, pubkey)
except NotImplementedError:
pass
else:
if addr2 in self.addresses:
break
else:
self.keystore.delete_imported_key(pubkey)
self.save_keystore()
self.storage.put('addresses', self.addresses)
self.storage.write()
def get_address_index(self, address):
return self.get_public_key(address)
def get_public_key(self, address):
return self.addresses[address].get('pubkey')
def import_private_key(self, sec, pw, redeem_script=None):
try:
txin_type, pubkey = self.keystore.import_privkey(sec, pw)
except Exception:
neutered_privkey = str(sec)[:3] + '..' + str(sec)[-2:]
raise BitcoinException('Invalid private key: {}'.format(neutered_privkey))
if txin_type in ['p2pkh', 'p2wpkh', 'p2wpkh-p2sh']:
if redeem_script is not None:
raise BitcoinException('Cannot use redeem script with script type {}'.format(txin_type))
addr = bitcoin.pubkey_to_address(txin_type, pubkey)
elif txin_type in ['p2sh', 'p2wsh', 'p2wsh-p2sh']:
if redeem_script is None:
raise BitcoinException('Redeem script required for script type {}'.format(txin_type))
addr = bitcoin.redeem_script_to_address(txin_type, redeem_script)
else:
raise NotImplementedError(txin_type)
self.addresses[addr] = {'type':txin_type, 'pubkey':pubkey, 'redeem_script':redeem_script}
self.save_keystore()
self.save_addresses()
self.storage.write()
self.add_address(addr)
return addr
def get_redeem_script(self, address):
d = self.addresses[address]
redeem_script = d['redeem_script']
return redeem_script
def get_txin_type(self, address):
return self.addresses[address].get('type', 'address')
def add_input_sig_info(self, txin, address):
if self.is_watching_only():
x_pubkey = 'fd' + address_to_script(address)
txin['x_pubkeys'] = [x_pubkey]
txin['signatures'] = [None]
return
if txin['type'] in ['p2pkh', 'p2wpkh', 'p2wpkh-p2sh']:
pubkey = self.addresses[address]['pubkey']
txin['num_sig'] = 1
txin['x_pubkeys'] = [pubkey]
txin['signatures'] = [None]
else:
raise NotImplementedError('imported wallets for p2sh are not implemented')
def pubkeys_to_address(self, pubkey):
for addr, v in self.addresses.items():
if v.get('pubkey') == pubkey:
return addr
class Deterministic_Wallet(Abstract_Wallet):
def __init__(self, storage):
Abstract_Wallet.__init__(self, storage)
self.gap_limit = storage.get('gap_limit', 20)
def has_seed(self):
return self.keystore.has_seed()
def get_receiving_addresses(self):
return self.receiving_addresses
def get_change_addresses(self):
return self.change_addresses
def get_seed(self, password):
return self.keystore.get_seed(password)
def add_seed(self, seed, pw):
self.keystore.add_seed(seed, pw)
def change_gap_limit(self, value):
'''This method is not called in the code, it is kept for console use'''
if value >= self.gap_limit:
self.gap_limit = value
self.storage.put('gap_limit', self.gap_limit)
return True
elif value >= self.min_acceptable_gap():
addresses = self.get_receiving_addresses()
k = self.num_unused_trailing_addresses(addresses)
n = len(addresses) - k + value
self.receiving_addresses = self.receiving_addresses[0:n]
self.gap_limit = value
self.storage.put('gap_limit', self.gap_limit)
self.save_addresses()
return True
else:
return False
def num_unused_trailing_addresses(self, addresses):
k = 0
for a in addresses[::-1]:
            if self.history.get(a): break
k = k + 1
return k
def min_acceptable_gap(self):
# fixme: this assumes wallet is synchronized
n = 0
nmax = 0
addresses = self.get_receiving_addresses()
k = self.num_unused_trailing_addresses(addresses)
for a in addresses[0:-k]:
if self.history.get(a):
n = 0
else:
n += 1
if n > nmax: nmax = n
return nmax + 1
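    # Illustrative sketch (hypothetical history flags): for receiving addresses
    # [used, used, empty, empty, used, empty], the trailing unused tail has
    # length 1, the longest unused run before it is 2, so min_acceptable_gap()
    # returns 3 and change_gap_limit(3) would still be accepted.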
def load_addresses(self):
super().load_addresses()
self._addr_to_addr_index = {} # key: address, value: (is_change, index)
for i, addr in enumerate(self.receiving_addresses):
self._addr_to_addr_index[addr] = (False, i)
for i, addr in enumerate(self.change_addresses):
self._addr_to_addr_index[addr] = (True, i)
def create_new_address(self, for_change=False):
assert type(for_change) is bool
with self.lock:
addr_list = self.change_addresses if for_change else self.receiving_addresses
n = len(addr_list)
x = self.derive_pubkeys(for_change, n)
address = self.pubkeys_to_address(x)
addr_list.append(address)
self._addr_to_addr_index[address] = (for_change, n)
self.save_addresses()
self.add_address(address)
return address
def synchronize_sequence(self, for_change):
limit = self.gap_limit_for_change if for_change else self.gap_limit
while True:
addresses = self.get_change_addresses() if for_change else self.get_receiving_addresses()
if len(addresses) < limit:
self.create_new_address(for_change)
continue
if list(map(lambda a: self.address_is_old(a), addresses[-limit:] )) == limit*[False]:
break
else:
self.create_new_address(for_change)
def synchronize(self):
with self.lock:
self.synchronize_sequence(False)
self.synchronize_sequence(True)
def is_beyond_limit(self, address):
is_change, i = self.get_address_index(address)
addr_list = self.get_change_addresses() if is_change else self.get_receiving_addresses()
limit = self.gap_limit_for_change if is_change else self.gap_limit
if i < limit:
return False
prev_addresses = addr_list[max(0, i - limit):max(0, i)]
for addr in prev_addresses:
if self.history.get(addr):
return False
return True
def is_mine(self, address):
return address in self._addr_to_addr_index
def get_address_index(self, address):
return self._addr_to_addr_index[address]
def get_master_public_keys(self):
return [self.get_master_public_key()]
def get_fingerprint(self):
return self.get_master_public_key()
def get_txin_type(self, address):
return self.txin_type
class Simple_Deterministic_Wallet(Simple_Wallet, Deterministic_Wallet):
""" Deterministic Wallet with a single pubkey per address """
def __init__(self, storage):
Deterministic_Wallet.__init__(self, storage)
def get_public_key(self, address):
sequence = self.get_address_index(address)
pubkey = self.get_pubkey(*sequence)
return pubkey
def load_keystore(self):
self.keystore = load_keystore(self.storage, 'keystore')
try:
xtype = bitcoin.xpub_type(self.keystore.xpub)
except:
xtype = 'standard'
self.txin_type = 'p2pkh' if xtype == 'standard' else xtype
def get_pubkey(self, c, i):
return self.derive_pubkeys(c, i)
def add_input_sig_info(self, txin, address):
derivation = self.get_address_index(address)
x_pubkey = self.keystore.get_xpubkey(*derivation)
txin['x_pubkeys'] = [x_pubkey]
txin['signatures'] = [None]
txin['num_sig'] = 1
def get_master_public_key(self):
return self.keystore.get_master_public_key()
def derive_pubkeys(self, c, i):
return self.keystore.derive_pubkey(c, i)
class Standard_Wallet(Simple_Deterministic_Wallet):
wallet_type = 'standard'
def pubkeys_to_address(self, pubkey):
return bitcoin.pubkey_to_address(self.txin_type, pubkey)
class Multisig_Wallet(Deterministic_Wallet):
# generic m of n
gap_limit = 20
def __init__(self, storage):
self.wallet_type = storage.get('wallet_type')
self.m, self.n = multisig_type(self.wallet_type)
Deterministic_Wallet.__init__(self, storage)
def get_pubkeys(self, c, i):
return self.derive_pubkeys(c, i)
def get_public_keys(self, address):
sequence = self.get_address_index(address)
return self.get_pubkeys(*sequence)
def pubkeys_to_address(self, pubkeys):
redeem_script = self.pubkeys_to_redeem_script(pubkeys)
return bitcoin.redeem_script_to_address(self.txin_type, redeem_script)
def pubkeys_to_redeem_script(self, pubkeys):
return transaction.multisig_script(sorted(pubkeys), self.m)
def get_redeem_script(self, address):
pubkeys = self.get_public_keys(address)
redeem_script = self.pubkeys_to_redeem_script(pubkeys)
return redeem_script
def derive_pubkeys(self, c, i):
return [k.derive_pubkey(c, i) for k in self.get_keystores()]
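    # Illustrative sketch for a hypothetical 2-of-3 wallet: derive_pubkeys()
    # returns one pubkey per cosigner keystore; pubkeys_to_redeem_script()
    # sorts them into a 2-of-3 OP_CHECKMULTISIG script, and pubkeys_to_address()
    # hashes that script into a p2sh/p2wsh address, so every cosigner derives
    # the same address sequence independently.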
def load_keystore(self):
self.keystores = {}
for i in range(self.n):
name = 'x%d/'%(i+1)
self.keystores[name] = load_keystore(self.storage, name)
self.keystore = self.keystores['x1/']
xtype = bitcoin.xpub_type(self.keystore.xpub)
self.txin_type = 'p2sh' if xtype == 'standard' else xtype
def save_keystore(self):
for name, k in self.keystores.items():
self.storage.put(name, k.dump())
def get_keystore(self):
return self.keystores.get('x1/')
def get_keystores(self):
return [self.keystores[i] for i in sorted(self.keystores.keys())]
def can_have_keystore_encryption(self):
return any([k.may_have_password() for k in self.get_keystores()])
def _update_password_for_keystore(self, old_pw, new_pw):
for name, keystore in self.keystores.items():
if keystore.may_have_password():
keystore.update_password(old_pw, new_pw)
self.storage.put(name, keystore.dump())
def check_password(self, password):
for name, keystore in self.keystores.items():
if keystore.may_have_password():
keystore.check_password(password)
self.storage.check_password(password)
def get_available_storage_encryption_version(self):
# multisig wallets are not offered hw device encryption
return STO_EV_USER_PW
def has_seed(self):
return self.keystore.has_seed()
def is_watching_only(self):
        return all(k.is_watching_only() for k in self.get_keystores())
def get_master_public_key(self):
return self.keystore.get_master_public_key()
def get_master_public_keys(self):
return [k.get_master_public_key() for k in self.get_keystores()]
def get_fingerprint(self):
return ''.join(sorted(self.get_master_public_keys()))
def add_input_sig_info(self, txin, address):
# x_pubkeys are not sorted here because it would be too slow
# they are sorted in transaction.get_sorted_pubkeys
# pubkeys is set to None to signal that x_pubkeys are unsorted
derivation = self.get_address_index(address)
x_pubkeys_expected = [k.get_xpubkey(*derivation) for k in self.get_keystores()]
x_pubkeys_actual = txin.get('x_pubkeys')
# if 'x_pubkeys' is already set correctly (ignoring order, as above), leave it.
# otherwise we might delete signatures
if x_pubkeys_actual and set(x_pubkeys_actual) == set(x_pubkeys_expected):
return
txin['x_pubkeys'] = x_pubkeys_expected
txin['pubkeys'] = None
# we need n place holders
txin['signatures'] = [None] * self.n
txin['num_sig'] = self.m
wallet_types = ['standard', 'multisig', 'imported']
def register_wallet_type(category):
wallet_types.append(category)
wallet_constructors = {
'standard': Standard_Wallet,
'old': Standard_Wallet,
'xpub': Standard_Wallet,
'imported': Imported_Wallet
}
def register_constructor(wallet_type, constructor):
wallet_constructors[wallet_type] = constructor
# former WalletFactory
class Wallet(object):
"""The main wallet "entry point".
This class is actually a factory that will return a wallet of the correct
type when passed a WalletStorage instance."""
def __new__(self, storage):
wallet_type = storage.get('wallet_type')
WalletClass = Wallet.wallet_class(wallet_type)
wallet = WalletClass(storage)
# Convert hardware wallets restored with older versions of
# Electrum to BIP44 wallets. A hardware wallet does not have
# a seed and plugins do not need to handle having one.
rwc = getattr(wallet, 'restore_wallet_class', None)
if rwc and storage.get('seed', ''):
storage.print_error("converting wallet type to " + rwc.wallet_type)
storage.put('wallet_type', rwc.wallet_type)
wallet = rwc(storage)
return wallet
@staticmethod
def wallet_class(wallet_type):
if multisig_type(wallet_type):
return Multisig_Wallet
if wallet_type in wallet_constructors:
return wallet_constructors[wallet_type]
raise RuntimeError("Unknown wallet type: " + str(wallet_type))
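# Illustrative usage sketch (hypothetical path; WalletStorage as referenced in
# the docstring above):
#   storage = WalletStorage('/path/to/wallet_file')
#   wallet = Wallet(storage)        # Standard_Wallet, Multisig_Wallet, ...
#   wallet.start_threads(network)
# Multisig types such as '2of3' dispatch to Multisig_Wallet; unknown types
# raise RuntimeError.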
| 38.551869
| 150
| 0.587023
|
40aa2dca22ddd651bd4f80c6bfa204554df39d2c
| 4,909
|
py
|
Python
|
talon_one/models/inline_response20015.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-03-05T06:41:26.000Z
|
2021-03-05T06:41:26.000Z
|
talon_one/models/inline_response20015.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-09-07T08:56:58.000Z
|
2021-09-07T08:56:58.000Z
|
talon_one/models/inline_response20015.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2019-05-21T10:27:54.000Z
|
2019-05-21T10:27:54.000Z
|
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class InlineResponse20015(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'has_more': 'bool',
'data': 'list[CustomerActivityReport]'
}
attribute_map = {
'has_more': 'hasMore',
'data': 'data'
}
def __init__(self, has_more=None, data=None, local_vars_configuration=None): # noqa: E501
"""InlineResponse20015 - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._has_more = None
self._data = None
self.discriminator = None
self.has_more = has_more
self.data = data
@property
def has_more(self):
"""Gets the has_more of this InlineResponse20015. # noqa: E501
:return: The has_more of this InlineResponse20015. # noqa: E501
:rtype: bool
"""
return self._has_more
@has_more.setter
def has_more(self, has_more):
"""Sets the has_more of this InlineResponse20015.
:param has_more: The has_more of this InlineResponse20015. # noqa: E501
:type: bool
"""
if self.local_vars_configuration.client_side_validation and has_more is None: # noqa: E501
raise ValueError("Invalid value for `has_more`, must not be `None`") # noqa: E501
self._has_more = has_more
@property
def data(self):
"""Gets the data of this InlineResponse20015. # noqa: E501
:return: The data of this InlineResponse20015. # noqa: E501
:rtype: list[CustomerActivityReport]
"""
return self._data
@data.setter
def data(self, data):
"""Sets the data of this InlineResponse20015.
:param data: The data of this InlineResponse20015. # noqa: E501
:type: list[CustomerActivityReport]
"""
if self.local_vars_configuration.client_side_validation and data is None: # noqa: E501
raise ValueError("Invalid value for `data`, must not be `None`") # noqa: E501
self._data = data
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, InlineResponse20015):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, InlineResponse20015):
return True
return self.to_dict() != other.to_dict()
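# Usage sketch for the generated model above. The values are placeholders; in
# practice `data` would hold CustomerActivityReport instances returned by the
# Talon.One API client rather than an empty list.
def _inline_response_sketch():  # illustrative only
    resp = InlineResponse20015(has_more=False, data=[])
    assert resp == InlineResponse20015(has_more=False, data=[])
    return resp.to_dict()  # {'has_more': False, 'data': []}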
| 32.946309
| 647
| 0.614382
|
14134f0108c740e30fb4ea423a032590f49fa6f3
| 1,967
|
py
|
Python
|
examples/optimal_time_ocp/pendulum_min_time_Mayer.py
|
vennand/BiorbdOptim
|
9c741da6366e0c9d5d727da6297a17a599759eb1
|
[
"MIT"
] | null | null | null |
examples/optimal_time_ocp/pendulum_min_time_Mayer.py
|
vennand/BiorbdOptim
|
9c741da6366e0c9d5d727da6297a17a599759eb1
|
[
"MIT"
] | null | null | null |
examples/optimal_time_ocp/pendulum_min_time_Mayer.py
|
vennand/BiorbdOptim
|
9c741da6366e0c9d5d727da6297a17a599759eb1
|
[
"MIT"
] | null | null | null |
import biorbd
from bioptim import (
OptimalControlProgram,
DynamicsTypeList,
DynamicsType,
ObjectiveList,
Objective,
BoundsList,
QAndQDotBounds,
InitialGuessList,
ShowResult,
Data,
)
def prepare_ocp(biorbd_model_path, final_time, number_shooting_points):
# --- Options --- #
biorbd_model = biorbd.Model(biorbd_model_path)
tau_min, tau_max, tau_init = -100, 100, 0
n_q = biorbd_model.nbQ()
n_qdot = biorbd_model.nbQdot()
n_tau = biorbd_model.nbGeneralizedTorque()
# Add objective functions
objective_functions = ObjectiveList()
objective_functions.add(Objective.Mayer.MINIMIZE_TIME)
# Dynamics
dynamics = DynamicsTypeList()
dynamics.add(DynamicsType.TORQUE_DRIVEN)
# Path constraint
x_bounds = BoundsList()
x_bounds.add(QAndQDotBounds(biorbd_model))
x_bounds[0][:, [0, -1]] = 0
x_bounds[0][n_q - 1, -1] = 3.14
# Initial guess
x_init = InitialGuessList()
x_init.add([0] * (n_q + n_qdot))
# Define control path constraint
u_bounds = BoundsList()
u_bounds.add([[tau_min] * n_tau, [tau_max] * n_tau])
u_bounds[0][n_tau - 1, :] = 0
u_init = InitialGuessList()
u_init.add([tau_init] * n_tau)
# ------------- #
return OptimalControlProgram(
biorbd_model,
dynamics,
number_shooting_points,
final_time,
x_init,
u_init,
x_bounds,
u_bounds,
objective_functions,
)
if __name__ == "__main__":
ocp = prepare_ocp(biorbd_model_path="pendulum.bioMod", final_time=2, number_shooting_points=50)
# --- Solve the program --- #
sol = ocp.solve(show_online_optim=True)
# --- Show results --- #
param = Data.get_data(ocp, sol["x"], get_states=False, get_controls=False, get_parameters=True)
print(f"The optimized phase time is: {param['time'][0, 0]}, good job Mayer!")
result = ShowResult(ocp, sol)
result.animate()
| 25.217949
| 99
| 0.648195
|
b8ed8d951e93628e877377ee276bce31f1889b1e
| 1,028
|
py
|
Python
|
stubs.min/System/Windows/Media/__init___parts/TileMode.py
|
ricardyn/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | 1
|
2021-02-02T13:39:16.000Z
|
2021-02-02T13:39:16.000Z
|
stubs.min/System/Windows/Media/__init___parts/TileMode.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
stubs.min/System/Windows/Media/__init___parts/TileMode.py
|
hdm-dt-fb/ironpython-stubs
|
4d2b405eda3ceed186e8adca55dd97c332c6f49d
|
[
"MIT"
] | null | null | null |
class TileMode(Enum,IComparable,IFormattable,IConvertible):
"""
Describes how a System.Windows.Media.TileBrush paints tiles onto an output area.
enum TileMode,values: FlipX (1),FlipXY (3),FlipY (2),None (0),Tile (4)
"""
def __eq__(self,*args):
""" x.__eq__(y) <==> x==yx.__eq__(y) <==> x==yx.__eq__(y) <==> x==y """
pass
def __format__(self,*args):
""" __format__(formattable: IFormattable,format: str) -> str """
pass
def __ge__(self,*args):
pass
def __gt__(self,*args):
pass
def __init__(self,*args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
def __le__(self,*args):
pass
def __lt__(self,*args):
pass
def __ne__(self,*args):
pass
def __reduce_ex__(self,*args):
pass
def __str__(self,*args):
pass
FlipX=None
FlipXY=None
FlipY=None
 # 'None' is a reserved constant in Python, so the .NET enum member
 # TileMode.None (value 0) cannot be declared as an attribute in this stub.
Tile=None
value__=None
| 27.783784
| 215
| 0.660506
|
77c9b1262e28e99e277ed8ddb3f0512b09c81605
| 17,198
|
py
|
Python
|
sdk/python/pulumi_gcp/compute/get_backend_service.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 121
|
2018-06-18T19:16:42.000Z
|
2022-03-31T06:06:48.000Z
|
sdk/python/pulumi_gcp/compute/get_backend_service.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 492
|
2018-06-22T19:41:03.000Z
|
2022-03-31T15:33:53.000Z
|
sdk/python/pulumi_gcp/compute/get_backend_service.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | 43
|
2018-06-19T01:43:13.000Z
|
2022-03-23T22:43:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetBackendServiceResult',
'AwaitableGetBackendServiceResult',
'get_backend_service',
'get_backend_service_output',
]
@pulumi.output_type
class GetBackendServiceResult:
"""
A collection of values returned by getBackendService.
"""
def __init__(__self__, affinity_cookie_ttl_sec=None, backends=None, cdn_policies=None, circuit_breakers=None, connection_draining_timeout_sec=None, consistent_hash=None, creation_timestamp=None, custom_request_headers=None, custom_response_headers=None, description=None, enable_cdn=None, fingerprint=None, health_checks=None, iaps=None, id=None, load_balancing_scheme=None, locality_lb_policy=None, log_configs=None, name=None, outlier_detections=None, port_name=None, project=None, protocol=None, security_policy=None, security_settings=None, self_link=None, session_affinity=None, timeout_sec=None):
if affinity_cookie_ttl_sec and not isinstance(affinity_cookie_ttl_sec, int):
raise TypeError("Expected argument 'affinity_cookie_ttl_sec' to be a int")
pulumi.set(__self__, "affinity_cookie_ttl_sec", affinity_cookie_ttl_sec)
if backends and not isinstance(backends, list):
raise TypeError("Expected argument 'backends' to be a list")
pulumi.set(__self__, "backends", backends)
if cdn_policies and not isinstance(cdn_policies, list):
raise TypeError("Expected argument 'cdn_policies' to be a list")
pulumi.set(__self__, "cdn_policies", cdn_policies)
if circuit_breakers and not isinstance(circuit_breakers, list):
raise TypeError("Expected argument 'circuit_breakers' to be a list")
pulumi.set(__self__, "circuit_breakers", circuit_breakers)
if connection_draining_timeout_sec and not isinstance(connection_draining_timeout_sec, int):
raise TypeError("Expected argument 'connection_draining_timeout_sec' to be a int")
pulumi.set(__self__, "connection_draining_timeout_sec", connection_draining_timeout_sec)
if consistent_hash and not isinstance(consistent_hash, list):
raise TypeError("Expected argument 'consistent_hash' to be a list")
pulumi.set(__self__, "consistent_hash", consistent_hash)
if creation_timestamp and not isinstance(creation_timestamp, str):
raise TypeError("Expected argument 'creation_timestamp' to be a str")
pulumi.set(__self__, "creation_timestamp", creation_timestamp)
if custom_request_headers and not isinstance(custom_request_headers, list):
raise TypeError("Expected argument 'custom_request_headers' to be a list")
pulumi.set(__self__, "custom_request_headers", custom_request_headers)
if custom_response_headers and not isinstance(custom_response_headers, list):
raise TypeError("Expected argument 'custom_response_headers' to be a list")
pulumi.set(__self__, "custom_response_headers", custom_response_headers)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if enable_cdn and not isinstance(enable_cdn, bool):
raise TypeError("Expected argument 'enable_cdn' to be a bool")
pulumi.set(__self__, "enable_cdn", enable_cdn)
if fingerprint and not isinstance(fingerprint, str):
raise TypeError("Expected argument 'fingerprint' to be a str")
pulumi.set(__self__, "fingerprint", fingerprint)
if health_checks and not isinstance(health_checks, list):
raise TypeError("Expected argument 'health_checks' to be a list")
pulumi.set(__self__, "health_checks", health_checks)
if iaps and not isinstance(iaps, list):
raise TypeError("Expected argument 'iaps' to be a list")
pulumi.set(__self__, "iaps", iaps)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if load_balancing_scheme and not isinstance(load_balancing_scheme, str):
raise TypeError("Expected argument 'load_balancing_scheme' to be a str")
pulumi.set(__self__, "load_balancing_scheme", load_balancing_scheme)
if locality_lb_policy and not isinstance(locality_lb_policy, str):
raise TypeError("Expected argument 'locality_lb_policy' to be a str")
pulumi.set(__self__, "locality_lb_policy", locality_lb_policy)
if log_configs and not isinstance(log_configs, list):
raise TypeError("Expected argument 'log_configs' to be a list")
pulumi.set(__self__, "log_configs", log_configs)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if outlier_detections and not isinstance(outlier_detections, list):
raise TypeError("Expected argument 'outlier_detections' to be a list")
pulumi.set(__self__, "outlier_detections", outlier_detections)
if port_name and not isinstance(port_name, str):
raise TypeError("Expected argument 'port_name' to be a str")
pulumi.set(__self__, "port_name", port_name)
if project and not isinstance(project, str):
raise TypeError("Expected argument 'project' to be a str")
pulumi.set(__self__, "project", project)
if protocol and not isinstance(protocol, str):
raise TypeError("Expected argument 'protocol' to be a str")
pulumi.set(__self__, "protocol", protocol)
if security_policy and not isinstance(security_policy, str):
raise TypeError("Expected argument 'security_policy' to be a str")
pulumi.set(__self__, "security_policy", security_policy)
if security_settings and not isinstance(security_settings, list):
raise TypeError("Expected argument 'security_settings' to be a list")
pulumi.set(__self__, "security_settings", security_settings)
if self_link and not isinstance(self_link, str):
raise TypeError("Expected argument 'self_link' to be a str")
pulumi.set(__self__, "self_link", self_link)
if session_affinity and not isinstance(session_affinity, str):
raise TypeError("Expected argument 'session_affinity' to be a str")
pulumi.set(__self__, "session_affinity", session_affinity)
if timeout_sec and not isinstance(timeout_sec, int):
raise TypeError("Expected argument 'timeout_sec' to be a int")
pulumi.set(__self__, "timeout_sec", timeout_sec)
@property
@pulumi.getter(name="affinityCookieTtlSec")
def affinity_cookie_ttl_sec(self) -> int:
return pulumi.get(self, "affinity_cookie_ttl_sec")
@property
@pulumi.getter
def backends(self) -> Sequence['outputs.GetBackendServiceBackendResult']:
"""
The set of backends that serve this Backend Service.
"""
return pulumi.get(self, "backends")
@property
@pulumi.getter(name="cdnPolicies")
def cdn_policies(self) -> Sequence['outputs.GetBackendServiceCdnPolicyResult']:
return pulumi.get(self, "cdn_policies")
@property
@pulumi.getter(name="circuitBreakers")
def circuit_breakers(self) -> Sequence['outputs.GetBackendServiceCircuitBreakerResult']:
return pulumi.get(self, "circuit_breakers")
@property
@pulumi.getter(name="connectionDrainingTimeoutSec")
def connection_draining_timeout_sec(self) -> int:
"""
        Time for which the instance will be drained (it will not accept new connections, but will still finish requests already started).
"""
return pulumi.get(self, "connection_draining_timeout_sec")
@property
@pulumi.getter(name="consistentHash")
def consistent_hash(self) -> Sequence['outputs.GetBackendServiceConsistentHashResult']:
return pulumi.get(self, "consistent_hash")
@property
@pulumi.getter(name="creationTimestamp")
def creation_timestamp(self) -> str:
return pulumi.get(self, "creation_timestamp")
@property
@pulumi.getter(name="customRequestHeaders")
def custom_request_headers(self) -> Sequence[str]:
return pulumi.get(self, "custom_request_headers")
@property
@pulumi.getter(name="customResponseHeaders")
def custom_response_headers(self) -> Sequence[str]:
return pulumi.get(self, "custom_response_headers")
@property
@pulumi.getter
def description(self) -> str:
"""
Textual description for the Backend Service.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="enableCdn")
def enable_cdn(self) -> bool:
"""
Whether or not Cloud CDN is enabled on the Backend Service.
"""
return pulumi.get(self, "enable_cdn")
@property
@pulumi.getter
def fingerprint(self) -> str:
"""
The fingerprint of the Backend Service.
"""
return pulumi.get(self, "fingerprint")
@property
@pulumi.getter(name="healthChecks")
def health_checks(self) -> Sequence[str]:
"""
The set of HTTP/HTTPS health checks used by the Backend Service.
"""
return pulumi.get(self, "health_checks")
@property
@pulumi.getter
def iaps(self) -> Sequence['outputs.GetBackendServiceIapResult']:
return pulumi.get(self, "iaps")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="loadBalancingScheme")
def load_balancing_scheme(self) -> str:
return pulumi.get(self, "load_balancing_scheme")
@property
@pulumi.getter(name="localityLbPolicy")
def locality_lb_policy(self) -> str:
return pulumi.get(self, "locality_lb_policy")
@property
@pulumi.getter(name="logConfigs")
def log_configs(self) -> Sequence['outputs.GetBackendServiceLogConfigResult']:
return pulumi.get(self, "log_configs")
@property
@pulumi.getter
def name(self) -> str:
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outlierDetections")
def outlier_detections(self) -> Sequence['outputs.GetBackendServiceOutlierDetectionResult']:
return pulumi.get(self, "outlier_detections")
@property
@pulumi.getter(name="portName")
def port_name(self) -> str:
"""
The name of a service that has been added to an instance group in this backend.
"""
return pulumi.get(self, "port_name")
@property
@pulumi.getter
def project(self) -> Optional[str]:
return pulumi.get(self, "project")
@property
@pulumi.getter
def protocol(self) -> str:
"""
The protocol for incoming requests.
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter(name="securityPolicy")
def security_policy(self) -> str:
return pulumi.get(self, "security_policy")
@property
@pulumi.getter(name="securitySettings")
def security_settings(self) -> Sequence['outputs.GetBackendServiceSecuritySettingResult']:
return pulumi.get(self, "security_settings")
@property
@pulumi.getter(name="selfLink")
def self_link(self) -> str:
"""
The URI of the Backend Service.
"""
return pulumi.get(self, "self_link")
@property
@pulumi.getter(name="sessionAffinity")
def session_affinity(self) -> str:
"""
The Backend Service session stickiness configuration.
"""
return pulumi.get(self, "session_affinity")
@property
@pulumi.getter(name="timeoutSec")
def timeout_sec(self) -> int:
"""
The number of seconds to wait for a backend to respond to a request before considering the request failed.
"""
return pulumi.get(self, "timeout_sec")
class AwaitableGetBackendServiceResult(GetBackendServiceResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetBackendServiceResult(
affinity_cookie_ttl_sec=self.affinity_cookie_ttl_sec,
backends=self.backends,
cdn_policies=self.cdn_policies,
circuit_breakers=self.circuit_breakers,
connection_draining_timeout_sec=self.connection_draining_timeout_sec,
consistent_hash=self.consistent_hash,
creation_timestamp=self.creation_timestamp,
custom_request_headers=self.custom_request_headers,
custom_response_headers=self.custom_response_headers,
description=self.description,
enable_cdn=self.enable_cdn,
fingerprint=self.fingerprint,
health_checks=self.health_checks,
iaps=self.iaps,
id=self.id,
load_balancing_scheme=self.load_balancing_scheme,
locality_lb_policy=self.locality_lb_policy,
log_configs=self.log_configs,
name=self.name,
outlier_detections=self.outlier_detections,
port_name=self.port_name,
project=self.project,
protocol=self.protocol,
security_policy=self.security_policy,
security_settings=self.security_settings,
self_link=self.self_link,
session_affinity=self.session_affinity,
timeout_sec=self.timeout_sec)
def get_backend_service(name: Optional[str] = None,
project: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetBackendServiceResult:
"""
Provide access to a Backend Service's attribute. For more information
see [the official documentation](https://cloud.google.com/compute/docs/load-balancing/http/backend-service)
and the [API](https://cloud.google.com/compute/docs/reference/latest/backendServices).
:param str name: The name of the Backend Service.
:param str project: The project in which the resource belongs. If it is not provided, the provider project is used.
"""
__args__ = dict()
__args__['name'] = name
__args__['project'] = project
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('gcp:compute/getBackendService:getBackendService', __args__, opts=opts, typ=GetBackendServiceResult).value
return AwaitableGetBackendServiceResult(
affinity_cookie_ttl_sec=__ret__.affinity_cookie_ttl_sec,
backends=__ret__.backends,
cdn_policies=__ret__.cdn_policies,
circuit_breakers=__ret__.circuit_breakers,
connection_draining_timeout_sec=__ret__.connection_draining_timeout_sec,
consistent_hash=__ret__.consistent_hash,
creation_timestamp=__ret__.creation_timestamp,
custom_request_headers=__ret__.custom_request_headers,
custom_response_headers=__ret__.custom_response_headers,
description=__ret__.description,
enable_cdn=__ret__.enable_cdn,
fingerprint=__ret__.fingerprint,
health_checks=__ret__.health_checks,
iaps=__ret__.iaps,
id=__ret__.id,
load_balancing_scheme=__ret__.load_balancing_scheme,
locality_lb_policy=__ret__.locality_lb_policy,
log_configs=__ret__.log_configs,
name=__ret__.name,
outlier_detections=__ret__.outlier_detections,
port_name=__ret__.port_name,
project=__ret__.project,
protocol=__ret__.protocol,
security_policy=__ret__.security_policy,
security_settings=__ret__.security_settings,
self_link=__ret__.self_link,
session_affinity=__ret__.session_affinity,
timeout_sec=__ret__.timeout_sec)
@_utilities.lift_output_func(get_backend_service)
def get_backend_service_output(name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[Optional[str]]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetBackendServiceResult]:
"""
Provide access to a Backend Service's attribute. For more information
see [the official documentation](https://cloud.google.com/compute/docs/load-balancing/http/backend-service)
and the [API](https://cloud.google.com/compute/docs/reference/latest/backendServices).
:param str name: The name of the Backend Service.
:param str project: The project in which the resource belongs. If it is not provided, the provider project is used.
"""
...
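# Usage sketch for the data source above, as it would appear inside a Pulumi
# program. The backend service name is a placeholder and must refer to a
# service that already exists in the active GCP project.
def _lookup_backend_sketch():  # illustrative only
    result = get_backend_service(name="my-backend-service")
    return result.self_link, result.health_checks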
| 44.210797
| 606
| 0.690662
|
22c91c0c6eb375339fc4bfae421afa9164d012ce
| 886
|
py
|
Python
|
tests/integration_tests/builder_tests/test_init_beach_line.py
|
lycantropos/voronoi
|
977e0b3e5eff2dd294e2e6ce1a8030c763e86233
|
[
"MIT"
] | null | null | null |
tests/integration_tests/builder_tests/test_init_beach_line.py
|
lycantropos/voronoi
|
977e0b3e5eff2dd294e2e6ce1a8030c763e86233
|
[
"MIT"
] | null | null | null |
tests/integration_tests/builder_tests/test_init_beach_line.py
|
lycantropos/voronoi
|
977e0b3e5eff2dd294e2e6ce1a8030c763e86233
|
[
"MIT"
] | null | null | null |
from hypothesis import given
from tests.integration_tests.hints import BoundPortedBuildersWithDiagramsPair
from tests.integration_tests.utils import (are_bound_ported_builders_equal,
are_bound_ported_diagrams_equal)
from tests.utils import equivalence
from . import strategies
@given(strategies.initialized_valid_builders_with_diagrams_pairs)
def test_basic(builders_with_diagrams_pair: BoundPortedBuildersWithDiagramsPair
) -> None:
((bound, bound_diagram),
(ported, ported_diagram)) = builders_with_diagrams_pair
bound_result = bound.init_beach_line(bound_diagram)
ported_result = ported.init_beach_line(ported_diagram)
assert equivalence(bound_result, ported_result)
assert are_bound_ported_builders_equal(bound, ported)
assert are_bound_ported_diagrams_equal(bound_diagram, ported_diagram)
| 40.272727
| 79
| 0.788939
|
2fa6caeb21388891f99d20a7dd7d05fc250fac17
| 3,442
|
bzl
|
Python
|
bazel/bison.bzl
|
snsokolov/verible
|
7c59a17b975c9e87fb5d675540dc788d389edac9
|
[
"Apache-2.0"
] | null | null | null |
bazel/bison.bzl
|
snsokolov/verible
|
7c59a17b975c9e87fb5d675540dc788d389edac9
|
[
"Apache-2.0"
] | null | null | null |
bazel/bison.bzl
|
snsokolov/verible
|
7c59a17b975c9e87fb5d675540dc788d389edac9
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017-2019 The Verible Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Build rule for generating C or C++ sources with Bison.
"""
def _genyacc_impl(ctx):
"""Implementation for genyacc rule."""
# Argument list
args = ctx.actions.args()
args.add("--defines=%s" % ctx.outputs.header_out.path)
args.add("--output-file=%s" % ctx.outputs.source_out.path)
if ctx.attr.prefix:
args.add("--name-prefix=%s" % ctx.attr.prefix)
args.add_all([ctx.expand_location(opt) for opt in ctx.attr.extra_options])
args.add(ctx.file.src.path)
# Output files
outputs = ctx.outputs.extra_outs + [
ctx.outputs.header_out,
ctx.outputs.source_out,
]
ctx.actions.run(
executable = ctx.executable._bison,
env = {
"M4": ctx.executable._m4.path,
},
arguments = [args],
inputs = ctx.files.src,
tools = [ctx.executable._m4],
outputs = outputs,
mnemonic = "Yacc",
progress_message = "Generating %s and %s from %s" %
(
ctx.outputs.source_out.short_path,
ctx.outputs.header_out.short_path,
ctx.file.src.short_path,
),
)
genyacc = rule(
attrs = {
"src": attr.label(
mandatory = True,
allow_single_file = [
".y",
".yy",
".yc",
".ypp",
],
doc = "The .y, .yy, or .yc source file for this rule",
),
"header_out": attr.output(
mandatory = True,
doc = "The generated 'defines' header file",
),
"source_out": attr.output(
mandatory = True,
doc = "The generated source file",
),
"prefix": attr.string(
doc = "External symbol prefix for Bison. This string is " +
"passed to bison as the -p option, causing the resulting C " +
"file to define external functions named 'prefix'parse, " +
"'prefix'lex, etc. instead of yyparse, yylex, etc.",
),
"extra_outs": attr.output_list(doc = "A list of extra generated output files."),
"extra_options": attr.string_list(
doc = "A list of extra options to pass to Bison. These are " +
"subject to $(location ...) expansion.",
),
"_bison": attr.label(
default = Label("//bazel:bison_bin"),
executable = True,
cfg = "host",
),
"_m4": attr.label(
default = Label("//bazel:m4_bin"),
executable = True,
cfg = "host",
),
},
doc = "Generate C/C++-language sources from a Yacc file using Bison.",
output_to_genfiles = True,
implementation = _genyacc_impl,
)
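# Usage sketch (BUILD file): target and file names below are placeholders.
#
#   load("//bazel:bison.bzl", "genyacc")
#
#   genyacc(
#       name = "parser",
#       src = "parser.y",
#       header_out = "parser.tab.h",
#       source_out = "parser.tab.cc",
#   )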
| 34.42
| 88
| 0.551424
|
a8ee3a33c2129ab23691836ef9413a79749633a8
| 18,711
|
py
|
Python
|
sdk/tables/azure-data-tables/azure/data/tables/_table_client.py
|
KarishmaGhiya/azure-sdk-for-python
|
1216acf1caa13575d3b8cfa0b401e42eefa9f17f
|
[
"MIT"
] | null | null | null |
sdk/tables/azure-data-tables/azure/data/tables/_table_client.py
|
KarishmaGhiya/azure-sdk-for-python
|
1216acf1caa13575d3b8cfa0b401e42eefa9f17f
|
[
"MIT"
] | null | null | null |
sdk/tables/azure-data-tables/azure/data/tables/_table_client.py
|
KarishmaGhiya/azure-sdk-for-python
|
1216acf1caa13575d3b8cfa0b401e42eefa9f17f
|
[
"MIT"
] | null | null | null |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import functools
from typing import Optional, Any, Union # pylint: disable = W0611
try:
from urllib.parse import urlparse, unquote
except ImportError:
from urlparse import urlparse # type: ignore
from urllib2 import unquote # type: ignore
from azure.core.paging import ItemPaged
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.core.tracing.decorator import distributed_trace
from ._deserialize import _convert_to_entity
from ._entity import TableEntity
from ._generated import AzureTable
from ._generated.models import AccessPolicy, SignedIdentifier, TableProperties, QueryOptions
from ._serialize import _get_match_headers, _add_entity_properties
from ._base_client import parse_connection_str
from ._table_client_base import TableClientBase
from ._serialize import serialize_iso
from ._deserialize import _return_headers_and_deserialized
from ._error import _process_table_error
from ._version import VERSION
from ._models import TableEntityPropertiesPaged, UpdateMode, Table
class TableClient(TableClientBase):
""" :ivar str account_name: Name of the storage account (Cosmos or Azure)"""
def __init__(
self, account_url, # type: str
table_name, # type: str
credential=None, # type: Union[str,TokenCredential]
**kwargs # type: Any
):
# type: (...) -> None
"""Create TableClient from a Credential.
:param account_url:
A url to an Azure Storage account.
:type account_url: str
:param table_name: The table name.
:type table_name: str
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token, or the connection string already has shared
access key values. The value can be a SAS token string, an account shared access
key, or an instance of a TokenCredentials class from azure.identity.
:type credential: Union[str,TokenCredential]
:returns: None
"""
super(TableClient, self).__init__(account_url, table_name, credential=credential, **kwargs)
self._client = AzureTable(self.url, pipeline=self._pipeline)
self._client._config.version = kwargs.get('api_version', VERSION) # pylint: disable=protected-access
@classmethod
def from_connection_string(
cls, conn_str, # type: str
table_name, # type: str
**kwargs # type: Any
):
# type: (...) -> TableClient
"""Create TableClient from a Connection String.
:param conn_str:
A connection string to an Azure Storage or Cosmos account.
:type conn_str: str
:param table_name: The table name.
:type table_name: str
:returns: A table client.
:rtype: ~azure.data.tables.TableClient
"""
account_url, secondary, credential = parse_connection_str(
conn_str=conn_str, credential=None, service='table')
if 'secondary_hostname' not in kwargs:
kwargs['secondary_hostname'] = secondary
return cls(account_url, table_name=table_name, credential=credential, **kwargs) # type: ignore
@classmethod
def from_table_url(cls, table_url, credential=None, **kwargs):
# type: (str, Optional[Any], Any) -> TableClient
"""A client to interact with a specific Table.
:param table_url: The full URI to the table, including SAS token if used.
:type table_url: str
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be a SAS token string, an account
shared access key.
:type credential: str
:returns: A table client.
:rtype: ~azure.data.tables.TableClient
"""
try:
if not table_url.lower().startswith('http'):
table_url = "https://" + table_url
except AttributeError:
raise ValueError("Table URL must be a string.")
parsed_url = urlparse(table_url.rstrip('/'))
if not parsed_url.netloc:
raise ValueError("Invalid URL: {}".format(table_url))
table_path = parsed_url.path.lstrip('/').split('/')
account_path = ""
if len(table_path) > 1:
account_path = "/" + "/".join(table_path[:-1])
account_url = "{}://{}{}?{}".format(
parsed_url.scheme,
parsed_url.netloc.rstrip('/'),
account_path,
parsed_url.query)
table_name = unquote(table_path[-1])
if not table_name:
raise ValueError("Invalid URL. Please provide a URL with a valid table name")
return cls(account_url, table_name=table_name, credential=credential, **kwargs)
@distributed_trace
def get_table_access_policy(
self,
**kwargs # type: Any
):
# type: (...) -> dict[str,AccessPolicy]
"""Retrieves details about any stored access policies specified on the table that may be
used with Shared Access Signatures.
:return: Dictionary of SignedIdentifiers
:rtype: dict[str,AccessPolicy]
:raises: ~azure.core.exceptions.HttpResponseError
"""
timeout = kwargs.pop('timeout', None)
try:
_, identifiers = self._client.table.get_access_policy(
table=self.table_name,
timeout=timeout,
cls=kwargs.pop('cls', None) or _return_headers_and_deserialized,
**kwargs)
except HttpResponseError as error:
_process_table_error(error)
return {s.id: s.access_policy or AccessPolicy() for s in identifiers} # pylint: disable=E1125
@distributed_trace
def set_table_access_policy(
self,
signed_identifiers, # type: dict[str,AccessPolicy]
**kwargs):
# type: (...) -> None
"""Sets stored access policies for the table that may be used with Shared Access Signatures.
:param signed_identifiers:
:type signed_identifiers: dict[str,AccessPolicy]
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
self._validate_signed_identifiers(signed_identifiers)
identifiers = []
for key, value in signed_identifiers.items():
if value:
value.start = serialize_iso(value.start)
value.expiry = serialize_iso(value.expiry)
identifiers.append(SignedIdentifier(id=key, access_policy=value))
signed_identifiers = identifiers # type: ignore
try:
self._client.table.set_access_policy(
table=self.table_name,
table_acl=signed_identifiers or None,
**kwargs)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def create_table(
self,
**kwargs # type: Any
):
# type: (...) -> Table
"""Creates a new table under the current account.
:return: Table created
:rtype: Table
:raises: ~azure.core.exceptions.HttpResponseError
"""
table_properties = TableProperties(table_name=self.table_name, **kwargs)
try:
table = self._client.table.create(table_properties)
return Table(table=table)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def delete_table(
self,
**kwargs # type: Any
):
# type: (...) -> None
"""Deletes the table under the current account.
:return: None
:rtype: None
"""
try:
self._client.table.delete(table=self.table_name, **kwargs)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def delete_entity(
self,
partition_key, # type: str
row_key, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Deletes the specified entity in a table.
:param partition_key: The partition key of the entity.
:type partition_key: str
:param row_key: The row key of the entity.
:type row_key: str
:keyword str etag: Etag of the entity
:keyword ~azure.core.MatchConditions match_condition: MatchCondition
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
if_match, if_not_match = _get_match_headers(kwargs=dict(kwargs, etag=kwargs.pop('etag', None),
match_condition=kwargs.pop('match_condition', None)),
etag_param='etag', match_param='match_condition')
try:
self._client.table.delete_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
if_match=if_match or if_not_match or '*',
**kwargs)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def create_entity(
self,
entity, # type: Union[TableEntity, dict[str,str]]
**kwargs # type: Any
):
# type: (...) -> TableEntity
"""Insert entity in a table.
:param entity: The properties for the table entity.
:type entity: Union[TableEntity, dict[str,str]]
:return: TableEntity mapping str to azure.data.tables.EntityProperty
:rtype: ~azure.data.tables.TableEntity
:raises: ~azure.core.exceptions.HttpResponseError
"""
if "PartitionKey" in entity and "RowKey" in entity:
entity = _add_entity_properties(entity)
# TODO: Remove - and run test to see what happens with the service
else:
raise ValueError('PartitionKey and RowKey were not provided in entity')
try:
inserted_entity = self._client.table.insert_entity(
table=self.table_name,
table_entity_properties=entity,
**kwargs
)
properties = _convert_to_entity(inserted_entity)
return properties
except ResourceNotFoundError as error:
_process_table_error(error)
@distributed_trace
def update_entity( # pylint:disable=R1710
self,
entity, # type: Union[TableEntity, dict[str,str]]
mode=UpdateMode.MERGE, # type: UpdateMode
**kwargs # type: Any
):
# type: (...) -> None
"""Update entity in a table.
:param entity: The properties for the table entity.
:type entity: Union[TableEntity, dict[str,str]]
:param mode: Merge or Replace entity
:type mode: ~azure.data.tables.UpdateMode
:keyword str partition_key: The partition key of the entity.
:keyword str row_key: The row key of the entity.
:keyword str etag: Etag of the entity
:keyword ~azure.core.MatchConditions match_condition: MatchCondition
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
if_match, if_not_match = _get_match_headers(kwargs=dict(kwargs, etag=kwargs.pop('etag', None),
match_condition=kwargs.pop('match_condition', None)),
etag_param='etag', match_param='match_condition')
partition_key = entity['PartitionKey']
row_key = entity['RowKey']
entity = _add_entity_properties(entity)
try:
if mode is UpdateMode.REPLACE:
self._client.table.update_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
table_entity_properties=entity,
if_match=if_match or if_not_match or "*",
**kwargs)
elif mode is UpdateMode.MERGE:
self._client.table.merge_entity(table=self.table_name, partition_key=partition_key,
row_key=row_key, if_match=if_match or if_not_match or "*",
table_entity_properties=entity, **kwargs)
else:
raise ValueError('Mode type is not supported')
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def list_entities(
self,
**kwargs # type: Any
):
# type: (...) -> ItemPaged[TableEntity]
"""Lists entities in a table.
:keyword int results_per_page: Number of entities per page in return ItemPaged
:keyword Union[str, list(str)] select: Specify desired properties of an entity to return certain entities
:return: Query of table entities
:rtype: ItemPaged[TableEntity]
:raises: ~azure.core.exceptions.HttpResponseError
"""
user_select = kwargs.pop('select', None)
if user_select and not isinstance(user_select, str):
user_select = ", ".join(user_select)
query_options = QueryOptions(top=kwargs.pop('results_per_page', None), select=user_select)
command = functools.partial(
self._client.table.query_entities,
**kwargs)
return ItemPaged(
command, results_per_page=query_options, table=self.table_name,
page_iterator_class=TableEntityPropertiesPaged
)
@distributed_trace
def query_entities(
self,
filter, # type: str # pylint: disable = W0622
**kwargs
):
# type: (...) -> ItemPaged[TableEntity]
"""Lists entities in a table.
:param str filter: Specify a filter to return certain entities
:keyword int results_per_page: Number of entities per page in return ItemPaged
:keyword Union[str, list[str]] select: Specify desired properties of an entity to return certain entities
:keyword dict parameters: Dictionary for formatting query with additional, user defined parameters
:return: Query of table entities
:rtype: ItemPaged[TableEntity]
:raises: ~azure.core.exceptions.HttpResponseError
"""
parameters = kwargs.pop('parameters', None)
filter = self._parameter_filter_substitution(parameters, filter) # pylint: disable = W0622
user_select = kwargs.pop('select', None)
if user_select and not isinstance(user_select, str):
user_select = ", ".join(user_select)
query_options = QueryOptions(top=kwargs.pop('results_per_page', None), select=user_select,
filter=filter)
command = functools.partial(
self._client.table.query_entities,
query_options=query_options,
**kwargs)
return ItemPaged(
command, table=self.table_name,
page_iterator_class=TableEntityPropertiesPaged
)
@distributed_trace
def get_entity(
self,
partition_key, # type: str
row_key, # type: str
**kwargs # type: Any
):
# type: (...) -> TableEntity
"""Queries entities in a table.
:param partition_key: The partition key of the entity.
:type partition_key: str
:param row_key: The row key of the entity.
:type row_key: str
:return: Entity mapping str to azure.data.tables.EntityProperty
:rtype: ~azure.data.tables.TableEntity
:raises: ~azure.core.exceptions.HttpResponseError
"""
try:
entity = self._client.table.query_entities_with_partition_and_row_key(table=self.table_name,
partition_key=partition_key,
row_key=row_key,
**kwargs)
properties = _convert_to_entity(entity.additional_properties)
return properties
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def upsert_entity( # pylint:disable=R1710
self,
entity, # type: Union[TableEntity, dict[str,str]]
mode=UpdateMode.MERGE, # type: UpdateMode
**kwargs # type: Any
):
# type: (...) -> None
"""Update/Merge or Insert entity into table.
:param entity: The properties for the table entity.
:type entity: Union[TableEntity, dict[str,str]]
        :param mode: Merge or Replace; if the entity does not exist, it is inserted
        :type mode: ~azure.data.tables.UpdateMode
:return: None
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
partition_key = entity['PartitionKey']
row_key = entity['RowKey']
entity = _add_entity_properties(entity)
try:
if mode is UpdateMode.MERGE:
self._client.table.merge_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
table_entity_properties=entity,
**kwargs
)
elif mode is UpdateMode.REPLACE:
self._client.table.update_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
table_entity_properties=entity,
**kwargs)
else:
raise ValueError('Mode type is not supported')
        except ResourceNotFoundError:
            # The entity does not exist yet, so fall back to a plain insert.
            # The properties were already serialized above; call the generated
            # client directly instead of re-encoding through create_entity().
            self._client.table.insert_entity(
                table=self.table_name,
                table_entity_properties=entity,
                **kwargs
            )
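# Usage sketch for the client above. The connection string, table name and
# entity values are placeholders; azure.data.tables is assumed to re-export
# TableClient as its public entry point.
def _table_client_sketch():  # illustrative only
    client = TableClient.from_connection_string(
        conn_str="<connection-string>", table_name="mytable")
    client.create_table()
    client.create_entity({"PartitionKey": "pk1", "RowKey": "rk1", "value": 42})
    return client.get_entity(partition_key="pk1", row_key="rk1")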
| 39.895522
| 117
| 0.593501
|
857af7246de1c36a5bc6142305fc36c3fb7f7f06
| 18,814
|
py
|
Python
|
UnityEngine/Collider2D/__init__.py
|
Grim-es/udon-pie-auto-completion
|
c2cd86554ed615cdbbb01e19fa40665eafdfaedc
|
[
"MIT"
] | null | null | null |
UnityEngine/Collider2D/__init__.py
|
Grim-es/udon-pie-auto-completion
|
c2cd86554ed615cdbbb01e19fa40665eafdfaedc
|
[
"MIT"
] | null | null | null |
UnityEngine/Collider2D/__init__.py
|
Grim-es/udon-pie-auto-completion
|
c2cd86554ed615cdbbb01e19fa40665eafdfaedc
|
[
"MIT"
] | null | null | null |
from typing import overload
from UdonPie import System
from UdonPie import UnityEngine
from UdonPie.Undefined import *
class Collider2D:
def __new__(cls, arg1=None):
'''
:returns: Collider2D
:rtype: UnityEngine.Collider2D
'''
pass
@staticmethod
def op_Implicit(arg1):
'''
:param arg1: Object
:type arg1: UnityEngine.Object
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def op_Equality(arg1, arg2):
'''
:param arg1: Object
:type arg1: UnityEngine.Object
:param arg2: Object
:type arg2: UnityEngine.Object
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def op_Inequality(arg1, arg2):
'''
:param arg1: Object
:type arg1: UnityEngine.Object
:param arg2: Object
:type arg2: UnityEngine.Object
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def get_density():
'''
:returns: Single
:rtype: System.Single
'''
pass
@staticmethod
def set_density(arg1):
'''
:param arg1: Single
:type arg1: System.Single or float
'''
pass
@staticmethod
def get_isTrigger():
'''
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def set_isTrigger(arg1):
'''
:param arg1: Boolean
:type arg1: System.Boolean or bool
'''
pass
@staticmethod
def get_usedByEffector():
'''
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def set_usedByEffector(arg1):
'''
:param arg1: Boolean
:type arg1: System.Boolean or bool
'''
pass
@staticmethod
def get_usedByComposite():
'''
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def set_usedByComposite(arg1):
'''
:param arg1: Boolean
:type arg1: System.Boolean or bool
'''
pass
@staticmethod
def get_composite():
'''
:returns: CompositeCollider2D
:rtype: UnityEngine.CompositeCollider2D
'''
pass
@staticmethod
def get_offset():
'''
:returns: Vector2
:rtype: UnityEngine.Vector2
'''
pass
@staticmethod
def set_offset(arg1):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
'''
pass
@staticmethod
def get_attachedRigidbody():
'''
:returns: Rigidbody2D
:rtype: UnityEngine.Rigidbody2D
'''
pass
@staticmethod
def get_shapeCount():
'''
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def get_bounds():
'''
:returns: Bounds
:rtype: UnityEngine.Bounds
'''
pass
@staticmethod
def get_sharedMaterial():
'''
:returns: PhysicsMaterial2D
:rtype: UnityEngine.PhysicsMaterial2D
'''
pass
@staticmethod
def set_sharedMaterial(arg1):
'''
:param arg1: PhysicsMaterial2D
:type arg1: UnityEngine.PhysicsMaterial2D
'''
pass
@staticmethod
def get_friction():
'''
:returns: Single
:rtype: System.Single
'''
pass
@staticmethod
def get_bounciness():
'''
:returns: Single
:rtype: System.Single
'''
pass
@staticmethod
@overload
def IsTouching(arg1):
'''
:param arg1: Collider2D
:type arg1: UnityEngine.Collider2D
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def IsTouching(arg1, arg2):
'''
:param arg1: Collider2D
:type arg1: UnityEngine.Collider2D
:param arg2: ContactFilter2D
:type arg2: UnityEngine.ContactFilter2D
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def IsTouching(arg1):
'''
:param arg1: ContactFilter2D
:type arg1: UnityEngine.ContactFilter2D
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def IsTouching(arg1=None, arg2=None):
pass
@staticmethod
@overload
def IsTouchingLayers():
'''
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
@overload
def IsTouchingLayers(arg1):
'''
:param arg1: Int32
:type arg1: System.Int32 or int
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def IsTouchingLayers(arg1=None):
pass
@staticmethod
def OverlapPoint(arg1):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def Distance(arg1):
'''
:param arg1: Collider2D
:type arg1: UnityEngine.Collider2D
:returns: ColliderDistance2D
:rtype: UnityEngine.ColliderDistance2D
'''
pass
@staticmethod
def OverlapCollider(arg1, arg2):
'''
:param arg1: ContactFilter2D
:type arg1: UnityEngine.ContactFilter2D
:param arg2: Collider2DArray
:type arg2: UnityEngine.Collider2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def GetContacts(arg1):
'''
:param arg1: ContactPoint2DArray
:type arg1: UnityEngine.ContactPoint2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def GetContacts(arg1, arg2):
'''
:param arg1: ContactFilter2D
:type arg1: UnityEngine.ContactFilter2D
:param arg2: ContactPoint2DArray
:type arg2: UnityEngine.ContactPoint2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def GetContacts(arg1):
'''
:param arg1: Collider2DArray
:type arg1: UnityEngine.Collider2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def GetContacts(arg1, arg2):
'''
:param arg1: ContactFilter2D
:type arg1: UnityEngine.ContactFilter2D
:param arg2: Collider2DArray
:type arg2: UnityEngine.Collider2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def GetContacts(arg1=None, arg2=None):
pass
@staticmethod
@overload
def Cast(arg1, arg2):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Cast(arg1, arg2, arg3):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:param arg3: Single
:type arg3: System.Single or float
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Cast(arg1, arg2, arg3, arg4):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:param arg3: Single
:type arg3: System.Single or float
:param arg4: Boolean
:type arg4: System.Boolean or bool
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Cast(arg1, arg2, arg3):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: ContactFilter2D
:type arg2: UnityEngine.ContactFilter2D
:param arg3: RaycastHit2DArray
:type arg3: UnityEngine.RaycastHit2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Cast(arg1, arg2, arg3, arg4):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: ContactFilter2D
:type arg2: UnityEngine.ContactFilter2D
:param arg3: RaycastHit2DArray
:type arg3: UnityEngine.RaycastHit2DArray
:param arg4: Single
:type arg4: System.Single or float
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Cast(arg1, arg2, arg3, arg4, arg5):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: ContactFilter2D
:type arg2: UnityEngine.ContactFilter2D
:param arg3: RaycastHit2DArray
:type arg3: UnityEngine.RaycastHit2DArray
:param arg4: Single
:type arg4: System.Single or float
:param arg5: Boolean
:type arg5: System.Boolean or bool
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def Cast(arg1=None, arg2=None, arg3=None, arg4=None, arg5=None):
pass
@staticmethod
@overload
def Raycast(arg1, arg2):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Raycast(arg1, arg2, arg3):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:param arg3: Single
:type arg3: System.Single or float
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Raycast(arg1, arg2, arg3, arg4):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:param arg3: Single
:type arg3: System.Single or float
:param arg4: Int32
:type arg4: System.Int32 or int
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Raycast(arg1, arg2, arg3, arg4, arg5):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:param arg3: Single
:type arg3: System.Single or float
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Single
:type arg5: System.Single or float
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Raycast(arg1, arg2, arg3, arg4, arg5, arg6):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: RaycastHit2DArray
:type arg2: UnityEngine.RaycastHit2DArray
:param arg3: Single
:type arg3: System.Single or float
:param arg4: Int32
:type arg4: System.Int32 or int
:param arg5: Single
:type arg5: System.Single or float
:param arg6: Single
:type arg6: System.Single or float
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Raycast(arg1, arg2, arg3):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: ContactFilter2D
:type arg2: UnityEngine.ContactFilter2D
:param arg3: RaycastHit2DArray
:type arg3: UnityEngine.RaycastHit2DArray
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
@overload
def Raycast(arg1, arg2, arg3, arg4):
'''
:param arg1: Vector2
:type arg1: UnityEngine.Vector2
:param arg2: ContactFilter2D
:type arg2: UnityEngine.ContactFilter2D
:param arg3: RaycastHit2DArray
:type arg3: UnityEngine.RaycastHit2DArray
:param arg4: Single
:type arg4: System.Single or float
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def Raycast(arg1=None, arg2=None, arg3=None, arg4=None, arg5=None, arg6=None):
pass
@staticmethod
def get_enabled():
'''
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def set_enabled(arg1):
'''
:param arg1: Boolean
:type arg1: System.Boolean or bool
'''
pass
@staticmethod
def get_transform():
'''
:returns: Transform
:rtype: UnityEngine.Transform
'''
pass
@staticmethod
def get_gameObject():
'''
:returns: GameObject
:rtype: UnityEngine.GameObject
'''
pass
@staticmethod
@overload
def GetComponent(arg1):
'''
:param arg1: Type
:type arg1: System.Type
:returns: Component
:rtype: UnityEngine.Component
'''
pass
@staticmethod
@overload
def GetComponent(arg1):
'''
:param arg1: String
:type arg1: System.String or str
:returns: Component
:rtype: UnityEngine.Component
'''
pass
@staticmethod
def GetComponent(arg1=None):
pass
@staticmethod
@overload
def GetComponentInChildren(arg1, arg2):
'''
:param arg1: Type
:type arg1: System.Type
:param arg2: Boolean
:type arg2: System.Boolean or bool
:returns: Component
:rtype: UnityEngine.Component
'''
pass
@staticmethod
@overload
def GetComponentInChildren(arg1):
'''
:param arg1: Type
:type arg1: System.Type
:returns: Component
:rtype: UnityEngine.Component
'''
pass
@staticmethod
def GetComponentInChildren(arg1=None, arg2=None):
pass
@staticmethod
@overload
def GetComponentsInChildren(arg1, arg2):
'''
:param arg1: Type
:type arg1: System.Type
:param arg2: Boolean
:type arg2: System.Boolean or bool
:returns: ComponentArray
:rtype: UnityEngine.ComponentArray
'''
pass
@staticmethod
@overload
def GetComponentsInChildren(arg1):
'''
:param arg1: Type
:type arg1: System.Type
:returns: ComponentArray
:rtype: UnityEngine.ComponentArray
'''
pass
@staticmethod
@overload
def GetComponentsInChildren(arg1, arg2):
'''
:param arg1: Boolean
:type arg1: System.Boolean or bool
:param arg2: Undefined variable
:type arg2: ListT.ListT
'''
pass
@staticmethod
@overload
def GetComponentsInChildren(arg1):
'''
:param arg1: Undefined variable
:type arg1: ListT.ListT
'''
pass
@staticmethod
def GetComponentsInChildren(arg1=None, arg2=None):
pass
@staticmethod
@overload
def GetComponentInParent(arg1):
'''
:param arg1: Type
:type arg1: System.Type
:returns: Component
:rtype: UnityEngine.Component
'''
pass
@staticmethod
def GetComponentInParent(arg1=None):
pass
@staticmethod
@overload
def GetComponentsInParent(arg1, arg2):
'''
:param arg1: Type
:type arg1: System.Type
:param arg2: Boolean
:type arg2: System.Boolean or bool
:returns: ComponentArray
:rtype: UnityEngine.ComponentArray
'''
pass
@staticmethod
@overload
def GetComponentsInParent(arg1):
'''
:param arg1: Type
:type arg1: System.Type
:returns: ComponentArray
:rtype: UnityEngine.ComponentArray
'''
pass
@staticmethod
@overload
def GetComponentsInParent(arg1, arg2):
'''
:param arg1: Boolean
:type arg1: System.Boolean or bool
:param arg2: Undefined variable
:type arg2: ListT.ListT
'''
pass
@staticmethod
def GetComponentsInParent(arg1=None, arg2=None):
pass
@staticmethod
@overload
def GetComponents(arg1):
'''
:param arg1: Type
:type arg1: System.Type
:returns: ComponentArray
:rtype: UnityEngine.ComponentArray
'''
pass
@staticmethod
@overload
def GetComponents(arg1, arg2):
'''
:param arg1: Type
:type arg1: System.Type
:param arg2: Undefined variable
:type arg2: SystemCollectionsGenericList.SystemCollectionsGenericList
'''
pass
@staticmethod
@overload
def GetComponents(arg1):
'''
:param arg1: Undefined variable
:type arg1: ListT.ListT
'''
pass
@staticmethod
def GetComponents(arg1=None, arg2=None):
pass
@staticmethod
def GetInstanceID():
'''
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def GetHashCode():
'''
:returns: Int32
:rtype: System.Int32
'''
pass
@staticmethod
def Equals(arg1):
'''
:param arg1: Object
:type arg1: System.Object
:returns: Boolean
:rtype: System.Boolean
'''
pass
@staticmethod
def get_name():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def set_name(arg1):
'''
:param arg1: String
:type arg1: System.String or str
'''
pass
@staticmethod
def ToString():
'''
:returns: String
:rtype: System.String
'''
pass
@staticmethod
def GetType():
'''
:returns: Type
:rtype: System.Type
'''
pass
| 22.344418
| 82
| 0.550601
|