hexsha (string, 40) | size (int64, 3-1.03M) | ext (string, 10 classes) | lang (string, 1 class) | max_stars_repo_path (string, 3-972) | max_stars_repo_name (string, 6-130) | max_stars_repo_head_hexsha (string, 40-78) | max_stars_repo_licenses (list, 1-10) | max_stars_count (int64, 1-191k, nullable) | max_stars_repo_stars_event_min_datetime (string, 24, nullable) | max_stars_repo_stars_event_max_datetime (string, 24, nullable) | max_issues_repo_path (string, 3-972) | max_issues_repo_name (string, 6-130) | max_issues_repo_head_hexsha (string, 40-78) | max_issues_repo_licenses (list, 1-10) | max_issues_count (int64, 1-116k, nullable) | max_issues_repo_issues_event_min_datetime (string, 24, nullable) | max_issues_repo_issues_event_max_datetime (string, 24, nullable) | max_forks_repo_path (string, 3-972) | max_forks_repo_name (string, 6-130) | max_forks_repo_head_hexsha (string, 40-78) | max_forks_repo_licenses (list, 1-10) | max_forks_count (int64, 1-105k, nullable) | max_forks_repo_forks_event_min_datetime (string, 24, nullable) | max_forks_repo_forks_event_max_datetime (string, 24, nullable) | content (string, 3-1.03M) | avg_line_length (float64, 1.13-941k) | max_line_length (int64, 2-941k) | alphanum_fraction (float64, 0-1) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
18c446d9b6c444830ad97a1bd640dfe5bb7d4cc1 | 4,658 | py | Python | turbo_transformers/python/tests/bert_intermediate_test.py | sneaxiy/TurboTransformers | 31cfc3e17706320864244c63a9bab27916ea4e2b | ["BSD-3-Clause"] | 1 | 2020-06-07T06:24:41.000Z | 2020-06-07T06:24:41.000Z | turbo_transformers/python/tests/bert_intermediate_test.py | alexshuang/TurboTransformers | 923731429a8c6de6e90ccdb19032f445326cb2b9 | ["BSD-3-Clause"] | null | null | null | turbo_transformers/python/tests/bert_intermediate_test.py | alexshuang/TurboTransformers | 923731429a8c6de6e90ccdb19032f445326cb2b9 | ["BSD-3-Clause"] | null | null | null |
# Copyright (C) 2020 THL A29 Limited, a Tencent company.
# All rights reserved.
# Licensed under the BSD 3-Clause License (the "License"); you may
# not use this file except in compliance with the License. You may
# obtain a copy of the License at
# https://opensource.org/licenses/BSD-3-Clause
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
# See the AUTHORS file for names of contributors.
import unittest
import sys
import torch
import turbo_transformers
from transformers.modeling_bert import BertConfig, BertIntermediate
import numpy
import os

sys.path.append(os.path.dirname(__file__))
import test_helper


def create_test(batch_size, seq_length):
    class TestBertIntermediate(unittest.TestCase):
        def init_data(self, use_cuda: bool) -> None:
            self.test_device = torch.device('cuda:0') if use_cuda else \
                torch.device('cpu:0')
            if not use_cuda:
                torch.set_num_threads(1)

            torch.set_grad_enabled(False)
            self.cfg = BertConfig()

            self.torch_intermediate = BertIntermediate(self.cfg)
            if torch.cuda.is_available():
                self.torch_intermediate.to(self.test_device)
            self.torch_intermediate.eval()

            self.turbo_intermediate = turbo_transformers.BertIntermediate.from_torch(
                self.torch_intermediate)

        def check_torch_and_turbo(self, use_cuda):
            self.init_data(use_cuda=use_cuda)
            device = "GPU" if use_cuda else "CPU"
            num_iter = 2
            hidden_size = self.cfg.hidden_size
            input_tensor = torch.rand(size=(batch_size, seq_length,
                                            hidden_size),
                                      dtype=torch.float32,
                                      device=self.test_device)

            turbo_model = lambda: self.turbo_intermediate(input_tensor)
            turbo_result, turbo_qps, turbo_time = \
                test_helper.run_model(turbo_model, use_cuda, num_iter)
            print(
                f"BertIntermediate \"({batch_size},{seq_length:03})\" ",
                f"{device} TurboTransform QPS, {turbo_qps}, time, {turbo_time}"
            )

            torch_model = lambda: self.torch_intermediate(input_tensor)
            torch_result, torch_qps, torch_time = \
                test_helper.run_model(torch_model, use_cuda, num_iter)
            print(f"BertIntermediate \"({batch_size},{seq_length:03})\" ",
                  f"{device} Torch QPS, {torch_qps}, time, {torch_time}")

            torch_result = torch_result.cpu().numpy()
            turbo_result = turbo_result.cpu().numpy()
            self.assertTrue(
                numpy.allclose(torch_result,
                               turbo_result,
                               rtol=1e-4,
                               atol=1e-3))

            # Record both QPS figures, matching the "torch, turbo_transformers"
            # header written below.
            with open("bert_intermediate_res.txt", "a") as fh:
                fh.write(
                    f"\"({batch_size},{seq_length:03})\", {torch_qps}, {turbo_qps}\n"
                )

        def test_intermediate(self):
            self.check_torch_and_turbo(use_cuda=False)
            if torch.cuda.is_available() and \
                    turbo_transformers.config.is_compiled_with_cuda():
                self.check_torch_and_turbo(use_cuda=True)

    # Register a uniquely named test class for each (batch_size, seq_length)
    # configuration so unittest discovers all of them.
    globals()[f"TestBertIntermediate_{batch_size}_{seq_length:03}"] = \
        TestBertIntermediate


with open("bert_intermediate_res.txt", "w") as fh:
    fh.write(", torch, turbo_transformers\n")

for batch_size in [1, 2]:
    for seq_length in [10, 16, 20, 24, 40, 48, 60, 64, 80, 100, 120, 128]:
        create_test(batch_size, seq_length)

if __name__ == '__main__':
    unittest.main()
| 39.811966 | 85 | 0.639116 |
7a7e06f1387b2c523c8389bcdca2bbd4ffe095d3 | 1,627 | py | Python | BufferStockModel/figs.py | ThomasHJorgensen/ConsumptionSavingNotebooks | badbdfb1da226d5494026de2adcfec171c7f40ea | ["MIT"] | 1 | 2021-11-07T23:37:25.000Z | 2021-11-07T23:37:25.000Z | BufferStockModel/figs.py | bbardoczy/ConsumptionSavingNotebooks | 91811f784ec61fe2f11f8c9e0e172d085574f57c | ["MIT"] | null | null | null | BufferStockModel/figs.py | bbardoczy/ConsumptionSavingNotebooks | 91811f784ec61fe2f11f8c9e0e172d085574f57c | ["MIT"] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D

plt.style.use("seaborn-whitegrid")
prop_cycle = plt.rcParams["axes.prop_cycle"]
colors = prop_cycle.by_key()["color"]

import ipywidgets as widgets


def consumption_function(model, t):

    # a. unpack
    par = model.par
    sol = model.sol

    # b. figure
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1, projection='3d')
    p, m = np.meshgrid(par.grid_p, par.grid_m, indexing='ij')

    # c. plot consumption
    ax.plot_surface(p, m, sol.c[t, :, :], edgecolor='none', cmap=cm.viridis)
    ax.set_title(f'$c$ ($t = {t})$', pad=10)

    # d. details
    ax.grid(True)
    ax.set_xlabel('$p_t$')
    ax.set_xlim([par.grid_p[0], par.grid_p[-1]])
    ax.set_ylabel('$m_t$')
    ax.set_ylim([par.grid_m[0], par.grid_m[-1]])
    ax.invert_xaxis()

    plt.show()


def consumption_function_interact(model):

    widgets.interact(consumption_function,
                     model=widgets.fixed(model),
                     t=widgets.Dropdown(description='t',
                                        options=list(range(model.par.T)), value=0),
                     )


def lifecycle(model):

    # a. unpack
    par = model.par
    sim = model.sim

    # b. figure
    fig = plt.figure()

    simvarlist = [('m', '$m_t$'),
                  ('c', '$c_t$'),
                  ('a', '$a_t$')]

    age = np.arange(par.T)
    ax = fig.add_subplot(1, 1, 1)

    for simvar, simvarlatex in simvarlist:
        simdata = getattr(sim, simvar)
        ax.plot(age, np.mean(simdata, axis=1), lw=2, label=simvarlatex)

    ax.legend(frameon=True)
    ax.grid(True)
    ax.set_xlabel('age')
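# Hypothetical usage sketch (assumes a solved BufferStockModel instance named `model`;
# the module and instance names are illustrative, not part of this file):
#   import figs
#   figs.consumption_function(model, t=0)       # 3D consumption surface for period 0
#   figs.consumption_function_interact(model)   # same plot with a dropdown for t
#   figs.lifecycle(model)                       # mean m_t, c_t and a_t over the life cycle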
| 23.57971 | 70 | 0.609711 |
3dab8a0fc304d9f9b52e0750041cd8ddcb03bfe9 | 3,565 | py | Python | tests/integration_tests/build/test_coverage.py | zlim/firecracker | 03cb9fb3c50861195c2c5429ac18141d9482eadf | ["Apache-2.0"] | 1 | 2019-12-16T16:11:23.000Z | 2019-12-16T16:11:23.000Z | tests/integration_tests/build/test_coverage.py | zlim/firecracker | 03cb9fb3c50861195c2c5429ac18141d9482eadf | ["Apache-2.0"] | null | null | null | tests/integration_tests/build/test_coverage.py | zlim/firecracker | 03cb9fb3c50861195c2c5429ac18141d9482eadf | ["Apache-2.0"] | null | null | null |
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Tests pertaining to line/branch test coverage for the Firecracker code base.
# TODO
- Put the coverage in `s3://spec.firecracker` and update it automatically.
target should be put in `s3://spec.firecracker` and automatically updated.
"""
import os
import platform
import re
from subprocess import run
import pytest
import host_tools.cargo_build as host # pylint: disable=import-error
COVERAGE_TARGET_PCT = 85.1
COVERAGE_MAX_DELTA = 0.01
CARGO_KCOV_REL_PATH = os.path.join(host.CARGO_BUILD_REL_PATH, 'kcov')
KCOV_COVERAGE_FILE = 'index.js'
"""kcov will aggregate coverage data in this file."""
KCOV_COVERAGE_REGEX = r'"covered":"(\d+\.\d)"'
"""Regex for extracting coverage data from a kcov output file."""
KCOV_COVERED_LINES_REGEX = r'"covered_lines":"(\d+)"'
"""Regex for extracting number of total covered lines found by kcov."""
KCOV_TOTAL_LINES_REGEX = r'"total_lines" : "(\d+)"'
"""Regex for extracting number of total executable lines found by kcov."""
@pytest.mark.timeout(400)
@pytest.mark.skipif(
platform.machine() != "x86_64",
reason="kcov hangs on aarch64"
)
def test_coverage(test_session_root_path, test_session_tmp_path):
    """Test line coverage with kcov.

    The result is extracted from the $KCOV_COVERAGE_FILE file created by kcov
    after a coverage run.
    """
    exclude_pattern = (
        '${CARGO_HOME:-$HOME/.cargo/},'
        'build/,'
        'tests/,'
        'usr/lib/gcc,'
        'lib/x86_64-linux-gnu/,'
        # The following files/directories are auto-generated
        'bootparam.rs,'
        'elf.rs,'
        'mpspec.rs,'
        'msr_index.rs,'
        '_gen'
    )
    exclude_region = '\'mod tests {\''

    cmd = (
        'CARGO_TARGET_DIR={} cargo kcov --all '
        '--output {} -- '
        '--exclude-pattern={} '
        '--exclude-region={} --verify'
    ).format(
        os.path.join(test_session_root_path, CARGO_KCOV_REL_PATH),
        test_session_tmp_path,
        exclude_pattern,
        exclude_region
    )
    # By default, `cargo kcov` passes `--exclude-pattern=$CARGO_HOME --verify`
    # to kcov. To pass other arguments, we need to include the defaults.
    run(cmd, shell=True, check=True)

    coverage_file = os.path.join(test_session_tmp_path, KCOV_COVERAGE_FILE)
    with open(coverage_file) as cov_output:
        contents = cov_output.read()
        coverage = float(re.findall(KCOV_COVERAGE_REGEX, contents)[0])
        covered_lines = int(re.findall(KCOV_COVERED_LINES_REGEX, contents)[0])
        total_lines = int(re.findall(KCOV_TOTAL_LINES_REGEX, contents)[0])

    print("Number of executable lines: " + str(total_lines))
    print("Number of covered lines: " + str(covered_lines))
    print("Thus, coverage is: " + str(coverage))

    coverage_low_msg = (
        'Current code coverage ({}%) is below the target ({}%).'
        .format(coverage, COVERAGE_TARGET_PCT)
    )
    assert coverage >= COVERAGE_TARGET_PCT, coverage_low_msg

    # Get the name of the variable that needs updating.
    namespace = globals()
    cov_target_name = [name for name in namespace if namespace[name]
                       is COVERAGE_TARGET_PCT][0]
    coverage_high_msg = (
        'Current code coverage ({}%) is above the target ({}%).\n'
        'Please update the value of {}.'
        .format(coverage, COVERAGE_TARGET_PCT, cov_target_name)
    )
    assert coverage - COVERAGE_TARGET_PCT <= COVERAGE_MAX_DELTA,\
        coverage_high_msg
| 32.117117 | 79 | 0.670687 |
5e8d6559edc1e0fbc8ac8cf5d687c9701ffc4653 | 75,405 | py | Python | pyocd/tools/pyocd.py | jsiverskog/pyOCD | 8b75633482a2f1856a8ab6af9ebb5c1b2f9d8285 | ["Apache-2.0"] | null | null | null | pyocd/tools/pyocd.py | jsiverskog/pyOCD | 8b75633482a2f1856a8ab6af9ebb5c1b2f9d8285 | ["Apache-2.0"] | null | null | null | pyocd/tools/pyocd.py | jsiverskog/pyOCD | 8b75633482a2f1856a8ab6af9ebb5c1b2f9d8285 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
# pyOCD debugger
# Copyright (c) 2015-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import argparse
import logging
import os
import sys
import optparse
from optparse import make_option
import six
import prettytable
import traceback
# Attempt to import readline.
try:
import readline
except ImportError:
pass
from .. import __version__
from .. import (utility, coresight)
from ..core.helpers import ConnectHelper
from ..core import (exceptions, session)
from ..target.family import target_kinetis
from ..probe.pydapaccess import DAPAccess
from ..probe.debug_probe import DebugProbe
from ..coresight.ap import MEM_AP
from ..core.target import Target
from ..flash.loader import (FlashEraser, FlashLoader, FileProgrammer)
from ..gdbserver.gdbserver import GDBServer
from ..utility import (mask, conversion)
from ..utility.cmdline import convert_session_options
from ..utility.hex import (format_hex_width, dump_hex_data)
from ..utility.progress import print_progress
# Make disasm optional.
try:
import capstone
isCapstoneAvailable = True # pylint: disable=invalid-name
except ImportError:
isCapstoneAvailable = False # pylint: disable=invalid-name
LOG = logging.getLogger(__name__)
LEVELS = {
'debug':logging.DEBUG,
'info':logging.INFO,
'warning':logging.WARNING,
'error':logging.ERROR,
'critical':logging.CRITICAL
}
CORE_STATUS_DESC = {
Target.TARGET_HALTED : "Halted",
Target.TARGET_RUNNING : "Running",
Target.TARGET_RESET : "Reset",
Target.TARGET_SLEEPING : "Sleeping",
Target.TARGET_LOCKUP : "Lockup",
}
VC_NAMES_MAP = {
Target.CATCH_HARD_FAULT : "hard fault",
Target.CATCH_BUS_FAULT : "bus fault",
Target.CATCH_MEM_FAULT : "memory fault",
Target.CATCH_INTERRUPT_ERR : "interrupt error",
Target.CATCH_STATE_ERR : "state error",
Target.CATCH_CHECK_ERR : "check error",
Target.CATCH_COPROCESSOR_ERR : "coprocessor error",
Target.CATCH_CORE_RESET : "core reset",
}
HPROT_BIT_DESC = {
0: ("instruction fetch", "data access"),
1: ("user", "privileged"),
2: ("non-bufferable", "bufferable"),
3: ("non-cacheable", "cacheable/modifiable"),
4: ("no cache lookup", "lookup in cache"),
5: ("no cache allocate", "allocate in cache"),
6: ("non-shareable", "shareable"),
}
WATCHPOINT_FUNCTION_NAME_MAP = {
Target.WATCHPOINT_READ: 'r',
Target.WATCHPOINT_WRITE: 'w',
Target.WATCHPOINT_READ_WRITE: 'rw',
'r': Target.WATCHPOINT_READ,
'w': Target.WATCHPOINT_WRITE,
'rw': Target.WATCHPOINT_READ_WRITE,
}
## Default SWD clock in Hz.
DEFAULT_CLOCK_FREQ_HZ = 1000000
## Command info and help.
COMMAND_INFO = {
'list' : {
'aliases' : [],
'args' : "",
'help' : "Show available targets"
},
'erase' : {
'aliases' : [],
'args' : "ADDR [COUNT]",
'help' : "Erase internal flash sectors"
},
'unlock' : {
'aliases' : [],
'args' : "",
'help' : "Unlock security on the target"
},
'status' : {
'aliases' : ['stat'],
'args' : "",
'help' : "Show the target's current state"
},
'reg' : {
'aliases' : [],
'args' : "[-f] [REG]",
'help' : "Print all or one register"
},
'wreg' : {
'aliases' : [],
'args' : "REG VALUE",
'help' : "Set the value of a register"
},
'reset' : {
'aliases' : [],
'args' : "[-h/--halt]",
'help' : "Reset the target"
},
'savemem' : {
'aliases' : [],
'args' : "ADDR LEN FILENAME",
"help" : "Save a range of memory to a binary file"
},
'loadmem' : {
'aliases' : [],
'args' : "ADDR FILENAME",
"help" : "Load a binary file to an address in memory (RAM or flash)"
},
'load' : {
'aliases' : [],
'args' : "FILENAME [ADDR]",
"help" : "Load a binary, hex, or elf file with optional base address"
},
'read8' : {
'aliases' : ['read', 'r', 'rb'],
'args' : "ADDR [LEN]",
'help' : "Read 8-bit bytes"
},
'read16' : {
'aliases' : ['r16', 'rh'],
'args' : "ADDR [LEN]",
'help' : "Read 16-bit halfwords"
},
'read32' : {
'aliases' : ['r32', 'rw'],
'args' : "ADDR [LEN]",
'help' : "Read 32-bit words"
},
'write8' : {
'aliases' : ['write', 'w', 'wb'],
'args' : "ADDR DATA...",
'help' : "Write 8-bit bytes to memory (RAM or flash)"
},
'write16' : {
'aliases' : ['w16', 'wh'],
'args' : "ADDR DATA...",
'help' : "Write 16-bit halfwords to memory (RAM or flash)"
},
'write32' : {
'aliases' : ['w32', 'ww'],
'args' : "ADDR DATA...",
'help' : "Write 32-bit words to memory (RAM or flash)"
},
'fill' : {
'aliases' : [],
'args' : "[SIZE] ADDR LEN PATTERN",
'help' : "Fill a range of memory with a pattern",
'extra_help' : "The optional SIZE parameter must be one of 8, 16, or 32. If not "
"provided, the size is determined by the pattern value's most "
"significant set bit."
},
'go' : {
'aliases' : ['g', 'continue', 'c'],
'args' : "",
'help' : "Resume execution of the target"
},
'step' : {
'aliases' : ['s'],
'args' : "",
'help' : "Step one instruction"
},
'halt' : {
'aliases' : ['h'],
'args' : "",
'help' : "Halt the target"
},
'break' : {
'aliases' : [],
'args' : "ADDR",
'help' : "Set a breakpoint address"
},
'rmbreak' : {
'aliases' : [],
'args' : "ADDR",
'help' : "Remove a breakpoint"
},
'lsbreak' : {
'aliases' : [],
'args' : "",
'help' : "List breakpoints"
},
'watch' : {
'aliases' : [],
'args' : "ADDR [r|w|rw] [1|2|4]",
'help' : "Set a watchpoint address, and optional access type (default rw) and size (4)."
},
'rmwatch' : {
'aliases' : [],
'args' : "ADDR",
'help' : "Remove a watchpoint"
},
'lswatch' : {
'aliases' : [],
'args' : "",
'help' : "List watchpoints"
},
'help' : {
'aliases' : ['?'],
'args' : "[CMD]",
'help' : "Show help for commands"
},
'disasm' : {
'aliases' : ['d'],
'args' : "[-c/--center] ADDR [LEN]",
'help' : "Disassemble instructions at an address",
'extra_help' : "Only available if the capstone library is installed. To install "
"capstone, run 'pip install capstone'."
},
'exit' : {
'aliases' : ['quit'],
'args' : "",
'help' : "Quit pyocd-tool"
},
'core' : {
'aliases' : [],
'args' : "[NUM]",
'help' : "Select CPU core by number or print selected core"
},
'readdp' : {
'aliases' : ['rdp'],
'args' : "ADDR",
'help' : "Read DP register"
},
'writedp' : {
'aliases' : ['wdp'],
'args' : "ADDR DATA",
'help' : "Write DP register"
},
'readap' : {
'aliases' : ['rap'],
'args' : "[APSEL] ADDR",
'help' : "Read AP register"
},
'writeap' : {
'aliases' : ['wap'],
'args' : "[APSEL] ADDR DATA",
'help' : "Write AP register"
},
'reinit' : {
'aliases' : [],
'args' : "",
'help' : "Reinitialize the target object"
},
'show' : {
'aliases' : [],
'args' : "INFO",
'help' : "Report info about the target",
},
'set' : {
'aliases' : [],
'args' : "NAME VALUE",
'help' : "Set an option value",
'extra_help' : "Available info names: vc, vectorcatch.",
},
'initdp' : {
'aliases' : [],
'args' : "",
'help' : "Init DP and power up debug.",
},
'makeap' : {
'aliases' : [],
'args' : "APSEL",
'help' : "Creates a new AP object for the given APSEL.",
'extra_help' : "The type of AP, MEM-AP or generic, is autodetected.",
},
'where' : {
'aliases' : [],
'args' : "[ADDR]",
'help' : "Show symbol, file, and line for address.",
'extra_help' : "The symbol name, source file path, and line number are displayed for the specified address. If no address is given then current PC is used. An ELF file must have been specified with the --elf option.",
},
'symbol' : {
'aliases' : [],
'args' : "NAME",
'help' : "Show a symbol's value.",
'extra_help' : "An ELF file must have been specified with the --elf option.",
},
'gdbserver' : {
'aliases' : [],
'args' : "ACTION",
'help' : "Start or stop the gdbserver.",
'extra_help' : "The action argument should be either 'start' or 'stop'. Use the 'gdbserver_port' and 'telnet_port' user options to control the ports the gdbserver uses.",
},
}
INFO_HELP = {
'map' : {
'aliases' : [],
'help' : "Target memory map.",
},
'peripherals' : {
'aliases' : [],
'help' : "List of target peripheral instances.",
},
'uid' : {
'aliases' : [],
'help' : "Target's unique ID",
},
'cores' : {
'aliases' : [],
'help' : "Information about CPU cores in the target.",
},
'target' : {
'aliases' : [],
'help' : "General target information.",
},
'fault' : {
'aliases' : [],
'help' : "Fault status information.",
'extra_help' : "By default, only asserted fields are shown. Add -a to command to show all fields.",
},
'vector-catch' : {
'aliases' : ['vc'],
'help' : "Show current vector catch settings.",
},
'step-into-interrupt' : {
'aliases' : ['si'],
'help' : "Display whether interrupts are enabled when single stepping."
},
'nreset' : {
'aliases' : [],
'help' : "Current nRESET signal state.",
},
'option' : {
'aliases' : [],
'help' : "Show the current value of one or more user options.",
},
'mem-ap' : {
'aliases' : [],
'help' : "Display the currently selected MEM-AP used for memory read/write commands."
},
'hnonsec' : {
'aliases' : [],
'help' : "Display the current HNONSEC value used by the selected MEM-AP."
},
'hprot' : {
'aliases' : [],
'help' : "Display the current HPROT value used by the selected MEM-AP."
},
}
OPTION_HELP = {
'vector-catch' : {
'aliases' : ['vc'],
'help' : "Control enabled vector catch sources.",
'extra_help' : "Value is a concatenation of one letter per enabled source in any order, or 'all' or 'none'. (h=hard fault, b=bus fault, m=mem fault, i=irq err, s=state err, c=check err, p=nocp, r=reset, a=all, n=none).",
},
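# Illustrative (hypothetical) session: per the extra_help text above, 'set vc hb'
# would enable catching hard faults and bus faults only, while 'set vc all'
# enables every listed source.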
'step-into-interrupt' : {
'aliases' : ['si'],
'help' : "Set whether to enable or disable interrupts when single stepping. Set to 1 to enable."
},
'nreset' : {
'aliases' : [],
'help' : "Set nRESET signal state. Accepts a value of 0 or 1."
},
'log' : {
'aliases' : [],
'help' : "Set log level to one of debug, info, warning, error, critical"
},
'clock' : {
'aliases' : [],
'help' : "Set SWD or JTAG clock frequency in kilohertz."
},
'option' : {
'aliases' : [],
'help' : "Change the value of one or more user options.",
'extra_help' : "Each parameter should follow the form OPTION=VALUE.",
},
'mem-ap' : {
'aliases' : [],
'help' : "Select the MEM-AP used for memory read/write commands."
},
'hnonsec' : {
'aliases' : [],
'help' : "Set the current HNONSEC value used by the selected MEM-AP."
},
'hprot' : {
'aliases' : [],
'help' : "Set the current HPROT value used by the selected MEM-AP."
},
}
ALL_COMMANDS = list(COMMAND_INFO.keys())
ALL_COMMANDS.extend(a for d in COMMAND_INFO.values() for a in d['aliases'])
ALL_COMMANDS.sort()
class ToolError(Exception):
pass
class ToolExitException(Exception):
pass
def cmdoptions(opts):
def process_opts(fn):
parser = optparse.OptionParser(add_help_option=False)
for opt in opts:
parser.add_option(opt)
def foo(inst, args):
namespace, other_args = parser.parse_args(args)
return fn(inst, namespace, other_args)
return foo
return process_opts
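# Note on the decorator above: @cmdoptions wraps a command handler so its raw
# argument list is first parsed by optparse; the wrapped handler then receives
# (parsed_options, remaining_args) instead of the plain list (see handle_reset
# and handle_disasm below).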
class PyOCDConsole(object):
PROMPT = '>>> '
def __init__(self, tool):
self.tool = tool
self.last_command = ''
def run(self):
try:
while True:
try:
line = six.moves.input(self.PROMPT)
line = line.strip()
if line:
self.process_command_line(line)
self.last_command = line
elif self.last_command:
self.process_command(self.last_command)
except KeyboardInterrupt:
print()
except EOFError:
# Print a newline when we get a Ctrl-D on a Posix system.
# Windows exits with a Ctrl-Z+Return, so there is no need for this.
if os.name != "nt":
print()
def process_command_line(self, line):
for cmd in line.split(';'):
self.process_command(cmd)
def process_command(self, cmd):
try:
firstChar = (cmd.strip())[0]
if firstChar in '$!':
cmd = cmd[1:].strip()
if firstChar == '$':
self.tool.handle_python(cmd)
elif firstChar == '!':
os.system(cmd)
return
args = utility.cmdline.split_command_line(cmd)
cmd = args[0].lower()
args = args[1:]
# Handle register name as command.
if cmd in coresight.cortex_m.CORE_REGISTER:
self.tool.handle_reg([cmd])
return
# Check for valid command.
if cmd not in self.tool.command_list:
print("Error: unrecognized command '%s'" % cmd)
return
# Run command.
handler = self.tool.command_list[cmd]
handler(args)
except ValueError:
print("Error: invalid argument")
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
except exceptions.TransferError as e:
print("Transfer failed:", e)
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
except ToolError as e:
print("Error:", e)
except ToolExitException:
raise
except Exception as e:
print("Error:", e)
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
class PyOCDCommander(object):
def __init__(self, args, cmds=None):
# Read command-line arguments.
self.args = args
self.cmds = cmds
self.session = None
self.board = None
self.target = None
self.probe = None
self.selected_ap = 0
self.did_erase = False
self.exit_code = 0
self.step_into_interrupt = False
self.elf = None
self._peripherals = {}
self._loaded_peripherals = False
self._gdbserver = None
self.command_list = {
'list' : self.handle_list,
'erase' : self.handle_erase,
'unlock' : self.handle_unlock,
'status' : self.handle_status,
'stat' : self.handle_status,
'reg' : self.handle_reg,
'wreg' : self.handle_write_reg,
'reset' : self.handle_reset,
'savemem' : self.handle_savemem,
'loadmem' : self.handle_loadmem,
'load' : self.handle_load,
'read' : self.handle_read8,
'read8' : self.handle_read8,
'read16' : self.handle_read16,
'read32' : self.handle_read32,
'r' : self.handle_read8,
'rb' : self.handle_read8,
'r16' : self.handle_read16,
'rh' : self.handle_read16,
'r32' : self.handle_read32,
'rw' : self.handle_read32,
'write' : self.handle_write8,
'write8' : self.handle_write8,
'write16' : self.handle_write16,
'write32' : self.handle_write32,
'w' : self.handle_write8,
'wb' : self.handle_write8,
'w16' : self.handle_write16,
'wh' : self.handle_write16,
'w32' : self.handle_write32,
'ww' : self.handle_write32,
'go' : self.handle_go,
'g' : self.handle_go,
'continue': self.handle_go,
'c' : self.handle_go,
'step' : self.handle_step,
's' : self.handle_step,
'halt' : self.handle_halt,
'h' : self.handle_halt,
'break' : self.handle_breakpoint,
'rmbreak' : self.handle_remove_breakpoint,
'lsbreak' : self.handle_list_breakpoints,
'watch' : self.handle_watchpoint,
'rmwatch' : self.handle_remove_watchpoint,
'lswatch' : self.handle_list_watchpoints,
'disasm' : self.handle_disasm,
'd' : self.handle_disasm,
'exit' : self.handle_exit,
'quit' : self.handle_exit,
'core' : self.handle_core,
'readdp' : self.handle_readdp,
'writedp' : self.handle_writedp,
'readap' : self.handle_readap,
'writeap' : self.handle_writeap,
'rdp' : self.handle_readdp,
'wdp' : self.handle_writedp,
'rap' : self.handle_readap,
'wap' : self.handle_writeap,
'reinit' : self.handle_reinit,
'show' : self.handle_show,
'set' : self.handle_set,
'help' : self.handle_help,
'where' : self.handle_where,
'?' : self.handle_help,
'initdp' : self.handle_initdp,
'makeap' : self.handle_makeap,
'symbol' : self.handle_symbol,
'gdbserver':self.handle_gdbserver,
'fill' : self.handle_fill,
}
self.info_list = {
'map' : self.handle_show_map,
'peripherals' : self.handle_show_peripherals,
'uid' : self.handle_show_unique_id,
'cores' : self.handle_show_cores,
'target' : self.handle_show_target,
'fault' : self.handle_show_fault,
'vector-catch' : self.handle_show_vectorcatch,
'vc' : self.handle_show_vectorcatch,
'step-into-interrupt' : self.handle_show_step_interrupts,
'si' : self.handle_show_step_interrupts,
'nreset' : self.handle_show_nreset,
'option' : self.handle_show_option,
'mem-ap' : self.handle_show_ap,
'hnonsec' : self.handle_show_hnonsec,
'hprot' : self.handle_show_hprot,
}
self.option_list = {
'vector-catch' : self.handle_set_vectorcatch,
'vc' : self.handle_set_vectorcatch,
'step-into-interrupt' : self.handle_set_step_interrupts,
'si' : self.handle_set_step_interrupts,
'nreset' : self.handle_set_nreset,
'log' : self.handle_set_log,
'clock' : self.handle_set_clock,
'option' : self.handle_set_option,
'mem-ap' : self.handle_set_ap,
'hnonsec' : self.handle_set_hnonsec,
'hprot' : self.handle_set_hprot,
}
def run(self):
try:
# If no commands, enter interactive mode.
if self.cmds is None:
if not self.connect():
return self.exit_code
# Print connected message, unless not initing.
if not self.args.no_init:
try:
# If the target is locked, we can't read the CPU state.
if self.target.is_locked():
status = "locked"
else:
try:
status = CORE_STATUS_DESC[self.target.get_state()]
except KeyError:
status = "<no core>"
# Say what we're connected to.
print("Connected to %s [%s]: %s" % (self.target.part_number,
status, self.board.unique_id))
except exceptions.TransferFaultError:
pass
# Run the command line.
console = PyOCDConsole(self)
console.run()
# Otherwise, run the list of commands we were given and exit. We only connect when
# there is a command that requires a connection (most do).
else:
didConnect = False
for args in self.cmds:
# Extract the command name.
cmd = args.pop(0).lower()
# Handle certain commands without connecting.
if cmd == 'list':
self.handle_list([])
continue
elif cmd == 'help':
self.handle_help(args)
continue
# For others, connect first.
elif not didConnect:
if not self.connect():
return self.exit_code
didConnect = True
# Invoke action handler.
result = self.command_list[cmd](args)
if result is not None:
self.exit_code = result
break
except ToolExitException:
self.exit_code = 0
except ValueError:
print("Error: invalid argument")
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
except exceptions.TransferError:
print("Error: transfer failed")
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
self.exit_code = 2
except ToolError as e:
print("Error:", e)
self.exit_code = 1
finally:
if self.session is not None:
self.session.close()
return self.exit_code
def connect(self):
if (self.args.frequency is not None) and (self.args.frequency != DEFAULT_CLOCK_FREQ_HZ):
print("Setting SWD clock to %d kHz" % (self.args.frequency // 1000))
options = convert_session_options(self.args.options)
# Set connect mode. If --halt is set then the connect mode is halt. If connect_mode is
# set through -O then use that. Otherwise default to attach.
if self.args.halt:
connect_mode = 'halt'
elif 'connect_mode' in options:
connect_mode = None
else:
connect_mode = 'attach'
# Connect to board.
self.session = ConnectHelper.session_with_chosen_probe(
blocking=(not self.args.no_wait),
project_dir=self.args.project_dir,
config_file=self.args.config,
user_script=self.args.script,
no_config=self.args.no_config,
pack=self.args.pack,
unique_id=self.args.unique_id,
target_override=self.args.target_override,
connect_mode=connect_mode,
frequency=self.args.frequency,
options=options,
option_defaults=dict(
auto_unlock=False,
resume_on_disconnect=False,
))
if self.session is None:
self.exit_code = 3
return False
self.board = self.session.board
try:
self.session.open(init_board=not self.args.no_init)
except exceptions.TransferFaultError as e:
if not self.board.target.is_locked():
print("Transfer fault while initing board: %s" % e)
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
self.exit_code = 1
return False
except Exception as e:
print("Exception while initing board: %s" % e)
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
self.exit_code = 1
return False
self.target = self.board.target
self.probe = self.session.probe
# Select the first core's MEM-AP by default.
if not self.args.no_init:
try:
self.selected_ap = self.target.selected_core.ap.ap_num
except IndexError:
for ap_num in sorted(self.target.aps.keys()):
if isinstance(self.target.aps[ap_num], MEM_AP):
self.selected_ap = ap_num
break
# Set elf file if provided.
if self.args.elf:
self.target.elf = os.path.expanduser(self.args.elf)
self.elf = self.target.elf
else:
self.elf = None
# Handle a device with flash security enabled.
if not self.args.no_init and self.target.is_locked():
print("Warning: Target is locked, limited operations available. Use unlock command to mass erase and unlock.")
return True
@property
def peripherals(self):
if self.target.svd_device and not self._loaded_peripherals:
for p in self.target.svd_device.peripherals:
self._peripherals[p.name.lower()] = p
self._loaded_peripherals = True
return self._peripherals
def handle_list(self, args):
ConnectHelper.list_connected_probes()
def handle_status(self, args):
if self.target.is_locked():
print("Security: Locked")
else:
print("Security: Unlocked")
if isinstance(self.target, target_kinetis.Kinetis):
print("MDM-AP Status: 0x%08x" % self.target.mdm_ap.read_reg(target_kinetis.MDM_STATUS))
if not self.target.is_locked():
for i, c in enumerate(self.target.cores):
core = self.target.cores[c]
print("Core %d status: %s" % (i, CORE_STATUS_DESC[core.get_state()]))
def handle_reg(self, args):
# If there are no args, print all register values.
if len(args) < 1:
self.dump_registers()
return
if len(args) == 2 and args[0].lower() == '-f':
del args[0]
show_fields = True
else:
show_fields = False
reg = args[0].lower()
if reg in coresight.cortex_m.CORE_REGISTER:
value = self.target.read_core_register(reg)
if isinstance(value, six.integer_types):
print("%s = 0x%08x (%d)" % (reg, value, value))
elif type(value) is float:
print("%s = %g" % (reg, value))
else:
raise ToolError("Unknown register value type")
else:
subargs = reg.split('.')
if subargs[0] in self.peripherals:
p = self.peripherals[subargs[0]]
if len(subargs) > 1:
r = [x for x in p.registers if x.name.lower() == subargs[1]]
if len(r):
self._dump_peripheral_register(p, r[0], True)
else:
raise ToolError("invalid register '%s' for %s" % (subargs[1], p.name))
else:
for r in p.registers:
self._dump_peripheral_register(p, r, show_fields)
else:
raise ToolError("invalid peripheral '%s'" % (subargs[0]))
def handle_write_reg(self, args):
if len(args) < 1:
raise ToolError("No register specified")
if len(args) < 2:
raise ToolError("No value specified")
reg = args[0].lower()
if reg in coresight.cortex_m.CORE_REGISTER:
if (reg.startswith('s') and reg != 'sp') or reg.startswith('d'):
value = float(args[1])
else:
value = self.convert_value(args[1])
self.target.write_core_register(reg, value)
else:
value = self.convert_value(args[1])
subargs = reg.split('.')
if len(subargs) < 2:
raise ToolError("no register specified")
if subargs[0] in self.peripherals:
p = self.peripherals[subargs[0]]
r = [x for x in p.registers if x.name.lower() == subargs[1]]
if len(r):
r = r[0]
addr = p.base_address + r.address_offset
if len(subargs) == 2:
print("writing 0x%x to 0x%x:%d (%s)" % (value, addr, r.size, r.name))
self.target.write_memory(addr, value, r.size)
elif len(subargs) == 3:
f = [x for x in r.fields if x.name.lower() == subargs[2]]
if len(f):
f = f[0]
msb = f.bit_offset + f.bit_width - 1
lsb = f.bit_offset
originalValue = self.target.read_memory(addr, r.size)
value = mask.bfi(originalValue, msb, lsb, value)
print("writing 0x%x to 0x%x[%d:%d]:%d (%s.%s)" % (value, addr, msb, lsb, r.size, r.name, f.name))
self.target.write_memory(addr, value, r.size)
else:
raise ToolError("too many dots")
self._dump_peripheral_register(p, r, True)
else:
raise ToolError("invalid register '%s' for %s" % (subargs[1], p.name))
else:
raise ToolError("invalid peripheral '%s'" % (subargs[0]))
@cmdoptions([make_option('-h', "--halt", action="store_true")])
def handle_reset(self, args, other):
print("Resetting target")
if args.halt:
self.target.reset_and_halt()
status = self.target.get_state()
if status != Target.TARGET_HALTED:
print("Failed to halt device on reset")
else:
print("Successfully halted device on reset")
else:
self.target.reset()
def handle_set_nreset(self, args):
if len(args) != 1:
print("Missing reset state")
return
state = int(args[0], base=0)
print("nRESET = %d" % (state))
self.probe.assert_reset((state == 0))
@cmdoptions([make_option('-c', "--center", action="store_true")])
def handle_disasm(self, args, other):
if len(other) == 0:
print("Error: no address specified")
return 1
addr = self.convert_value(other[0])
if len(other) < 2:
count = 6
else:
count = self.convert_value(other[1])
if args.center:
addr -= count // 2
# Since we're disassembling, make sure the Thumb bit is cleared.
addr &= ~1
# Print disasm of data.
data = self.target.read_memory_block8(addr, count)
self.print_disasm(bytes(bytearray(data)), addr)
def handle_read8(self, args):
return self.do_read(args, 8)
def handle_read16(self, args):
return self.do_read(args, 16)
def handle_read32(self, args):
return self.do_read(args, 32)
def handle_write8(self, args):
return self.do_write(args, 8)
def handle_write16(self, args):
return self.do_write(args, 16)
def handle_write32(self, args):
return self.do_write(args, 32)
def handle_savemem(self, args):
if len(args) < 3:
print("Error: missing argument")
return 1
addr = self.convert_value(args[0])
count = self.convert_value(args[1])
filename = args[2]
region = self.session.target.memory_map.get_region_for_address(addr)
flash_init_required = region is not None and region.is_flash and not region.is_powered_on_boot and region.flash is not None
if flash_init_required:
region.flash.init(region.flash.Operation.VERIFY)
data = bytearray(self.target.aps[self.selected_ap].read_memory_block8(addr, count))
if flash_init_required:
region.flash.cleanup()
with open(filename, 'wb') as f:
f.write(data)
print("Saved %d bytes to %s" % (count, filename))
def handle_loadmem(self, args):
if len(args) < 2:
print("Error: missing argument")
return 1
addr = self.convert_value(args[0])
filename = args[1]
with open(filename, 'rb') as f:
data = bytearray(f.read())
if self.is_flash_write(addr, 8, data):
FlashLoader.program_binary_data(self.session, addr, data)
else:
self.target.aps[self.selected_ap].write_memory_block8(addr, data)
print("Loaded %d bytes to 0x%08x" % (len(data), addr))
def handle_load(self, args):
if len(args) < 1:
print("Error: missing argument")
return 1
filename = args[0]
if len(args) > 1:
addr = self.convert_value(args[1])
else:
addr = None
programmer = FileProgrammer(self.session, progress=print_progress())
programmer.program(filename, base_address=addr)
# fill [SIZE] ADDR LEN PATTERN
def handle_fill(self, args):
if len(args) == 3:
size = None
addr = self.convert_value(args[0])
length = self.convert_value(args[1])
pattern = self.convert_value(args[2])
elif len(args) == 4:
size = int(args[0])
if size not in (8, 16, 32):
raise ToolError("invalid size argument")
addr = self.convert_value(args[1])
length = self.convert_value(args[2])
pattern = self.convert_value(args[3])
else:
print("Error: missing argument")
return 1
# Determine size by the highest set bit in the pattern.
if size is None:
highest = mask.msb(pattern)
if highest < 8:
size = 8
elif highest < 16:
size = 16
elif highest < 32:
size = 32
else:
raise ToolError("invalid pattern size (MSB is %d)" % highest)
# Create word-sized byte lists.
if size == 8:
pattern_str = "0x%02x" % (pattern & 0xff)
pattern = [pattern]
elif size == 16:
pattern_str = "0x%04x" % (pattern & 0xffff)
pattern = conversion.u16le_list_to_byte_list([pattern])
elif size == 32:
pattern_str = "0x%08x" % (pattern & 0xffffffff)
pattern = conversion.u32le_list_to_byte_list([pattern])
# Divide into 32 kB chunks.
CHUNK_SIZE = 32 * 1024
chunk_count = (length + CHUNK_SIZE - 1) // CHUNK_SIZE
end_addr = addr + length
print("Filling 0x%08x-0x%08x with pattern %s" % (addr, end_addr - 1, pattern_str))
for chunk in range(chunk_count):
# Get this chunk's size.
chunk_size = min(end_addr - addr, CHUNK_SIZE)
print("Wrote %d bytes @ 0x%08x" % (chunk_size, addr))
# Construct data for the chunk.
if size == 8:
data = pattern * chunk_size
elif size == 16:
data = (pattern * ((chunk_size + 1) // 2))[:chunk_size]
elif size == 32:
data = (pattern * ((chunk_size + 3) // 4))[:chunk_size]
# Write to target.
self.target.aps[self.selected_ap].write_memory_block8(addr, data)
addr += chunk_size
def do_read(self, args, width):
if len(args) == 0:
print("Error: no address specified")
return 1
addr = self.convert_value(args[0])
if len(args) < 2:
count = width // 8
else:
count = self.convert_value(args[1])
if width == 8:
data = self.target.aps[self.selected_ap].read_memory_block8(addr, count)
byteData = data
elif width == 16:
byteData = self.target.aps[self.selected_ap].read_memory_block8(addr, count)
data = utility.conversion.byte_list_to_u16le_list(byteData)
elif width == 32:
byteData = self.target.aps[self.selected_ap].read_memory_block8(addr, count)
data = utility.conversion.byte_list_to_u32le_list(byteData)
# Print hex dump of output.
dump_hex_data(data, addr, width=width)
def do_write(self, args, width):
if len(args) == 0:
print("Error: no address specified")
return 1
addr = self.convert_value(args[0])
if len(args) <= 1:
print("Error: no data for write")
return 1
else:
data = [self.convert_value(d) for d in args[1:]]
if width == 8:
pass
elif width == 16:
data = utility.conversion.u16le_list_to_byte_list(data)
elif width == 32:
data = utility.conversion.u32le_list_to_byte_list(data)
if self.is_flash_write(addr, width, data):
# Look up flash region.
region = self.session.target.memory_map.get_region_for_address(addr)
if not region:
print("address 0x%08x is not within a memory region" % addr)
return 1
if not region.is_flash:
print("address 0x%08x is not in flash" % addr)
return 1
assert region.flash is not None
# Program phrase to flash.
region.flash.init(region.flash.Operation.PROGRAM)
region.flash.program_phrase(addr, data)
region.flash.cleanup()
else:
self.target.aps[self.selected_ap].write_memory_block8(addr, data)
self.target.flush()
def handle_erase(self, args):
if len(args) < 1:
raise ToolError("invalid arguments")
addr = self.convert_value(args[0])
if len(args) < 2:
count = 1
else:
count = self.convert_value(args[1])
eraser = FlashEraser(self.session, FlashEraser.Mode.SECTOR)
while count:
# Look up the flash region so we can get the page size.
region = self.session.target.memory_map.get_region_for_address(addr)
if not region:
print("address 0x%08x is not within a memory region" % addr)
break
if not region.is_flash:
print("address 0x%08x is not in flash" % addr)
break
# Erase this page.
eraser.erase([addr])
# Next page.
count -= 1
addr += region.blocksize
def handle_unlock(self, args):
# Currently the same as erase.
if not self.did_erase:
self.target.mass_erase()
def handle_go(self, args):
self.target.resume()
status = self.target.get_state()
if status == Target.TARGET_RUNNING:
print("Successfully resumed device")
elif status == Target.TARGET_SLEEPING:
print("Device entered sleep")
elif status == Target.TARGET_LOCKUP:
print("Device entered lockup")
elif status == Target.TARGET_RESET:
print("Device is being held in reset")
elif status == Target.TARGET_HALTED:
print("Device is halted; a debug event may have occurred")
else:
print("Unknown target status: %s" % status)
def handle_step(self, args):
self.target.step(disable_interrupts=not self.step_into_interrupt)
addr = self.target.read_core_register('pc')
if isCapstoneAvailable:
addr &= ~1
data = self.target.read_memory_block8(addr, 4)
self.print_disasm(bytes(bytearray(data)), addr, maxInstructions=1)
else:
print("PC = 0x%08x" % (addr))
def handle_halt(self, args):
self.target.halt()
status = self.target.get_state()
if status != Target.TARGET_HALTED:
print("Failed to halt device; target state is %s" % CORE_STATUS_DESC[status])
return 1
else:
print("Successfully halted device")
def handle_breakpoint(self, args):
if len(args) < 1:
raise ToolError("no breakpoint address provided")
addr = self.convert_value(args[0])
if self.target.set_breakpoint(addr):
self.target.selected_core.bp_manager.flush()
print("Set breakpoint at 0x%08x" % addr)
else:
print("Failed to set breakpoint at 0x%08x" % addr)
def handle_remove_breakpoint(self, args):
if len(args) < 1:
raise ToolError("no breakpoint address provided")
addr = self.convert_value(args[0])
try:
type = self.target.get_breakpoint_type(addr)
self.target.remove_breakpoint(addr)
self.target.selected_core.bp_manager.flush()
print("Removed breakpoint at 0x%08x" % addr)
except:
print("Failed to remove breakpoint at 0x%08x" % addr)
def handle_list_breakpoints(self, args):
if self.target.selected_core.dwt is None:
print("DWT not present")
return
availableBpCount = self.target.selected_core.available_breakpoint_count
print("%d hardware breakpoints available" % availableBpCount)
bps = self.target.selected_core.bp_manager.get_breakpoints()
if not len(bps):
print("No breakpoints installed")
else:
for i, addr in enumerate(bps):
print("%d: 0x%08x" % (i, addr))
def handle_watchpoint(self, args):
if self.target.selected_core.dwt is None:
print("DWT not present")
return
if len(args) < 1:
raise ToolError("no watchpoint address provided")
addr = self.convert_value(args[0])
if len(args) > 1:
try:
wptype = WATCHPOINT_FUNCTION_NAME_MAP[args[1]]
except KeyError:
raise ToolError("unsupported watchpoint type '%s'", args[1])
else:
wptype = Target.WATCHPOINT_READ_WRITE
if len(args) > 2:
sz = self.convert_value(args[2])
if sz not in (1, 2, 4):
raise ToolError("unsupported watchpoint size (%d)", sz)
else:
sz = 4
if self.target.set_watchpoint(addr, sz, wptype):
print("Set watchpoint at 0x%08x" % addr)
else:
print("Failed to set watchpoint at 0x%08x" % addr)
def handle_remove_watchpoint(self, args):
if self.target.selected_core.dwt is None:
print("DWT not present")
return
if len(args) < 1:
raise ToolError("no watchpoint address provided")
addr = self.convert_value(args[0])
try:
type = self.target.get_breakpoint_type(addr)
self.target.remove_watchpoint(addr)
print("Removed watchpoint at 0x%08x" % addr)
except:
print("Failed to remove watchpoint at 0x%08x" % addr)
def handle_list_watchpoints(self, args):
if self.target.selected_core.dwt is None:
print("DWT not present")
return
availableWpCount = self.target.selected_core.dwt.watchpoint_count
print("%d hardware watchpoints available" % availableWpCount)
wps = self.target.selected_core.dwt.get_watchpoints()
if not len(wps):
print("No watchpoints installed")
else:
for i, wp in enumerate(wps):
# TODO fix requirement to access WATCH_TYPE_TO_FUNCT
print("%d: 0x%08x, %d bytes, %s" % (
i, wp.addr, wp.size,
WATCHPOINT_FUNCTION_NAME_MAP[self.target.selected_core.dwt.WATCH_TYPE_TO_FUNCT[wp.func]]))
def handle_set_log(self, args):
if len(args) < 1:
print("Error: no log level provided")
return 1
if args[0].lower() not in LEVELS:
print("Error: log level must be one of {%s}" % ','.join(LEVELS.keys()))
return 1
logging.getLogger().setLevel(LEVELS[args[0].lower()])
def handle_set_clock(self, args):
if len(args) < 1:
print("Error: no clock frequency provided")
return 1
try:
freq_Hz = self.convert_value(args[0]) * 1000
except:
print("Error: invalid frequency")
return 1
self.probe.set_clock(freq_Hz)
if self.probe.wire_protocol == DebugProbe.Protocol.SWD:
swd_jtag = 'SWD'
elif self.probe.wire_protocol == DebugProbe.Protocol.JTAG:
swd_jtag = 'JTAG'
else:
swd_jtag = '??'
if freq_Hz >= 1000000:
nice_freq = "%.2f MHz" % (freq_Hz / 1000000)
elif freq_Hz > 1000:
nice_freq = "%.2f kHz" % (freq_Hz / 1000)
else:
nice_freq = "%d Hz" % freq_Hz
print("Changed %s frequency to %s" % (swd_jtag, nice_freq))
def handle_exit(self, args):
raise ToolExitException()
def handle_python(self, args):
try:
import pyocd
env = {
'session' : self.session,
'board' : self.board,
'target' : self.target,
'probe' : self.probe,
'link' : self.probe, # Old name
'dp' : self.target.dp,
'aps' : self.target.dp.aps,
'elf' : self.elf,
'map' : self.target.memory_map,
'pyocd' : pyocd,
}
result = eval(args, globals(), env)
if result is not None:
if isinstance(result, six.integer_types):
print("0x%08x (%d)" % (result, result))
else:
print(result)
except Exception as e:
print("Exception while executing expression:", e)
if session.Session.get_current().log_tracebacks:
traceback.print_exc()
def handle_core(self, args):
if len(args) < 1:
print("Core %d is selected" % self.target.selected_core.core_number)
return
core = int(args[0], base=0)
self.target.select_core(core)
print("Selected core %d" % core)
def handle_readdp(self, args):
if len(args) < 1:
print("Missing DP address")
return
addr = self.convert_value(args[0])
result = self.target.dp.read_reg(addr)
print("DP register 0x%x = 0x%08x" % (addr, result))
def handle_writedp(self, args):
if len(args) < 1:
print("Missing DP address")
return
if len(args) < 2:
print("Missing value")
return
addr = self.convert_value(args[0])
data = self.convert_value(args[1])
self.target.dp.write_reg(addr, data)
def handle_readap(self, args):
if len(args) < 1:
print("Missing AP address")
return
if len(args) == 1:
addr = self.convert_value(args[0])
elif len(args) == 2:
addr = (self.convert_value(args[0]) << 24) | self.convert_value(args[1])
result = self.target.dp.read_ap(addr)
print("AP register 0x%x = 0x%08x" % (addr, result))
def handle_writeap(self, args):
if len(args) < 1:
print("Missing AP address")
return
if len(args) < 2:
print("Missing value")
return
if len(args) == 2:
addr = self.convert_value(args[0])
data_arg = 1
elif len(args) == 3:
addr = (self.convert_value(args[0]) << 24) | self.convert_value(args[1])
data_arg = 2
data = self.convert_value(args[data_arg])
self.target.dp.write_ap(addr, data)
def handle_initdp(self, args):
self.target.dp.init()
self.target.dp.power_up_debug()
def handle_makeap(self, args):
if len(args) < 1:
print("Missing APSEL")
return
apsel = self.convert_value(args[0])
if apsel in self.target.aps:
print("AP with APSEL=%d already exists" % apsel)
return
exists = coresight.ap.AccessPort.probe(self.target.dp, apsel)
if not exists:
print("Error: no AP with APSEL={} exists".format(apsel))
return
ap = coresight.ap.AccessPort.create(self.target.dp, apsel)
self.target.dp.aps[apsel] = ap # Same mutable list is target.aps
print("AP#{:d} IDR = {:#010x}".format(apsel, ap.idr))
def handle_where(self, args):
if self.elf is None:
print("No ELF available")
return
if len(args) >= 1:
addr = self.convert_value(args[0])
else:
addr = self.target.read_core_register('pc')
lineInfo = self.elf.address_decoder.get_line_for_address(addr)
if lineInfo is not None:
path = os.path.join(lineInfo.dirname, lineInfo.filename).decode()
line = lineInfo.line
pathline = "{}:{}".format(path, line)
else:
pathline = "<unknown file>"
fnInfo = self.elf.address_decoder.get_function_for_address(addr)
if fnInfo is not None:
name = fnInfo.name.decode()
else:
name = "<unknown symbol>"
print("{addr:#10x} : {fn} : {pathline}".format(addr=addr, fn=name, pathline=pathline))
def handle_symbol(self, args):
if self.elf is None:
print("No ELF available")
return
if len(args) < 1:
raise ToolError("missing symbol name argument")
name = args[0]
sym = self.elf.symbol_decoder.get_symbol_for_name(name)
if sym is not None:
if sym.type == 'STT_FUNC':
name += "()"
print("{name}: {addr:#10x} {sz:#x}".format(name=name, addr=sym.address, sz=sym.size))
else:
print("No symbol named '{}' was found".format(name))
def handle_gdbserver(self, args):
if len(args) < 1:
raise ToolError("missing action argument")
action = args[0].lower()
if action == 'start':
if self._gdbserver is None:
self._gdbserver = GDBServer(self.session, core=self.target.selected_core.core_number)
else:
print("gdbserver is already running")
elif action == 'stop':
if self._gdbserver is not None:
self._gdbserver.stop()
self._gdbserver = None
else:
print("gdbserver is not running")
else:
print("Invalid action")
def handle_reinit(self, args):
self.target.init()
def handle_show(self, args):
if len(args) < 1:
raise ToolError("missing info name argument")
infoName = args[0]
try:
self.info_list[infoName](args[1:])
except KeyError:
raise ToolError("unknown info name '%s'" % infoName)
def handle_show_unique_id(self, args):
print("Unique ID: %s" % self.board.unique_id)
def handle_show_target(self, args):
print("Target: %s" % self.target.part_number)
print("DAP IDCODE: 0x%08x" % self.target.dp.dpidr)
def handle_show_cores(self, args):
if self.target.is_locked():
print("Target is locked")
else:
print("Cores: %d" % len(self.target.cores))
for i, c in enumerate(self.target.cores):
core = self.target.cores[c]
print("Core %d type: %s" % (i, coresight.cortex_m.CORE_TYPE_NAME[core.core_type]))
def handle_show_map(self, args):
pt = prettytable.PrettyTable(["Region", "Start", "End", "Size", "Access", "Sector", "Page"])
pt.align = 'l'
pt.border = False
for region in self.target.get_memory_map():
pt.add_row([
region.name,
"0x%08x" % region.start,
"0x%08x" % region.end,
"0x%08x" % region.length,
region.access,
("0x%08x" % region.sector_size) if region.is_flash else '-',
("0x%08x" % region.page_size) if region.is_flash else '-',
])
print(pt)
def handle_show_peripherals(self, args):
for periph in sorted(self.peripherals.values(), key=lambda x:x.base_address):
print("0x%08x: %s" % (periph.base_address, periph.name))
def handle_show_fault(self, args):
showAll = ('-a' in args)
CFSR = 0xe000ed28
HFSR = 0xe000ed2c
DFSR = 0xe000ed30
MMFAR = 0xe000ed34
BFAR = 0xe000ed38
AFSR = 0xe000ed3c
MMFSR_fields = [
('IACCVIOL', 0),
('DACCVIOL', 1),
('MUNSTKERR', 3),
('MSTKERR', 4),
# ('MMARVALID', 7),
]
BFSR_fields = [
('IBUSERR', 0),
('PRECISERR', 1),
('IMPRECISERR', 2),
('UNSTKERR', 3),
('STKERR', 4),
('LSPERR', 5),
# ('BFARVALID', 7),
]
UFSR_fields = [
('UNDEFINSTR', 0),
('INVSTATE', 1),
('INVPC', 2),
('NOCP', 3),
('STKOF', 4),
('UNALIGNED', 8),
('DIVBYZERO', 9),
]
HFSR_fields = [
('VECTTBL', 1),
('FORCED', 30),
('DEBUGEVT', 31),
]
DFSR_fields = [
('HALTED', 0),
('BKPT', 1),
('DWTTRAP', 2),
('VCATCH', 3),
('EXTERNAL', 4),
]
def print_fields(regname, value, fields, showAll):
if value == 0 and not showAll:
return
print(" %s = 0x%08x" % (regname, value))
for name, bitpos in fields:
bit = (value >> bitpos) & 1
if showAll or bit != 0:
print(" %s = 0x%x" % (name, bit))
cfsr = self.target.read32(CFSR)
mmfsr = cfsr & 0xff
bfsr = (cfsr >> 8) & 0xff
ufsr = (cfsr >> 16) & 0xffff
hfsr = self.target.read32(HFSR)
dfsr = self.target.read32(DFSR)
mmfar = self.target.read32(MMFAR)
bfar = self.target.read32(BFAR)
print_fields('MMFSR', mmfsr, MMFSR_fields, showAll)
if showAll or mmfsr & (1 << 7): # MMFARVALID
print(" MMFAR = 0x%08x" % (mmfar))
print_fields('BFSR', bfsr, BFSR_fields, showAll)
if showAll or bfsr & (1 << 7): # BFARVALID
print(" BFAR = 0x%08x" % (bfar))
print_fields('UFSR', ufsr, UFSR_fields, showAll)
print_fields('HFSR', hfsr, HFSR_fields, showAll)
print_fields('DFSR', dfsr, DFSR_fields, showAll)
def handle_show_nreset(self, args):
rst = int(not self.probe.is_reset_asserted())
print("nRESET = {}".format(rst))
def handle_show_option(self, args):
if len(args) < 1:
raise ToolError("missing user option name argument")
for name in args:
try:
value = self.session.options[name]
print("Option '%s' = %s" % (name, value))
except KeyError:
print("No option with name '%s'" % name)
def handle_show_ap(self, args):
print("MEM-AP #{} is selected".format(self.selected_ap))
def handle_show_hnonsec(self, args):
print("MEM-AP #{} HNONSEC = {} ({})".format(
self.selected_ap,
self.target.aps[self.selected_ap].hnonsec,
("nonsecure" if self.target.aps[self.selected_ap].hnonsec else "secure")))
def handle_show_hprot(self, args):
hprot = self.target.aps[self.selected_ap].hprot
print("MEM-AP #{} HPROT = {:#x}".format(
self.selected_ap,
hprot))
desc = ""
for bitnum in range(7):
bitvalue = (hprot >> bitnum) & 1
desc += " HPROT[{}] = {:#x} ({})\n".format(
bitnum,
bitvalue,
HPROT_BIT_DESC[bitnum][bitvalue])
print(desc, end='')
def handle_set(self, args):
if len(args) < 1:
raise ToolError("missing option name argument")
name = args[0]
try:
self.option_list[name](args[1:])
except KeyError:
raise ToolError("unkown option name '%s'" % name)
def handle_show_vectorcatch(self, args):
catch = self.target.get_vector_catch()
print("Vector catch:")
for mask in sorted(VC_NAMES_MAP.keys()):
name = VC_NAMES_MAP[mask]
s = "ON" if (catch & mask) else "OFF"
print(" {:3} {}".format(s, name))
def handle_set_vectorcatch(self, args):
if len(args) == 0:
print("Missing vector catch setting")
return
try:
self.target.set_vector_catch(utility.cmdline.convert_vector_catch(args[0]))
except ValueError as e:
print(e)
def handle_show_step_interrupts(self, args):
print("Interrupts while stepping:", ("enabled" if self.step_into_interrupt else "disabled"))
def handle_set_step_interrupts(self, args):
if len(args) == 0:
print("Missing argument")
return
self.step_into_interrupt = (args[0] in ('1', 'true', 'yes', 'on'))
def handle_set_ap(self, args):
if len(args) == 0:
print("Missing argument")
return
ap_num = int(args[0], base=0)
if ap_num not in self.target.aps:
print("Invalid AP number {}".format(ap_num))
return
ap = self.target.aps[ap_num]
if not isinstance(ap, MEM_AP):
print("AP #{} is not a MEM-AP".format(ap_num))
return
self.selected_ap = ap_num
def handle_set_hnonsec(self, args):
if len(args) == 0:
print("Missing argument")
return
value = int(args[0], base=0)
self.target.aps[self.selected_ap].hnonsec = value
def handle_set_hprot(self, args):
if len(args) == 0:
print("Missing argument")
return
value = int(args[0], base=0)
self.target.aps[self.selected_ap].hprot = value
def handle_help(self, args):
if not args:
self._list_commands("Commands", COMMAND_INFO, "{cmd:<25} {args:<20} {help}")
print("""
All register names are also available as commands that print the register's value.
Any ADDR or LEN argument will accept a register name.
Prefix line with $ to execute a Python expression.
Prefix line with ! to execute a shell command.""")
print()
self._list_commands("Info", INFO_HELP, "{cmd:<25} {help}")
print()
self._list_commands("Options", OPTION_HELP, "{cmd:<25} {help}")
else:
cmd = args[0].lower()
try:
subcmd = args[1].lower()
except IndexError:
subcmd = None
def print_help(cmd, commandList, usageFormat):
for name, info in commandList.items():
if cmd == name or cmd in info['aliases']:
print(("Usage: " + usageFormat).format(cmd=name, **info))
if len(info['aliases']):
print("Aliases:", ", ".join(info['aliases']))
print(info['help'])
if 'extra_help' in info:
print(info['extra_help'])
if subcmd is None:
print_help(cmd, COMMAND_INFO, "{cmd} {args}")
if cmd == "show":
print()
self._list_commands("Info", INFO_HELP, "{cmd:<25} {help}")
elif cmd == "set":
print()
self._list_commands("Options", OPTION_HELP, "{cmd:<25} {help}")
elif cmd == 'show':
print_help(subcmd, INFO_HELP, "show {cmd}")
elif cmd == 'set':
print_help(subcmd, OPTION_HELP, "set {cmd} VALUE")
else:
print("Error: invalid arguments")
def handle_set_option(self, args):
if len(args) < 1:
raise ToolError("missing user option setting")
opts = convert_session_options(args)
self.session.options.update(opts)
def _list_commands(self, title, commandList, helpFormat):
print(title + ":\n" + ("-" * len(title)))
for cmd in sorted(commandList.keys()):
info = commandList[cmd]
aliases = ', '.join(sorted([cmd] + info['aliases']))
print(helpFormat.format(cmd=aliases, **info))
def is_flash_write(self, addr, width, data):
mem_map = self.board.target.get_memory_map()
region = mem_map.get_region_for_address(addr)
if (region is None) or (not region.is_flash):
return False
if width == 8:
l = len(data)
elif width == 16:
l = len(data) * 2
elif width == 32:
l = len(data) * 4
return region.contains_range(addr, length=l)
def convert_value(self, arg):
"""! @brief Convert an argument to a 32-bit integer.
Handles the usual decimal, binary, and hex numbers with the appropriate prefix.
Also recognizes register names and address dereferencing. Dereferencing using the
ARM assembler syntax. To dereference, put the value in brackets, i.e. '[r0]' or
'[0x1040]'. You can also use put an offset in the brackets after a comma, such as
'[r3,8]'. The offset can be positive or negative, and any supported base.
"""
deref = (arg[0] == '[')
if deref:
arg = arg[1:-1]
offset = 0
if ',' in arg:
arg, offset = arg.split(',')
arg = arg.strip()
offset = int(offset.strip(), base=0)
value = None
if arg.lower() in coresight.cortex_m.CORE_REGISTER:
value = self.target.read_core_register(arg.lower())
print("%s = 0x%08x" % (arg.lower(), value))
else:
subargs = arg.lower().split('.')
if subargs[0] in self.peripherals and len(subargs) > 1:
p = self.peripherals[subargs[0]]
r = [x for x in p.registers if x.name.lower() == subargs[1]]
if len(r):
value = p.base_address + r[0].address_offset
else:
raise ToolError("invalid register '%s' for %s" % (subargs[1], p.name))
elif self.elf is not None:
sym = self.elf.symbol_decoder.get_symbol_for_name(arg)
if sym is not None:
value = sym.address
if value is None:
arg = arg.lower().replace('_', '')
value = int(arg, base=0)
if deref:
value = utility.conversion.byte_list_to_u32le_list(self.target.read_memory_block8(value + offset, 4))[0]
print("[%s,%d] = 0x%08x" % (arg, offset, value))
return value
def dump_registers(self):
# Registers organized into columns for display.
regs = ['r0', 'r6', 'r12',
'r1', 'r7', 'sp',
'r2', 'r8', 'lr',
'r3', 'r9', 'pc',
'r4', 'r10', 'xpsr',
'r5', 'r11', 'primask']
for i, reg in enumerate(regs):
regValue = self.target.read_core_register(reg)
print("{:>8} {:#010x} ".format(reg + ':', regValue), end=' ')
if i % 3 == 2:
print()
def _dump_peripheral_register(self, periph, reg, show_fields):
size = reg.size or 32
addr = periph.base_address + reg.address_offset
value = self.target.read_memory(addr, size)
value_str = format_hex_width(value, size)
print("%s.%s @ %08x = %s" % (periph.name, reg.name, addr, value_str))
if show_fields:
for f in reg.fields:
if f.is_reserved:
continue
msb = f.bit_offset + f.bit_width - 1
lsb = f.bit_offset
f_value = mask.bfx(value, msb, lsb)
v_enum = None
if f.enumerated_values:
for v in f.enumerated_values:
if v.value == f_value:
v_enum = v
break
if f.bit_width == 1:
bits_str = "%d" % lsb
else:
bits_str = "%d:%d" % (msb, lsb)
f_value_str = "%x" % f_value
digits = (f.bit_width + 3) // 4
f_value_str = "0" * (digits - len(f_value_str)) + f_value_str
f_value_bin_str = bin(f_value)[2:]
f_value_bin_str = "0" * (f.bit_width - len(f_value_bin_str)) + f_value_bin_str
if v_enum:
f_value_enum_str = " %s: %s" % (v.name, v_enum.description)
else:
f_value_enum_str = ""
print(" %s[%s] = %s (%s)%s" % (f.name, bits_str, f_value_str, f_value_bin_str, f_value_enum_str))
def print_disasm(self, code, startAddr, maxInstructions=None):
if not isCapstoneAvailable:
print("Warning: Disassembly is not available because the Capstone library is not installed. "
"To install Capstone, run 'pip install capstone'.")
return
if self.target.is_halted():
pc = self.target.read_core_register('pc') & ~1
else:
pc = -1
md = capstone.Cs(capstone.CS_ARCH_ARM, capstone.CS_MODE_THUMB)
addrLine = 0
text = ''
n = 0
for i in md.disasm(code, startAddr):
hexBytes = ''
for b in i.bytes:
hexBytes += '%02x' % b
pc_marker = '*' if (pc == i.address) else ' '
text += "{addr:#010x}:{pc_marker} {bytes:<10}{mnemonic:<8}{args}\n".format(addr=i.address, pc_marker=pc_marker, bytes=hexBytes, mnemonic=i.mnemonic, args=i.op_str)
n += 1
if (maxInstructions is not None) and (n >= maxInstructions):
break
print(text)
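# Illustrative output line (made-up address and encoding; '*' marks the PC):
#   0x000004a0:* 7047      bx      lr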
class PyOCDTool(object):
def get_args(self):
debug_levels = list(LEVELS.keys())
epi = "Available commands:\n" + ', '.join(ALL_COMMANDS)
parser = argparse.ArgumentParser(description='Target inspection utility', epilog=epi)
parser.add_argument('--version', action='version', version=__version__)
parser.add_argument('-j', '--dir', metavar="PATH", dest="project_dir",
help="Set the project directory. Defaults to the directory where pyocd was run.")
parser.add_argument('--config', metavar="PATH", default=None, help="Use a YAML config file.")
parser.add_argument("--no-config", action="store_true", default=None, help="Do not use a configuration file.")
parser.add_argument('--script', metavar="PATH",
help="Use the specified user script. Defaults to pyocd_user.py.")
parser.add_argument("--pack", metavar="PATH", help="Path to a CMSIS Device Family Pack")
parser.add_argument("-H", "--halt", action="store_true", default=None, help="Halt core upon connect.")
parser.add_argument("-N", "--no-init", action="store_true", help="Do not init debug system.")
parser.add_argument('-k', "--clock", metavar='KHZ', default=(DEFAULT_CLOCK_FREQ_HZ // 1000), type=int, help="Set SWD speed in kHz. (Default 1 MHz.)")
parser.add_argument('-b', "--board", action='store', dest="unique_id", metavar='ID', help="Use the specified board. Only a unique part of the board ID needs to be provided.")
parser.add_argument('-t', "--target", action='store', metavar='TARGET', help="Override target.")
parser.add_argument('-e', "--elf", metavar="PATH", help="Optionally specify ELF file being debugged.")
parser.add_argument("-d", "--debug", dest="debug_level", choices=debug_levels, default='warning', help="Set the level of system logging output. Supported choices are: " + ", ".join(debug_levels), metavar="LEVEL")
parser.add_argument("cmd", nargs='?', default=None, help="Command")
parser.add_argument("args", nargs='*', help="Arguments for the command.")
parser.add_argument("-da", "--daparg", dest="daparg", nargs='+', help="Send setting to DAPAccess layer.")
parser.add_argument("-O", "--option", dest="options", metavar="OPTION", action="append", help="Set session option of form 'OPTION=VALUE'.")
parser.add_argument("-W", "--no-wait", action="store_true", help="Do not wait for a probe to be connected if none are available.")
parser.add_argument("--no-deprecation-warning", action="store_true", help="Do not warn about pyocd-tool being deprecated.")
return parser.parse_args()
def configure_logging(self):
level = LEVELS.get(self.args.debug_level, logging.WARNING)
logging.basicConfig(level=level)
def run(self):
# Read command-line arguments.
self.args = self.get_args()
if self.args.cmd is not None:
self.cmd = [[self.args.cmd] + self.args.args]
else:
self.cmd = None
# Set logging level
self.configure_logging()
DAPAccess.set_args(self.args.daparg)
if not self.args.no_deprecation_warning:
LOG.warning("pyocd-tool is deprecated; please use the new combined pyocd tool.")
# Convert args to new names.
self.args.target_override = self.args.target
self.args.frequency = self.args.clock * 1000
commander = PyOCDCommander(self.args, self.cmd)
return commander.run()
def main():
sys.exit(PyOCDTool().run())
if __name__ == '__main__':
main()
| 37.968278
| 232
| 0.513335
|
27007008b2d6e8374a4ee6dd93ebbaebfe74c477
| 3,754
|
py
|
Python
|
server/python/app.py
|
paypay-ayas/paypay-sample-ecommerce
|
04e99105edb4c9180e4fac197e44e3713a7dbd92
|
[
"Apache-2.0"
] | null | null | null |
server/python/app.py
|
paypay-ayas/paypay-sample-ecommerce
|
04e99105edb4c9180e4fac197e44e3713a7dbd92
|
[
"Apache-2.0"
] | null | null | null |
server/python/app.py
|
paypay-ayas/paypay-sample-ecommerce
|
04e99105edb4c9180e4fac197e44e3713a7dbd92
|
[
"Apache-2.0"
] | null | null | null |
from flask import Flask, request, jsonify
from flask_cors import CORS
import paypayopa
import polling
import uuid
import os
import json
_DEBUG = os.environ.get("_DEBUG", default=True)
API_KEY = os.environ.get("API_KEY")
API_SECRET = os.environ.get("API_SECRET")
FRONTEND_PATH = "http://localhost:8080/orderpayment"
if not API_KEY:
raise ValueError("No API_KEY set for Flask application")
if not API_SECRET:
raise ValueError("No API_SECRET set for Flask application")
client = paypayopa.Client(
auth=(API_KEY, API_SECRET),
production_mode=False) # Set True for Production Environment. By Default this is set False for Sandbox Environment
client.set_assume_merchant("MUNE_CAKE_SHOP")
app = Flask(__name__)
cors = CORS(app)
app.config['CORS_HEADERS'] = 'Content-Type'
CAKES = [
{
"title": "cake_shop.mississippi",
"id": 1,
"price": 120,
"image": "darkforest.png"
}, {
"title": "cake_shop.red_velvet",
"id": 2,
"price": 190,
"image": "redvelvet.png"
}, {
"title": "cake_shop.dark_forest",
"id": 3,
"price": 100,
"image": "darkforestcake.png"
}, {
"title": "cake_shop.rainbow",
"id": 4,
"price": 200,
"image": 'rainbow.png'
}, {
"title": "cake_shop.lemon",
"id": 5,
"price": 80,
"image": 'lemon.png'
}, {
"title": "cake_shop.pineapple",
"id": 6,
"price": 110,
"image": 'pineapple.png'
}, {
"title": "cake_shop.banana",
"id": 7,
"price": 90,
"image": 'banana.png'
}, {
"title": "cake_shop.carrot",
"id": 8,
"price": 165,
"image": 'carrot.png'
}, {
"title": "cake_shop.choco",
"id": 9,
"price": 77,
"image": 'choco.png'
}, {
"title": "cake_shop.chocochip",
"id": 10,
"price": 130,
"image": 'chocochip.png'
}, {
"title": "cake_shop.orange",
"id": 11,
"price": 140,
"image": 'orange.png'
}, {
"title": "cake_shop.butterscotch",
"id": 12,
"price": 155,
"image": 'butterscotch.png'
},
]
@app.route('/', methods=['GET', 'OPTIONS'])
def index():
return jsonify(apiStatus="running")
# sanity check route
@app.route('/cakes', methods=['GET', 'OPTIONS'])
def get_cakes():
return jsonify(CAKES)
@app.route('/create-qr', methods=['POST'])
def create_qr():
req = request.json
print(req)
client = paypayopa.Client(auth=(API_KEY, API_SECRET))
merchant_payment_id = uuid.uuid4().hex
payment_details = {
"merchantPaymentId": merchant_payment_id,
"codeType": "ORDER_QR",
"orderItems": req["orderItems"],
"amount": req["amount"],
"redirectUrl": "{}/{}".format(FRONTEND_PATH, merchant_payment_id),
"redirectType": "WEB_LINK",
}
resp = client.code.create_qr_code(data=payment_details)
return json.dumps(resp)
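# Illustrative request body for POST /create-qr: the handler above only reads
# the "orderItems" and "amount" fields from request.json and forwards them to
# the PayPay API, e.g. (made-up shape and values):
#   {"amount": {"amount": 120, "currency": "JPY"},
#    "orderItems": [{"name": "cake_shop.mississippi", "quantity": 1,
#                    "unitPrice": {"amount": 120, "currency": "JPY"}}]}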
def is_correct_response(resp):
print(resp)
return resp
def fetch_payment_details(merchant_id):
resp = client.code.get_payment_details(merchant_id)
if (resp['data'] == 'None'):
return {
'error': 'true'
}
return resp['data']['status']
@app.route('/order-status/<merch_id>', methods=['GET', 'OPTIONS'])
def order_status(merch_id):
print(merch_id)
polling.poll(
lambda: fetch_payment_details(merch_id) in ('COMPLETED', 'FAILED'),
check_success=is_correct_response,
step=2,
timeout=240)
return client.code.get_payment_details(merch_id)
if __name__ == '__main__':
app.run(debug=_DEBUG)
| 24.860927
| 119
| 0.581513
|
f43bbe873098b5e144357c129622723850384036
| 1,236
|
py
|
Python
|
develop/src/db_model.py
|
SecondDim/crawler-base
|
21ba30a3f6a62f2eaee336331abeca04d2a4ed24
|
[
"MIT"
] | 11
|
2019-12-21T14:57:17.000Z
|
2021-07-15T17:32:10.000Z
|
develop/src/db_model.py
|
SecondDim/crawler-base
|
21ba30a3f6a62f2eaee336331abeca04d2a4ed24
|
[
"MIT"
] | 6
|
2020-01-24T13:26:01.000Z
|
2022-02-01T23:05:28.000Z
|
develop/src/db_model.py
|
SecondDim/crawler-base
|
21ba30a3f6a62f2eaee336331abeca04d2a4ed24
|
[
"MIT"
] | 3
|
2020-02-28T06:07:20.000Z
|
2021-01-07T09:58:47.000Z
|
# -*- coding: utf-8 -*-
from datetime import datetime
from pony.orm import *
db_model = Database()
class QueueUrlEttoday(db_model.Entity):
url = Required(str, unique=True)
fetch_state = Required(bool)
fetch_date = Optional(datetime, default=datetime.min)
create_date = Required(datetime, sql_default='CURRENT_TIMESTAMP')
remark = Optional(str)
class Article(db_model.Entity):
url = Required(str, unique=True)
website = Required(str)
tags = Optional(Json)
title = Required(str)
html = Required(LongStr)
text = Required(LongStr)
article_date = Required(datetime)
create_date = Required(datetime, sql_default='CURRENT_TIMESTAMP')
links = Set("ArticleLinks")
urls = Set("ArticleImages")
remark = Optional(str)
class ArticleLinks(db_model.Entity):
article = Required(Article)
name = Optional(str)
url = Required(str, unique=True)
create_date = Required(datetime, sql_default='CURRENT_TIMESTAMP')
remark = Optional(str)
class ArticleImages(db_model.Entity):
article = Required(Article)
alt = Optional(str)
url = Required(str, unique=True)
create_date = Required(datetime, sql_default='CURRENT_TIMESTAMP')
remark = Optional(str)
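# A minimal usage sketch (not part of this module): the entities above are
# attached to `db_model`, so a caller would typically bind a database and
# generate the mapping before use, e.g.
#   db_model.bind(provider='sqlite', filename='crawler.db', create_db=True)
#   db_model.generate_mapping(create_tables=True)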
| 27.466667
| 69
| 0.705502
|
694758e86cdf05efb606c1287dcfef55e6c24ec0
| 19,423
|
py
|
Python
|
pw_transfer/integration_test/cross_language_integration_test.py
|
Tiggerlaboratoriet/pigweed
|
7d7e7ad6223433f45af680f43ab4d75e23ad3257
|
[
"Apache-2.0"
] | null | null | null |
pw_transfer/integration_test/cross_language_integration_test.py
|
Tiggerlaboratoriet/pigweed
|
7d7e7ad6223433f45af680f43ab4d75e23ad3257
|
[
"Apache-2.0"
] | null | null | null |
pw_transfer/integration_test/cross_language_integration_test.py
|
Tiggerlaboratoriet/pigweed
|
7d7e7ad6223433f45af680f43ab4d75e23ad3257
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Copyright 2022 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Cross-language test of pw_transfer.
Usage:
bazel run pw_transfer/integration_test:cross_language_integration_test
Command-line arguments must be provided after a double-dash:
bazel run pw_transfer/integration_test:cross_language_integration_test -- \
--server-port 3304
Which tests to run can be specified as command-line arguments:
bazel run pw_transfer/integration_test:cross_language_integration_test -- \
PwTransferIntegrationTest.test_small_client_write_1_java
"""
import argparse
import asyncio
import logging
from parameterized import parameterized
import pathlib
import random
import sys
import tempfile
import time
from typing import List
import unittest
from google.protobuf import text_format
from pigweed.pw_transfer.integration_test import config_pb2
from rules_python.python.runfiles import runfiles
_LOG = logging.getLogger('pw_transfer_integration_test_proxy')
_LOG.level = logging.DEBUG
_LOG.addHandler(logging.StreamHandler(sys.stdout))
class LogMonitor():
"""Monitors lines read from the reader, and logs them."""
class Error(Exception):
"""Raised if wait_for_line reaches EOF before expected line."""
pass
def __init__(self, prefix: str, reader: asyncio.StreamReader):
"""Initializer.
Args:
prefix: Prepended to read lines before they are logged.
reader: StreamReader to read lines from.
"""
self._prefix = prefix
self._reader = reader
# Queue of messages waiting to be monitored.
self._queue = asyncio.Queue()
# Relog any messages read from the reader, and enqueue them for
# monitoring.
self._relog_and_enqueue_task = asyncio.create_task(
self._relog_and_enqueue())
async def wait_for_line(self, msg: str):
"""Wait for a line containing msg to be read from the reader."""
while True:
line = await self._queue.get()
if not line:
raise LogMonitor.Error(
f"Reached EOF before getting line matching {msg}")
if msg in line.decode():
return
async def wait_for_eof(self):
"""Wait for the reader to reach EOF, relogging any lines read."""
# Drain the queue, since we're not monitoring it any more.
drain_queue = asyncio.create_task(self._drain_queue())
await asyncio.gather(drain_queue, self._relog_and_enqueue_task)
async def _relog_and_enqueue(self):
"""Reads lines from the reader, logs them, and puts them in queue."""
while True:
line = await self._reader.readline()
await self._queue.put(line)
if line:
_LOG.info(f"{self._prefix} {line.decode().rstrip()}")
else:
# EOF. Note, we still put the EOF in the queue, so that the
# queue reader can process it appropriately.
return
async def _drain_queue(self):
while True:
line = await self._queue.get()
if not line:
# EOF.
return
class MonitoredSubprocess:
"""A subprocess with monitored asynchronous communication."""
@staticmethod
async def create(cmd: List[str], prefix: str, stdinput: bytes):
"""Starts the subprocess and writes stdinput to stdin.
This method returns once stdinput has been written to stdin. The
MonitoredSubprocess continues to log the process's stderr and stdout
(with the prefix) until it terminates.
Args:
cmd: Command line to execute.
prefix: Prepended to process logs.
stdinput: Written to stdin on process startup.
"""
self = MonitoredSubprocess()
self._process = await asyncio.create_subprocess_exec(
*cmd,
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE)
self._stderr_monitor = LogMonitor(f"{prefix} ERR:",
self._process.stderr)
self._stdout_monitor = LogMonitor(f"{prefix} OUT:",
self._process.stdout)
self._process.stdin.write(stdinput)
await self._process.stdin.drain()
self._process.stdin.close()
await self._process.stdin.wait_closed()
return self
async def wait_for_line(self, stream: str, msg: str, timeout: float):
"""Wait for a line containing msg to be read on the stream."""
if stream == "stdout":
monitor = self._stdout_monitor
elif stream == "stderr":
monitor = self._stderr_monitor
else:
raise ValueError(
"Stream must be 'stdout' or 'stderr', got {stream}")
await asyncio.wait_for(monitor.wait_for_line(msg), timeout)
def returncode(self):
return self._process.returncode
def terminate(self):
"""Terminate the process."""
self._process.terminate()
async def wait_for_termination(self, timeout: float):
"""Wait for the process to terminate."""
await asyncio.wait_for(
asyncio.gather(self._process.wait(),
self._stdout_monitor.wait_for_eof(),
self._stderr_monitor.wait_for_eof()), timeout)
async def terminate_and_wait(self, timeout: float):
"""Terminate the process and wait for it to exit."""
if self.returncode() is not None:
# Process already terminated
return
self.terminate()
await self.wait_for_termination(timeout)
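# Illustrative use of MonitoredSubprocess (mirrors _start_server below; the
# command line and config bytes here are made up):
#   proc = await MonitoredSubprocess.create(
#       ["./server", "3300"], "SERVER", b"chunk_size_bytes: 216")
#   await proc.wait_for_line("stderr", "Starting pw_rpc server on port", 5)
#   await proc.terminate_and_wait(5)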
# TODO(b/232805936): Extend tests to use different resource IDs and do multiple
# reads/writes.
class PwTransferIntegrationTest(unittest.TestCase):
# Prefix for log messages coming from the harness (as opposed to the server,
# client, or proxy processes). Padded so that the length is the same as
# "SERVER OUT:".
_PREFIX = "HARNESS: "
SERVER_PORT = 3300
CLIENT_PORT = 3301
JAVA_CLIENT_BINARY = None
CPP_CLIENT_BINARY = None
PYTHON_CLIENT_BINARY = None
PROXY_BINARY = None
SERVER_BINARY = None
@classmethod
def setUpClass(cls):
# TODO(tpudlik): This is Bazel-only. Support gn, too.
r = runfiles.Create()
# For each binary used by this test, get the binaries produced by the
# build system if an override has not been specified.
if cls.JAVA_CLIENT_BINARY is None:
cls.JAVA_CLIENT_BINARY = r.Rlocation(
"pigweed/pw_transfer/integration_test/java_client")
if cls.CPP_CLIENT_BINARY is None:
cls.CPP_CLIENT_BINARY = r.Rlocation(
"pigweed/pw_transfer/integration_test/cpp_client")
if cls.PYTHON_CLIENT_BINARY is None:
cls.PYTHON_CLIENT_BINARY = r.Rlocation(
"pigweed/pw_transfer/integration_test/python_client")
if cls.PROXY_BINARY is None:
cls.PROXY_BINARY = r.Rlocation(
"pigweed/pw_transfer/integration_test/proxy")
if cls.SERVER_BINARY is None:
cls.SERVER_BINARY = r.Rlocation(
"pigweed/pw_transfer/integration_test/server")
cls._CLIENT_BINARY = {
"cpp": cls.CPP_CLIENT_BINARY,
"java": cls.JAVA_CLIENT_BINARY,
"python": cls.PYTHON_CLIENT_BINARY,
}
async def _start_client(self, client_type: str,
config: config_pb2.ClientConfig):
_LOG.info(f"{self._PREFIX} Starting client with config\n{config}")
self._client = await MonitoredSubprocess.create(
[self._CLIENT_BINARY[client_type],
str(self.CLIENT_PORT)], "CLIENT",
str(config).encode('ascii'))
async def _start_server(self, config: config_pb2.ServerConfig):
_LOG.info(f"{self._PREFIX} Starting server with config\n{config}")
self._server = await MonitoredSubprocess.create(
[self.SERVER_BINARY, str(self.SERVER_PORT)], "SERVER",
str(config).encode('ascii'))
async def _start_proxy(self, config: config_pb2.ProxyConfig):
_LOG.info(f"{self._PREFIX} Starting proxy with config\n{config}")
self._proxy = await MonitoredSubprocess.create(
[
self.PROXY_BINARY, "--server-port",
str(self.SERVER_PORT), "--client-port",
str(self.CLIENT_PORT)
],
# Extra space in "PROXY " so that it lines up with "SERVER".
"PROXY ",
str(config).encode('ascii'))
async def _perform_write(self, server_config: config_pb2.ServerConfig,
client_type: str,
client_config: config_pb2.ClientConfig,
proxy_config: config_pb2.ProxyConfig) -> None:
"""Performs a pw_transfer write.
Args:
server_config: Server configuration.
client_type: Either "cpp", "java", or "python".
client_config: Client configuration.
proxy_config: Proxy configuration.
"""
# Timeout for components (server, proxy) to come up or shut down after
# write is finished or a signal is sent. Approximately arbitrary. Should
# not be too long so that we catch bugs in the server that prevent it
# from shutting down.
TIMEOUT = 5 # seconds
try:
await self._start_proxy(proxy_config)
await self._proxy.wait_for_line("stderr",
"Listening for client connection",
TIMEOUT)
await self._start_server(server_config)
await self._server.wait_for_line("stderr",
"Starting pw_rpc server on port",
TIMEOUT)
await self._start_client(client_type, client_config)
# No timeout: the client will only exit once the transfer
# completes, and this can take a long time for large payloads.
await self._client.wait_for_termination(None)
self.assertEqual(self._client.returncode(), 0)
# Wait for the server to exit.
await self._server.wait_for_termination(TIMEOUT)
self.assertEqual(self._server.returncode(), 0)
finally:
# Stop the server, if still running. (Only expected if the
# wait_for above timed out.)
if self._server:
await self._server.terminate_and_wait(TIMEOUT)
# Stop the proxy. Unlike the server, we expect it to still be
# running at this stage.
if self._proxy:
await self._proxy.terminate_and_wait(TIMEOUT)
@parameterized.expand([
("cpp"),
("java"),
("python"),
])
def test_small_client_write(self, client_type):
payload = b"some data"
server_config = config_pb2.ServerConfig(
chunk_size_bytes=216,
pending_bytes=32 * 1024,
chunk_timeout_seconds=5,
transfer_service_retries=4,
extend_window_divisor=32,
)
client_config = config_pb2.ClientConfig(
max_retries=5,
initial_chunk_timeout_ms=10000,
chunk_timeout_ms=4000,
)
proxy_config = text_format.Parse(
"""
client_filter_stack: [
{ hdlc_packetizer: {} },
{ data_dropper: {rate: 0.01, seed: 1649963713563718435} }
]
server_filter_stack: [
{ hdlc_packetizer: {} },
{ data_dropper: {rate: 0.01, seed: 1649963713563718436} }
]""", config_pb2.ProxyConfig())
resource_id = 12
with tempfile.NamedTemporaryFile(
) as f_payload, tempfile.NamedTemporaryFile() as f_server_output:
server_config.resources[resource_id].destination_paths.append(
f_server_output.name)
client_config.transfer_actions.append(
config_pb2.TransferAction(
resource_id=resource_id,
file_path=f_payload.name,
transfer_type=config_pb2.TransferAction.TransferType.
WRITE_TO_SERVER))
f_payload.write(payload)
f_payload.flush() # Ensure contents are there to read!
asyncio.run(
self._perform_write(server_config, client_type, client_config,
proxy_config))
self.assertEqual(f_server_output.read(), payload)
@parameterized.expand([
("cpp"),
("java"),
("python"),
])
def test_3mb_write_dropped_data(self, client_type):
server_config = config_pb2.ServerConfig(
chunk_size_bytes=216,
pending_bytes=32 * 1024,
chunk_timeout_seconds=5,
transfer_service_retries=4,
extend_window_divisor=32,
)
client_config = config_pb2.ClientConfig(
max_retries=5,
initial_chunk_timeout_ms=10000,
chunk_timeout_ms=4000,
)
proxy_config = text_format.Parse(
"""
client_filter_stack: [
{ rate_limiter: {rate: 50000} },
{ hdlc_packetizer: {} },
{ data_dropper: {rate: 0.01, seed: 1649963713563718435} }
]
server_filter_stack: [
{ rate_limiter: {rate: 50000} },
{ hdlc_packetizer: {} },
{ data_dropper: {rate: 0.01, seed: 1649963713563718436} }
]""", config_pb2.ProxyConfig())
payload = random.Random(1649963713563718437).randbytes(3 * 1024 * 1024)
resource_id = 12
with tempfile.NamedTemporaryFile(
) as f_payload, tempfile.NamedTemporaryFile() as f_server_output:
server_config.resources[resource_id].destination_paths.append(
f_server_output.name)
client_config.transfer_actions.append(
config_pb2.TransferAction(
resource_id=resource_id,
file_path=f_payload.name,
transfer_type=config_pb2.TransferAction.TransferType.
WRITE_TO_SERVER))
f_payload.write(payload)
f_payload.flush() # Ensure contents are there to read!
asyncio.run(
self._perform_write(server_config, client_type, client_config,
proxy_config))
self.assertEqual(f_server_output.read(), payload)
@parameterized.expand([
("cpp"),
("java"),
("python"),
])
def test_3mb_write_reordered_data(self, client_type):
server_config = config_pb2.ServerConfig(
chunk_size_bytes=216,
pending_bytes=32 * 1024,
chunk_timeout_seconds=5,
transfer_service_retries=4,
extend_window_divisor=32,
)
client_config = config_pb2.ClientConfig(
max_retries=5,
initial_chunk_timeout_ms=10000,
chunk_timeout_ms=4000,
)
proxy_config = text_format.Parse(
"""
client_filter_stack: [
{ rate_limiter: {rate: 50000} },
{ hdlc_packetizer: {} },
{ data_transposer: {rate: 0.005, timeout: 0.5, seed: 1649963713563718435} }
]
server_filter_stack: [
{ rate_limiter: {rate: 50000} },
{ hdlc_packetizer: {} },
{ data_transposer: {rate: 0.005, timeout: 0.5, seed: 1649963713563718435} }
]""", config_pb2.ProxyConfig())
payload = random.Random(1649963713563718437).randbytes(3 * 1024 * 1024)
resource_id = 12
with tempfile.NamedTemporaryFile(
) as f_payload, tempfile.NamedTemporaryFile() as f_server_output:
server_config.resources[resource_id].destination_paths.append(
f_server_output.name)
client_config.transfer_actions.append(
config_pb2.TransferAction(
resource_id=resource_id,
file_path=f_payload.name,
transfer_type=config_pb2.TransferAction.TransferType.
WRITE_TO_SERVER))
f_payload.write(payload)
f_payload.flush() # Ensure contents are there to read!
asyncio.run(
self._perform_write(server_config, client_type, client_config,
proxy_config))
self.assertEqual(f_server_output.read(), payload)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--server-port',
type=int,
help=
'Port of the integration test server. The proxy will forward connections to this port',
)
parser.add_argument(
'--client-port',
type=int,
help=
'Port on which to listen for connections from integration test client.',
)
parser.add_argument(
'--java-client',
type=pathlib.Path,
default=None,
help='Path to the Java transfer client to use in tests',
)
parser.add_argument(
'--cpp-client',
type=pathlib.Path,
default=None,
help='Path to the C++ transfer client to use in tests',
)
parser.add_argument(
'--python-client',
type=pathlib.Path,
default=None,
help='Path to the Python transfer client to use in tests',
)
parser.add_argument(
'--server',
type=pathlib.Path,
default=None,
help='Path to the transfer server to use in tests',
)
parser.add_argument(
'--proxy',
type=pathlib.Path,
default=None,
help=('Path to the proxy binary to use in tests to allow interception '
'of client/server data'),
)
(args, passthrough_args) = parser.parse_known_args()
if args.server_port:
PwTransferIntegrationTest.SERVER_PORT = args.server_port
if args.client_port:
PwTransferIntegrationTest.CLIENT_PORT = args.client_port
PwTransferIntegrationTest.JAVA_CLIENT_BINARY = args.java_client
PwTransferIntegrationTest.CPP_CLIENT_BINARY = args.cpp_client
PwTransferIntegrationTest.PYTHON_CLIENT_BINARY = args.python_client
PwTransferIntegrationTest.SERVER_BINARY = args.server
PwTransferIntegrationTest.PROXY_BINARY = args.proxy
unittest_args = [sys.argv[0]] + passthrough_args
unittest.main(argv=unittest_args)
| 37.351923
| 96
| 0.609278
|
a9689fea606dbed51c1787b785234244caa74978
| 666
|
py
|
Python
|
setup.py
|
vojtek/queuing
|
8c6f6ab4913c06e84d461b27feea989d3ce3cea6
|
[
"MIT"
] | null | null | null |
setup.py
|
vojtek/queuing
|
8c6f6ab4913c06e84d461b27feea989d3ce3cea6
|
[
"MIT"
] | null | null | null |
setup.py
|
vojtek/queuing
|
8c6f6ab4913c06e84d461b27feea989d3ce3cea6
|
[
"MIT"
] | null | null | null |
import pathlib
from setuptools import setup
HERE = pathlib.Path(__file__).parent
README = (HERE / "README.md").read_text()
setup(name='queuing',
version='0.3.1',
description='Multithreading producer-consumer solution',
long_description=README,
long_description_content_type="text/markdown",
url='https://github.com/vojtek/queuing',
author='Wojciech Kolodziej',
author_email='vojtekkol@o2.pl',
license='MIT',
packages=['queuing'],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
])
| 28.956522
| 63
| 0.644144
|
48f8d58bd9a31fdac933d1530a79ae35ec90e37d
| 610
|
py
|
Python
|
September 2020/05-Functions-Advanced/07-Chairs.py
|
eclipse-ib/Software-University-Professional-Advanced-Module
|
636385f9e5521840f680644824d725d074b93c9a
|
[
"MIT"
] | null | null | null |
September 2020/05-Functions-Advanced/07-Chairs.py
|
eclipse-ib/Software-University-Professional-Advanced-Module
|
636385f9e5521840f680644824d725d074b93c9a
|
[
"MIT"
] | null | null | null |
September 2020/05-Functions-Advanced/07-Chairs.py
|
eclipse-ib/Software-University-Professional-Advanced-Module
|
636385f9e5521840f680644824d725d074b93c9a
|
[
"MIT"
] | null | null | null |
# Variant using only a for loop:
from itertools import combinations
people = input().split(", ")
chairs = int(input())
combos = combinations(people, chairs)
for combination in combos:
print(', '.join(combination))
# # Variant using a function:
# from itertools import combinations
#
#
# def test(people, chairs):
# people_comb = list(combinations(people, chairs))
# return people_comb
#
#
# # for i in test([name for name in input().split(", ")], int(input())):
# # print(', '.join(i))
#
#
# print('\n'.join([', '.join(i) for i in test([name for name in input().split(", ")], int(input()))]))
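# Illustrative run (made-up input): for people "Peter, George, Alice" and
# 2 chairs, the loop above prints each 2-combination on its own line:
#   Peter, George
#   Peter, Alice
#   George, Alice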
| 24.4
| 102
| 0.644262
|
aa28bcbc13ec546e45e0ae7963ae4a9f4f98652e
| 4,188
|
py
|
Python
|
scripts/semdep_eval.py
|
Dayitva/Parser-v3
|
45754bb722fabefdb18f67ab4c32a41d24114bca
|
[
"Apache-2.0"
] | 93
|
2018-08-07T02:54:47.000Z
|
2022-02-14T13:47:52.000Z
|
scripts/semdep_eval.py
|
Dayitva/Parser-v3
|
45754bb722fabefdb18f67ab4c32a41d24114bca
|
[
"Apache-2.0"
] | 10
|
2019-01-08T02:37:36.000Z
|
2021-01-09T07:45:02.000Z
|
scripts/semdep_eval.py
|
Dayitva/Parser-v3
|
45754bb722fabefdb18f67ab4c32a41d24114bca
|
[
"Apache-2.0"
] | 29
|
2018-07-31T09:08:03.000Z
|
2022-03-16T14:50:13.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2017 Timothy Dozat
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import Counter, namedtuple
import codecs
import sys
import numpy as np
#===============================================================
def compute_F1(gold_files, sys_files, labeled=False):
""""""
correct = 0
predicted = 0
actual = 0
n_tokens = 0
n_sequences = 0
current_seq_correct = False
n_correct_sequences = 0
current_fp = 0
current_sent = 0
for gold_file, sys_file in zip(gold_files, sys_files):
with codecs.open(gold_file, encoding='utf-8') as gf,\
codecs.open(sys_file, encoding='utf-8') as sf:
gold_line = gf.readline()
gold_i = 1
sys_i = 0
while gold_line:
while gold_line.startswith('#'):
current_sent += 1
gold_i += 1
n_sequences += 1
n_correct_sequences += current_seq_correct
current_seq_correct = True
gold_line = gf.readline()
if gold_line.rstrip() != '':
sys_line = sf.readline()
sys_i += 1
while sys_line.startswith('#') or sys_line.rstrip() == '' or sys_line.split('\t')[0] == '0':
sys_line = sf.readline()
sys_i += 1
gold_line = gold_line.rstrip().split('\t')
sys_line = sys_line.rstrip().split('\t')
assert sys_line[1] == gold_line[1], 'Files are misaligned at lines {}, {}'.format(gold_i, sys_i)
# Compute the gold edges
gold_node = gold_line[8]
if gold_node != '_':
gold_node = gold_node.split('|')
if labeled:
gold_edges = set(tuple(gold_edge.split(':', 1)) for gold_edge in gold_node)
else:
gold_edges = set(gold_edge.split(':', 1)[0] for gold_edge in gold_node)
else:
gold_edges = set()
# Compute the sys edges
sys_node = sys_line[8]
if sys_node != '_':
sys_node = sys_node.split('|')
if labeled:
sys_edges = set(tuple(sys_edge.split(':', 1)) for sys_edge in sys_node)
else:
sys_edges = set(sys_edge.split(':', 1)[0] for sys_edge in sys_node)
else:
sys_edges = set()
correct_edges = gold_edges & sys_edges
if len(correct_edges) != len(gold_edges):
current_seq_correct = False
correct += len(correct_edges)
predicted += len(sys_edges)
actual += len(gold_edges)
n_tokens += 1
#current_fp += len(sys_edges) - len(gold_edges & sys_edges)
gold_line = gf.readline()
gold_i += 1
#print(correct, predicted - correct, actual - correct)
Accuracy = namedtuple('Accuracy', ['precision', 'recall', 'F1', 'seq_acc'])
precision = correct / (predicted + 1e-12)
recall = correct / (actual + 1e-12)
F1 = 2 * precision * recall / (precision + recall + 1e-12)
seq_acc = n_correct_sequences / n_sequences
return Accuracy(precision, recall, F1, seq_acc)
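# Worked example (made-up counts) for the computation above: with
# correct = 8, predicted = 10, actual = 9,
#   precision = 8/10 = 0.800
#   recall    = 8/9  ~ 0.889
#   F1        = 2*0.800*0.889/(0.800+0.889) ~ 0.842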
#===============================================================
def main():
""""""
files = sys.argv[1:]
n_files = len(files)
assert (n_files % 2) == 0
gold_files, sys_files = files[:n_files//2], files[n_files//2:]
UAS = compute_F1(gold_files, sys_files, labeled=False)
LAS = compute_F1(gold_files, sys_files, labeled=True)
#print(UAS.F1, UAS.seq_acc)
print('{:0.1f}'.format(LAS.F1*100))
if __name__ == '__main__':
main()
| 34.04878
| 106
| 0.591691
|
fc7cb9a1304ca3bb447a5c4490996fd15222b2d8
| 1,157
|
py
|
Python
|
PWEM_functions/K_matrix.py
|
zhaonat/RCWA
|
a28fdf90b5b5fc0fedacc8bb44a0a0c2f2a02143
|
[
"MIT"
] | 66
|
2019-03-11T11:59:26.000Z
|
2022-03-25T05:17:11.000Z
|
PWEM_functions/K_matrix.py
|
Ydeh22/Rigorous-Coupled-Wave-Analysis
|
15f4300601899d08f57c95863df88280ab6f0d21
|
[
"MIT"
] | 5
|
2018-09-23T05:18:15.000Z
|
2021-11-07T20:16:40.000Z
|
PWEM_functions/K_matrix.py
|
Ydeh22/Rigorous-Coupled-Wave-Analysis
|
15f4300601899d08f57c95863df88280ab6f0d21
|
[
"MIT"
] | 33
|
2018-09-23T05:16:59.000Z
|
2022-03-23T08:37:07.000Z
|
'''
functions which generate the K matrices along each direction
'''
import numpy as np
from scipy import sparse
def K_matrix_cubic_2D(beta_x, beta_y, a_x, a_y, N_p, N_q):
# K_i = beta_i - pT1i - q T2i - r*T3i
# but here we apply it only for cubic and tetragonal geometries in 2D
'''
:param beta_i:
:param T1:reciprocal lattice vector 1
:param T2:
:param T3:
:return:
'''
k_x = beta_x - 2*np.pi*np.arange(-int(N_p), int(N_p)+1)/a_x;
k_y = beta_y - 2*np.pi*np.arange(-int(N_q), int(N_q)+1)/a_y;
kx, ky = np.meshgrid(k_x, k_y)
# final matrix should be sparse...since it is diagonal at most
Kx = sparse.diags(np.ndarray.flatten(kx)); #NxNy dimension matrix
Ky = sparse.diags(np.ndarray.flatten(ky))
Kx = Kx.astype('complex');
Ky = Ky.astype('complex')
return Kx, Ky
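# In explicit form, the diagonal entries built above are (illustrative notation):
#   k_x(p) = beta_x - 2*pi*p/a_x,  p = -N_p..N_p
#   k_y(q) = beta_y - 2*pi*q/a_y,  q = -N_q..N_q
# and Kx, Ky are diagonal over the flattened (q, p) grid, so each has
# (2*N_p+1)*(2*N_q+1) entries.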
def K_matrix_cubic_3D(beta_x, beta_y, a_x, a_y, N_p, N_q):
# K_i = beta_i - pT1i - q T2i - r*T3i
# but here we apply it only for cubic and tetragonal geometries in 2D
'''
:param beta_i:
:param T1:reciprocal lattice vector 1
:param T2:
:param T3:
:return:
'''
return None;
| 28.925
| 73
| 0.631806
|
83372a3e86ce6491eb67f08b8deac76303e042fd
| 37,531
|
py
|
Python
|
torch/fx/_symbolic_trace.py
|
stungkit/pytorch
|
0f05e398705bf15406bce79f7ee57d3935ad2abd
|
[
"Intel"
] | null | null | null |
torch/fx/_symbolic_trace.py
|
stungkit/pytorch
|
0f05e398705bf15406bce79f7ee57d3935ad2abd
|
[
"Intel"
] | 1
|
2022-01-10T18:39:28.000Z
|
2022-01-10T19:15:57.000Z
|
torch/fx/_symbolic_trace.py
|
stungkit/pytorch
|
0f05e398705bf15406bce79f7ee57d3935ad2abd
|
[
"Intel"
] | 1
|
2022-03-26T14:42:50.000Z
|
2022-03-26T14:42:50.000Z
|
import builtins
import functools
import inspect
import math
import os
from types import CodeType, FunctionType, ModuleType
from typing import Any, Dict, NamedTuple, Optional, Set, Tuple, Type, List, Callable, Union
from itertools import chain
import torch
from torch._C import ScriptObject # type: ignore[attr-defined]
import torch.utils._pytree as pytree
from ._compatibility import compatibility
from .node import Argument, map_aggregate, base_types
from .graph import Graph, _PyTreeInfo, _PyTreeCodeGen
from .graph_module import GraphModule
from .proxy import TracerBase, Proxy, ParameterProxy
HAS_VARSTUFF = inspect.CO_VARARGS | inspect.CO_VARKEYWORDS
# These need to run in global scope to handle nested calls correctly
_orig_module_call : Callable = torch.nn.Module.__call__
_orig_module_getattr : Callable = torch.nn.Module.__getattr__
_proxyable_classes : Dict[Type, None] = {}
@compatibility(is_backward_compatible=True)
class ProxyableClassMeta(type):
"""
ProxyableClassMeta allows you to make construction of a given Python class
symbolically traceable. For example::
import torch
import torch.fx
class TensorPair(metaclass=torch.fx.ProxyableClassMeta):
def __init__(self, left, right):
self.left, self.right = left, right
def add(self, other):
l = self.left + other.left
r = self.right + other.right
return TensorPair(l, r)
def mul(self, other):
l = self.left * other.left
r = self.right * other.right
return TensorPair(l, r)
def use_tensor_pair_ctor(x : TensorPair, y : torch.Tensor):
s = x.add(TensorPair(y, y))
return s.mul(x)
x = TensorPair(torch.randn(5, 3), torch.randn(5, 3))
y = torch.randn(5, 3)
ref_out = use_tensor_pair_ctor(x, y)
traced = torch.fx.symbolic_trace(use_tensor_pair_ctor)
print(traced.code)
'''
def forward(self, x : __main___TensorPair, y : torch.Tensor):
tensor_pair = __main___TensorPair(y, y); y = None
add = x.add(tensor_pair); tensor_pair = None
mul = add.mul(x); add = x = None
return mul
'''
From this example, we can see that construction of a class (``TensorPair``)
defined with ``ProxyableClassMeta`` as metaclass can be recorded in symbolic
tracing.
"""
def __init__(cls, name, bases, attrs):
_proxyable_classes.setdefault(cls)
super().__init__(name, bases, attrs)
def __call__(cls, *args, **kwargs):
instance = cls.__new__(cls) # type: ignore[call-overload]
found_proxies = []
def check_proxy(a):
if isinstance(a, Proxy):
found_proxies.append(a)
map_aggregate(args, check_proxy)
map_aggregate(kwargs, check_proxy)
if len(found_proxies) != 0:
tracer = found_proxies[0].tracer
return tracer.create_proxy('call_function', cls, args, kwargs)
else:
cls.__init__(instance, *args, **kwargs) # type: ignore[misc]
return instance
def _patch_function(fn: FunctionType, nargs: int) -> FunctionType:
co = fn.__code__
co_flags = co.co_flags & ~HAS_VARSTUFF
co_args : tuple
if hasattr(co, "co_posonlyargcount"):
co_args = (
nargs, 0,
0, co.co_nlocals, co.co_stacksize,
co_flags, co.co_code, co.co_consts, co.co_names,
co.co_varnames, co.co_filename, co.co_name,
co.co_firstlineno, co.co_lnotab, co.co_freevars,
co.co_cellvars
)
else:
co_args = (
nargs, 0, co.co_nlocals,
co.co_stacksize, co_flags, co.co_code, co.co_consts,
co.co_names, co.co_varnames, co.co_filename,
co.co_name, co.co_firstlineno, co.co_lnotab,
co.co_freevars, co.co_cellvars)
new_code = CodeType(*co_args) # type: ignore[arg-type]
return FunctionType(new_code, fn.__globals__, fn.__name__, fn.__defaults__, fn.__closure__)
# we need to insert placeholder nodes for *args and **kwargs
# we can't call this function normally, otherwise it would try to unpack them
# instead, let's make python think that args and kwargs are normal variables
@compatibility(is_backward_compatible=False)
class PHBase(object):
"""
Object representing an input placeholder to `concrete_args`
"""
def __repr__(self):
return 'PH'
PH = PHBase()
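# Illustrative use of PH (hypothetical function f): mark an entry of a
# structured concrete argument as a placeholder while specializing the rest,
# e.g. symbolic_trace(f, concrete_args={'cfg': {'flag': False, 'x': PH}}).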
@compatibility(is_backward_compatible=True)
class Tracer(TracerBase):
# Reference: https://github.com/pytorch/pytorch/issues/54354
# The first line of this docstring overrides the one Sphinx generates for the
# documentation. We need it so that Sphinx doesn't leak `math`'s path from the
# build environment (e.g. `<module 'math' from '/leaked/path').
"""Tracer(autowrap_modules=(math,), autowrap_functions=())
``Tracer`` is the class that implements the symbolic tracing functionality
of ``torch.fx.symbolic_trace``. A call to ``symbolic_trace(m)`` is equivalent
to ``Tracer().trace(m)``.
Tracer can be subclassed to override various behaviors of the tracing
process. The different behaviors that can be overridden are described
in the docstrings of the methods on this class.
"""
# Not checking BC on this API because the default value for `autowrap_modules`
# includes the local filepath to the `math` module, which would jitter
# across machines.
@compatibility(is_backward_compatible=True)
def __init__(self, autowrap_modules: Tuple[ModuleType] = (math, ),
autowrap_functions: Tuple[Callable, ...] = (),
param_shapes_constant: bool = False) -> None:
# This method's signature is overridden by the first line of this class'
# docstring. If this method's signature is modified, the signature that
# overrides it also should be modified accordingly.
"""
Construct a Tracer object.
Args:
autowrap_modules (Tuple[ModuleType]): defaults to `(math, )`,
Python modules whose functions should be wrapped automatically
without needing to use fx.wrap(). Backward-compatibility for
this parameter is guaranteed.
autowrap_functions (Tuple[Callable, ...]): defaults to `()`,
Python functions that should be wrapped automatically without
needing to use fx.wrap(). Backward compatibility for this
parameter is guaranteed.
param_shapes_constant (bool): When this flag is set, calls to shape,
size and a few other shape-like attributes of a module's parameter
will be evaluated directly, rather than returning a new Proxy value
for an attribute access. Backward compatibility for this parameter
is guaranteed.
"""
super().__init__()
# Functions we will eagerly wrap when we see them while tracing
# this captures both `math.sqrt()` and `from math import sqrt` automatically
self._autowrap_function_ids: Set[int] = {
id(value) for name, value in chain(*[m.__dict__.items() for m in autowrap_modules])
if not name.startswith("_") and callable(value)}
self._autowrap_function_ids.update(set([id(f) for f in autowrap_functions]))
# Python modules to apply autowrap to at the start, in addition to
# modules we see while tracing
self._autowrap_search: List[ModuleType] = list(autowrap_modules)
self.param_shapes_constant = param_shapes_constant
self.submodule_paths: Optional[Dict[torch.nn.Module, str]] = None
@compatibility(is_backward_compatible=True)
def create_arg(self, a: Any) -> 'Argument':
"""
A method to specify the behavior of tracing when preparing values to
be used as arguments to nodes in the ``Graph``.
By default, the behavior includes:
#. Iterate through collection types (e.g. tuple, list, dict) and recursively
call ``create_arg`` on the elements.
#. Given a Proxy object, return a reference to the underlying IR ``Node``
#. Given a non-Proxy Tensor object, emit IR for various cases:
* For a Parameter, emit a ``get_attr`` node referring to that Parameter
* For a non-Parameter Tensor, store the Tensor away in a special
attribute referring to that attribute.
This method can be overridden to support more types.
Args:
a (Any): The value to be emitted as an ``Argument`` in the ``Graph``.
Returns:
The value ``a`` converted into the appropriate ``Argument``
"""
# The base tracer is used to construct Graphs when there is no associated
# module hierarchy, so it can never create parameter references.
# The default tracer adds the ability to refer to parameters when
# tracing modules.
if isinstance(a, torch.nn.Parameter):
for n, p in self.root.named_parameters():
if a is p:
return self.create_node('get_attr', n, (), {})
raise NameError('parameter is not a member of this module')
elif isinstance(a, torch.Tensor):
for n_, p_ in self.root.named_buffers():
if a is p_:
return self.create_node('get_attr', n_, (), {})
elif isinstance(a, torch.nn.Module):
for n_, p_ in self.root.named_modules():
if a is p_:
return self.create_node('get_attr', n_, (), {})
# For NamedTuple instances that appear literally as args, we emit
# a node to construct the NamedTuple and use that Node as the argument.
if isinstance(a, tuple) and hasattr(a, '_fields'):
args = tuple(self.create_arg(elem) for elem in a)
return self.create_node('call_function', a.__class__, args, {})
# Tensors do not have a reliable string repr() from which they can be
# constructed (and we probably don't want to rely on that, either), so
# for any constant Tensor values we encounter, first search for if they
# are an attribute of some module in the module hierarchy. If so, emit
# a get_attr to retrieve that tensor. Otherwise, we'll store away the
# tensor value into a special attribute on the Module s.t. we can
# retrieve it with a get_attr.
if isinstance(a, (torch.Tensor, ScriptObject)):
qualname : Optional[str] = self.tensor_attrs.get(a)
# Tensor was not found in the Module hierarchy, stow it away in a
# special attribute and set the qualname to refer to that
if not qualname:
i = 0
while True:
qualname = f'_tensor_constant{i}'
if not hasattr(self.root, qualname):
break
i += 1
self.tensor_attrs[a] = qualname
setattr(self.root, qualname, a)
return self.create_node('get_attr', qualname, (), {})
if type(a) in _proxyable_classes:
# This is an instance of a proxyable class for which we did not
# witness its construction. Intern this as a constant attribute
# TODO: binary search
i = 0
while True:
qualname = f'_{a.__class__.__name__}_constant_{i}'
if not hasattr(self.root, qualname):
break
i += 1
setattr(self.root, qualname, a)
return self.create_node('get_attr', qualname, (), {})
return super().create_arg(a)
@compatibility(is_backward_compatible=True)
def is_leaf_module(self, m: torch.nn.Module, module_qualified_name : str) -> bool:
"""
A method to specify whether a given ``nn.Module`` is a "leaf" module.
Leaf modules are the atomic units that appear in
the IR, referenced by ``call_module`` calls. By default,
Modules in the PyTorch standard library namespace (torch.nn)
are leaf modules. All other modules are traced through and
their constituent ops are recorded, unless specified otherwise
via this parameter.
Args:
m (Module): The module being queried about
module_qualified_name (str): The path to root of this module. For example,
if you have a module hierarchy where submodule ``foo`` contains
submodule ``bar``, which contains submodule ``baz``, that module will
appear with the qualified name ``foo.bar.baz`` here.
"""
return m.__module__.startswith('torch.nn') and not isinstance(m, torch.nn.Sequential)
@compatibility(is_backward_compatible=True)
def path_of_module(self, mod : torch.nn.Module) -> str:
"""
Helper method to find the qualified name of ``mod`` in the Module hierarchy
of ``root``. For example, if ``root`` has a submodule named ``foo``, which has
a submodule named ``bar``, passing ``bar`` into this function will return
the string "foo.bar".
Args:
mod (str): The ``Module`` to retrieve the qualified name for.
"""
# Prefer the O(1) algorithm
if self.submodule_paths:
path = self.submodule_paths.get(mod)
if path is None:
raise NameError('module is not installed as a submodule')
assert isinstance(path, str)
return path
# O(N^2) fallback in the case that we didn't store the submodule
# paths.
else:
for n, p in self.root.named_modules():
if mod is p:
return n
raise NameError('module is not installed as a submodule')
@compatibility(is_backward_compatible=True)
def call_module(self, m: torch.nn.Module, forward: Callable[..., Any], args : Tuple[Any, ...], kwargs : Dict[str, Any]) -> Any:
"""
Method that specifies the behavior of this ``Tracer`` when it encounters
a call to an ``nn.Module`` instance.
By default, the behavior is to check if the called module is a leaf module
via ``is_leaf_module``. If it is, emit a ``call_module`` node referring to
``m`` in the ``Graph``. Otherwise, call the ``Module`` normally, tracing through
the operations in its ``forward`` function.
This method can be overridden to--for example--create nested traced
GraphModules, or any other behavior you would want while tracing across
``Module`` boundaries.
Args:
m (Module): The module for which a call is being emitted
forward (Callable): The forward() method of the ``Module`` to be invoked
args (Tuple): args of the module callsite
kwargs (Dict): kwargs of the module callsite
Return:
The return value from the Module call. In the case that a ``call_module``
node was emitted, this is a ``Proxy`` value. Otherwise, it is whatever
value was returned from the ``Module`` invocation.
"""
module_qualified_name = self.path_of_module(m)
if not self.is_leaf_module(m, module_qualified_name):
return forward(*args, **kwargs)
return self.create_proxy('call_module', module_qualified_name, args, kwargs)
# This method will be refactored
@compatibility(is_backward_compatible=False)
def create_args_for_root(self, root_fn, is_module, concrete_args=None):
"""
Create ``placeholder`` nodes corresponding to the signature of the ``root``
Module. This method introspects root's signature and emits those
nodes accordingly, also supporting ``*args`` and ``**kwargs``.
"""
# In some cases, a function or method has been decorated with a wrapper
# defined via ``functools.wraps``. In this case, the outer code object
# will likely not contain the actual parameters we care about, so unwrap
# the function to get to the innermost callable.
fn_for_analysis = inspect.unwrap(root_fn)
co = fn_for_analysis.__code__
total_args = co.co_argcount + co.co_kwonlyargcount
orig_args = list(co.co_varnames)
names_iter = iter(co.co_varnames)
args : List[Any] = []
skip_arg_idx = 0
if is_module:
if total_args == 0:
raise RuntimeError('``self`` argument cannot be part of *args expansion!')
skip_arg_idx = 1
next(names_iter) # skip self
args.append(self.root)
sig = inspect.signature(fn_for_analysis)
def proxy_placeholder(name: str):
if concrete_args is not None and name in concrete_args :
cnt = 0
def replace_ph(x):
nonlocal cnt
cnt += 1
param = sig.parameters[name]
default = () if param.default is inspect.Parameter.empty else (param.default,)
out = self.create_proxy('placeholder', f'{name}_{str(cnt)}', default, {})
if x == PH:
return out
# Union[int, bool] == bool in Python <= 3.6
if type(x) == bool or type(x) in base_types and type(x) != torch.Tensor:
torch._assert(out == x, f"{name} has been specialized to have value {x} but got another value")
elif type(x) == type(None):
args = (out, f"{name} has been specialized to have value None but got another value")
self.create_proxy('call_function', _assert_is_none, args, {})
else:
torch.warnings.warn(
f"Was not able to add assertion to guarantee correct input {name} to "
f"specialized function. It is up to the user to make sure that your inputs match the "
f"inputs you specialized the function with."
)
return x
return pytree.tree_map(replace_ph, concrete_args[name])
if name[0] == '*':
default = ()
else:
param = sig.parameters[name]
default = () if param.default is inspect.Parameter.empty else (param.default,) # type: ignore[assignment]
return self.create_proxy('placeholder', name, default, {},
type_expr=fn_for_analysis.__annotations__.get(name, None))
arg_names = [next(names_iter) for idx in range(skip_arg_idx, total_args)]
if isinstance(concrete_args, tuple):
if len(arg_names) != len(concrete_args):
raise RuntimeError(f"Tracing expected {len(arg_names)} arguments but got {len(concrete_args)} concrete arguments")
concrete_args = {name: val for name, val in zip(arg_names, concrete_args)}
args.extend(proxy_placeholder(names) for names in arg_names)
if co.co_kwonlyargcount > 0 or co.co_flags & HAS_VARSTUFF:
# TODO: type annotations for *args and **kwargs
if co.co_flags & inspect.CO_VARARGS:
args.append(proxy_placeholder('*' + next(names_iter)))
if co.co_flags & inspect.CO_VARKEYWORDS:
args.append(proxy_placeholder('**' + next(names_iter)))
root_fn = _patch_function(root_fn, len(args))
flat_args, in_spec = pytree.tree_flatten(tuple(args))
if any(not isinstance(i, pytree.LeafSpec) for i in in_spec.children_specs):
# In the case that we have pytree-flattened inputs in
# `concrete_args`, generate a flattening wrapper around the
# original root function and return that.
self.graph._codegen = _PyTreeCodeGen(_PyTreeInfo(orig_args[:total_args], in_spec, None))
def flatten_fn(*args):
tree_args = pytree.tree_unflatten(list(args), in_spec)
tree_out = root_fn(*tree_args)
out_args, out_spec = pytree.tree_flatten(tree_out)
assert(isinstance(self.graph._codegen, _PyTreeCodeGen))
self.graph._codegen.pytree_info = self.graph._codegen.pytree_info._replace(out_spec=out_spec)
return out_args
return flatten_fn, flat_args
return root_fn, args
def _module_getattr(self, attr, attr_val, parameter_proxy_cache):
def maybe_get_proxy_for_attr(attr_val, collection_to_search, parameter_proxy_cache):
for n, p in collection_to_search:
if attr_val is p:
if n not in parameter_proxy_cache:
kwargs = {}
if 'proxy_factory_fn' in inspect.signature(self.create_proxy).parameters:
kwargs['proxy_factory_fn'] = (None if not self.param_shapes_constant else
lambda node : ParameterProxy(self, node, n, attr_val))
val_proxy = self.create_proxy('get_attr', n, (), {}, **kwargs) # type: ignore[arg-type]
parameter_proxy_cache[n] = val_proxy
return parameter_proxy_cache[n]
return None
if isinstance(attr_val, torch.nn.Parameter):
maybe_parameter_proxy = maybe_get_proxy_for_attr(attr_val, self.root.named_parameters(), parameter_proxy_cache)
if maybe_parameter_proxy is not None:
return maybe_parameter_proxy
if self.proxy_buffer_attributes and isinstance(attr_val, torch.Tensor):
maybe_buffer_proxy = maybe_get_proxy_for_attr(attr_val, self.root.named_buffers(), parameter_proxy_cache)
if maybe_buffer_proxy is not None:
return maybe_buffer_proxy
return attr_val
@compatibility(is_backward_compatible=True)
def trace(self, root: Union[torch.nn.Module, Callable[..., Any]], concrete_args: Optional[Dict[str, Any]] = None) -> Graph:
"""
Trace ``root`` and return the corresponding FX ``Graph`` representation. ``root``
can either be an ``nn.Module`` instance or a Python callable.
Note that after this call, ``self.root`` may be different from the ``root`` passed
in here. For example, when a free function is passed to ``trace()``, we will
create an ``nn.Module`` instance to use as the root and add embedded constants
to.
Args:
root (Union[Module, Callable]): Either a ``Module`` or a function to be
traced through. Backwards-compatibility for this parameter is
guaranteed.
concrete_args (Optional[Dict[str, any]]): Concrete arguments that should
not be treated as Proxies. This parameter is experimental and
its backwards-compatibility is *NOT* guaranteed.
Returns:
A ``Graph`` representing the semantics of the passed-in ``root``.
"""
if isinstance(root, torch.nn.Module):
self.root = root
assert hasattr(
type(root), self.traced_func_name
), f"traced_func_name={self.traced_func_name} doesn't exist in {type(root).__name__}"
fn = getattr(type(root), self.traced_func_name)
self.submodule_paths = {mod: name for name, mod in root.named_modules()}
else:
self.root = torch.nn.Module()
fn = root
tracer_cls: Optional[Type['Tracer']] = getattr(self, '__class__', None)
self.graph = Graph(tracer_cls=tracer_cls)
# When we encounter a Tensor value that's not a parameter, we look if it
# is some other attribute on the model. Construct a dict mapping Tensor
# values to the qualified name here for efficiency. This is used downstream
# in create_arg
self.tensor_attrs : Dict[Union[torch.Tensor, ScriptObject], str] = {}
def collect_tensor_attrs(m : torch.nn.Module, prefix_atoms : List[str]):
for k, v in m.__dict__.items():
if isinstance(v, (torch.Tensor, ScriptObject)):
self.tensor_attrs[v] = '.'.join(prefix_atoms + [k])
for k, v in m.named_children():
collect_tensor_attrs(v, prefix_atoms + [k])
collect_tensor_attrs(self.root, [])
assert isinstance(fn, FunctionType)
fn_globals = fn.__globals__ # run before it gets patched
fn, args = self.create_args_for_root(fn, isinstance(root, torch.nn.Module), concrete_args)
parameter_proxy_cache : Dict[str, Proxy] = {} # Reduce number of get_attr calls
# Method dispatch on parameters is not recorded unless it's directly used.
# Thus, we need to insert a proxy when __getattr__ requests a parameter.
@functools.wraps(_orig_module_getattr)
def module_getattr_wrapper(mod, attr):
attr_val = _orig_module_getattr(mod, attr)
return self._module_getattr(attr, attr_val, parameter_proxy_cache)
@functools.wraps(_orig_module_call)
def module_call_wrapper(mod, *args, **kwargs):
def forward(*args, **kwargs):
return _orig_module_call(mod, *args, **kwargs)
_autowrap_check(patcher, getattr(getattr(mod, "forward", mod), "__globals__", {}),
self._autowrap_function_ids)
return self.call_module(mod, forward, args, kwargs)
with _Patcher() as patcher:
# allow duplicate patches to support the case of nested calls
patcher.patch_method(torch.nn.Module, "__getattr__", module_getattr_wrapper, deduplicate=False)
patcher.patch_method(torch.nn.Module, "__call__", module_call_wrapper, deduplicate=False)
_patch_wrapped_functions(patcher)
_autowrap_check(patcher, fn_globals, self._autowrap_function_ids)
for module in self._autowrap_search:
_autowrap_check(patcher, module.__dict__, self._autowrap_function_ids)
self.create_node('output', 'output', (self.create_arg(fn(*args)),), {},
type_expr=fn.__annotations__.get('return', None))
self.submodule_paths = None
return self.graph
# List of pairs of (global dict, function name) functions
# to patch for the purposes of the wrap() API.
_wrapped_fns_to_patch : List[Tuple[dict, str]] = []
# List of methods on classes to wrap (class type, function name)
# this currently only works for Tensor.* methods that aren't traced properly
_wrapped_methods_to_patch : List[Tuple[type, str]] = []
if os.environ.get("FX_PATCH_GETITEM") == "1":
# This change is needed to trace models like PositionalEmbedding from BERT:
# https://github.com/pytorch/benchmark/blob/master/torchbenchmark/models/BERT_pytorch/bert_pytorch/model/embedding/position.py
# but causes issues in quantization documented here:
# https://github.com/pytorch/pytorch/issues/50710
# once that is fixed we can make this the default behavior.
_wrapped_methods_to_patch.append((torch.Tensor, "__getitem__"))
def _find_proxy(*objects_to_search):
"""
Recursively search a data structure for a Proxy() and return it,
return None if not found.
"""
proxy = None
def find_proxy(x):
nonlocal proxy
if isinstance(x, Proxy):
proxy = x
map_aggregate(objects_to_search, find_proxy)
return proxy
def _create_wrapped_func(orig_fn):
@functools.wraps(orig_fn)
def wrapped(*args, **kwargs):
"""
        Given a closed-over ``orig_fn`` to invoke, search the args and kwargs for
a Proxy object. If there is one, emit a ``call_function`` node to preserve the
call to this leaf function directly. Otherwise, just return the results of
this function call, as this function is not being traced.
"""
proxy = _find_proxy(args, kwargs)
if proxy is not None:
return_proxy = proxy.tracer.create_proxy('call_function', orig_fn, args, kwargs)
return_proxy.node.meta['is_wrapped'] = True
return return_proxy
return orig_fn(*args, **kwargs)
return wrapped
def _create_wrapped_method(cls, name):
orig_fn = getattr(cls, name)
@functools.wraps(orig_fn)
def wrapped(*args, **kwargs):
"""
Search the args and kwargs for a Proxy object. If there is one,
emit a ``call_method`` node to preserve the call to this method
directly. Otherwise, just return the results of this function
call, as this function is not being traced.
"""
proxy = _find_proxy(args, kwargs)
if proxy is not None:
return proxy.tracer.create_proxy('call_method', name, args, kwargs)
return orig_fn(*args, **kwargs)
return wrapped
class _PatchedFn(NamedTuple):
frame_dict : Any
fn_name : str
orig_fn : Any
def revert(self):
raise NotImplementedError()
class _PatchedFnSetItem(_PatchedFn):
def revert(self):
self.frame_dict[self.fn_name] = self.orig_fn
class _PatchedFnDel(_PatchedFn):
def revert(self):
del self.frame_dict[self.fn_name]
class _PatchedFnSetAttr(_PatchedFn):
def revert(self):
setattr(self.frame_dict, self.fn_name, self.orig_fn)
class _Patcher(object):
def __init__(self):
super(_Patcher, self).__init__()
self.patches_made : List[_PatchedFn] = []
self.visited : Set[int] = set()
def patch(self, frame_dict : Dict[str, Any], name : str, new_fn : Callable,
deduplicate : bool = True):
"""
Replace frame_dict[name] with new_fn until we exit the context manager.
"""
new_fn.__fx_already_patched = deduplicate # type: ignore[attr-defined]
if name not in frame_dict and hasattr(builtins, name):
self.patches_made.append(_PatchedFnDel(frame_dict, name, None))
elif getattr(frame_dict[name], "__fx_already_patched", False):
return # already patched, no need to do it again
else:
self.patches_made.append(_PatchedFnSetItem(frame_dict, name, frame_dict[name]))
frame_dict[name] = new_fn
def patch_method(self, cls: type, name : str, new_fn : Callable,
deduplicate : bool = True):
"""
        Replace cls.name with new_fn until we exit the context manager.
"""
new_fn.__fx_already_patched = deduplicate # type: ignore[attr-defined]
orig_fn = getattr(cls, name)
if getattr(orig_fn, "__fx_already_patched", False):
return # already patched, no need to do it again
self.patches_made.append(_PatchedFnSetAttr(cls, name, orig_fn))
setattr(cls, name, new_fn)
def visit_once(self, thing: Any):
""" Return True on the first call to with thing, otherwise false """
idx = id(thing)
if idx in self.visited:
return False
self.visited.add(idx)
return True
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Undo all the changes made via self.patch() and self.patch_method()
"""
while self.patches_made:
# unpatch in reverse order to handle duplicates correctly
self.patches_made.pop().revert()
self.visited.clear()
def _patch_wrapped_functions(patcher : _Patcher):
"""
    Go through ``_wrapped_fns_to_patch`` and, for each frame dict, wrap
    the listed global functions in the ``_create_wrapped_func`` wrapper; likewise wrap
    the methods listed in ``_wrapped_methods_to_patch`` with ``_create_wrapped_method``.
"""
for frame_dict, name in _wrapped_fns_to_patch:
if name not in frame_dict and hasattr(builtins, name):
orig_fn = getattr(builtins, name)
else:
orig_fn = frame_dict[name]
patcher.patch(frame_dict, name, _create_wrapped_func(orig_fn))
for cls, name in _wrapped_methods_to_patch:
patcher.patch_method(cls, name, _create_wrapped_method(cls, name))
def _autowrap_check(patcher : _Patcher, frame_dict : Dict[str, Any], function_ids : Set[int]):
"""
Some methods, like `math.sqrt` are common enough we want to automatically wrap them as we see them.
This method searches a scope for them and patches them if found.
"""
if patcher.visit_once(frame_dict):
for name, value in frame_dict.items():
if not name.startswith("_") and callable(value) and id(value) in function_ids:
patcher.patch(frame_dict, name, _create_wrapped_func(value))
@compatibility(is_backward_compatible=True)
def wrap(fn_or_name : Union[str, Callable]):
"""
This function can be called at module-level scope to register fn_or_name as a "leaf function".
A "leaf function" will be preserved as a CallFunction node in the FX trace instead of being
traced through::
# foo/bar/baz.py
def my_custom_function(x, y):
return x * x + y * y
torch.fx.wrap('my_custom_function')
def fn_to_be_traced(x, y):
# When symbolic tracing, the below call to my_custom_function will be inserted into
# the graph rather than tracing it.
return my_custom_function(x, y)
This function can also equivalently be used as a decorator::
# foo/bar/baz.py
@torch.fx.wrap
def my_custom_function(x, y):
return x * x + y * y
    A wrapped function can be thought of as a "leaf function", analogous to the concept of
"leaf modules", that is, they are functions that are left as calls in the FX trace
rather than traced through.
Args:
fn_or_name (Union[str, Callable]): The function or name of the global function to insert into the
graph when it's called
"""
if not callable(fn_or_name) and not isinstance(fn_or_name, str):
raise RuntimeError('Unsupported type for global function! Must be either a callable or '
'string name')
if hasattr(fn_or_name, '__code__'):
assert not isinstance(fn_or_name, str) # to make mypy happy
fn_name = fn_or_name.__code__.co_name
else:
assert isinstance(fn_or_name, str), "fn_or_name must be a global function or string name"
fn_name = fn_or_name
currentframe = inspect.currentframe()
assert currentframe is not None
f = currentframe.f_back
assert f is not None
if f.f_code.co_name != '<module>':
raise NotImplementedError('wrap must be called at the top level of a module')
# consider implementing Callable version of this via _autowrap_function_ids / _autowrap_search
# semantics would be slightly different, but would add support `from x import wrapped_function`
_wrapped_fns_to_patch.append((f.f_globals, fn_name))
return fn_or_name
@compatibility(is_backward_compatible=True)
def symbolic_trace(root : Union[torch.nn.Module, Callable[..., Any]],
concrete_args: Optional[Dict[str, Any]] = None) -> GraphModule:
"""
Symbolic tracing API
Given an ``nn.Module`` or function instance ``root``, this function will return a ``GraphModule``
constructed by recording operations seen while tracing through ``root``.
``concrete_args`` allows you to partially specialize your function, whether it's to remove control flow or data structures.
For example::
def f(a, b):
if b == True:
return a
else:
return a*2
FX can typically not trace through this due to the presence of control
flow. However, we can use `concrete_args` to specialize on the value of
`b` to trace through this.
f = fx.symbolic_trace(f, concrete_args={'b': False})
assert f(3, False) == 6
Note that although you can still pass in different values of `b`, they will be ignored.
We can also use `concrete_args` to eliminate data-structure handling from
our function. This will use pytrees to flatten your input. To avoid
overspecializing, pass in `fx.PH` for values that shouldn't be
specialized. For example::
def f(x):
out = 0
for v in x.values():
out += v
return out
f = fx.symbolic_trace(f, concrete_args={'x': {'a': fx.PH, 'b': fx.PH, 'c': fx.PH}})
assert f({'a': 1, 'b': 2, 'c': 4}) == 7
Args:
root (Union[torch.nn.Module, Callable]): Module or function to be traced and converted
into a Graph representation.
concrete_args (Optional[Dict[str, any]]): Inputs to be partially specialized
Returns:
GraphModule: a Module created from the recorded operations from ``root``.
"""
tracer = Tracer()
graph = tracer.trace(root, concrete_args)
name = root.__class__.__name__ if isinstance(root, torch.nn.Module) else root.__name__
return GraphModule(tracer.root, graph, name)
@wrap
def _assert_is_none(value, msg):
assert value is None, msg
| 42.360045
| 131
| 0.631611
|
b41c681d9aa3401883fd354651d7125c353d15ec
| 11,822
|
py
|
Python
|
code/switcher.py
|
CameronSBell/knausj_talon
|
3e57e0165257cf07b0e21880d44a91e79cb3ef16
|
[
"MIT"
] | 298
|
2020-02-23T03:00:51.000Z
|
2022-03-30T02:11:00.000Z
|
code/switcher.py
|
CameronSBell/knausj_talon
|
3e57e0165257cf07b0e21880d44a91e79cb3ef16
|
[
"MIT"
] | 521
|
2020-02-21T18:21:17.000Z
|
2022-03-31T16:40:34.000Z
|
code/switcher.py
|
CameronSBell/knausj_talon
|
3e57e0165257cf07b0e21880d44a91e79cb3ef16
|
[
"MIT"
] | 499
|
2020-03-07T05:43:52.000Z
|
2022-03-28T12:24:54.000Z
|
import os
import re
import time
import talon
from talon import Context, Module, app, imgui, ui, fs, actions
from glob import glob
from itertools import islice
from pathlib import Path
import subprocess
# Construct at startup a list of overrides for application names (similar to how the homophone list is managed),
# e.g. for the talon recognition word set `one note`, these switcher functions recognize it as `ONENOTE`.
# The list is comma separated: `<Recognized Words>,<Override>`
# TODO: Consider putting the list csv files (homophones.csv, app_name_overrides.csv) together in a separate directory, `knausj_talon/lists`
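# An illustrative override entry (hypothetical file contents), following the
# `<Recognized Words>,<Override>` format parsed by update_overrides() below:
#   one note,ONENOTE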
cwd = os.path.dirname(os.path.realpath(__file__))
overrides_directory = os.path.join(cwd, "app_names")
override_file_name = f"app_name_overrides.{talon.app.platform}.csv"
override_file_path = os.path.join(overrides_directory, override_file_name)
mod = Module()
mod.list("running", desc="all running applications")
mod.list("launch", desc="all launchable applications")
ctx = Context()
# a list of the current overrides
overrides = {}
# a list of the currently running application names
running_application_dict = {}
mac_application_directories = [
"/Applications",
"/Applications/Utilities",
"/System/Applications",
"/System/Applications/Utilities",
]
words_to_exclude = [
"zero",
"one",
"two",
"three",
"for",
"four",
"five",
"six",
"seven",
"eight",
"nine",
"and",
"dot",
"exe",
"help",
"install",
"installer",
"microsoft",
"nine",
"readme",
"studio",
"terminal",
"visual",
"windows",
]
# on Windows, WindowsApps are not like normal applications, so
# we use the shell:AppsFolder to populate the list of applications
# rather than via e.g. the start menu. This way, all apps, including "modern" apps are
# launchable. To easily retrieve the apps this makes available, navigate to shell:AppsFolder in Explorer
if app.platform == "windows":
import os
import ctypes
import pywintypes
import pythoncom
import winerror
try:
import winreg
except ImportError:
# Python 2
import _winreg as winreg
bytes = lambda x: str(buffer(x))
from ctypes import wintypes
from win32com.shell import shell, shellcon
from win32com.propsys import propsys, pscon
# KNOWNFOLDERID
# https://msdn.microsoft.com/en-us/library/dd378457
# win32com defines most of these, except the ones added in Windows 8.
FOLDERID_AppsFolder = pywintypes.IID("{1e87508d-89c2-42f0-8a7e-645a0f50ca58}")
# win32com is missing SHGetKnownFolderIDList, so use ctypes.
_ole32 = ctypes.OleDLL("ole32")
_shell32 = ctypes.OleDLL("shell32")
_REFKNOWNFOLDERID = ctypes.c_char_p
_PPITEMIDLIST = ctypes.POINTER(ctypes.c_void_p)
_ole32.CoTaskMemFree.restype = None
_ole32.CoTaskMemFree.argtypes = (wintypes.LPVOID,)
_shell32.SHGetKnownFolderIDList.argtypes = (
_REFKNOWNFOLDERID, # rfid
wintypes.DWORD, # dwFlags
wintypes.HANDLE, # hToken
_PPITEMIDLIST,
) # ppidl
def get_known_folder_id_list(folder_id, htoken=None):
if isinstance(folder_id, pywintypes.IIDType):
folder_id = bytes(folder_id)
pidl = ctypes.c_void_p()
try:
_shell32.SHGetKnownFolderIDList(folder_id, 0, htoken, ctypes.byref(pidl))
return shell.AddressAsPIDL(pidl.value)
except WindowsError as e:
if e.winerror & 0x80070000 == 0x80070000:
# It's a WinAPI error, so re-raise it, letting Python
# raise a specific exception such as FileNotFoundError.
raise ctypes.WinError(e.winerror & 0x0000FFFF)
raise
finally:
if pidl:
_ole32.CoTaskMemFree(pidl)
def enum_known_folder(folder_id, htoken=None):
id_list = get_known_folder_id_list(folder_id, htoken)
folder_shell_item = shell.SHCreateShellItem(None, None, id_list)
items_enum = folder_shell_item.BindToHandler(
None, shell.BHID_EnumItems, shell.IID_IEnumShellItems
)
for item in items_enum:
yield item
def list_known_folder(folder_id, htoken=None):
result = []
for item in enum_known_folder(folder_id, htoken):
result.append(item.GetDisplayName(shellcon.SIGDN_NORMALDISPLAY))
result.sort(key=lambda x: x.upper())
return result
def get_windows_apps():
items = {}
for item in enum_known_folder(FOLDERID_AppsFolder):
try:
property_store = item.BindToHandler(
None, shell.BHID_PropertyStore, propsys.IID_IPropertyStore
)
app_user_model_id = property_store.GetValue(
pscon.PKEY_AppUserModel_ID
).ToString()
except pywintypes.error:
continue
name = item.GetDisplayName(shellcon.SIGDN_NORMALDISPLAY)
# exclude anything with install/uninstall...
            # 'cause we don't want 'em
if "install" not in name.lower():
items[name] = app_user_model_id
return items
@mod.capture(rule="{self.running}") # | <user.text>)")
def running_applications(m) -> str:
"Returns a single application name"
try:
return m.running
except AttributeError:
return m.text
@mod.capture(rule="{self.launch}")
def launch_applications(m) -> str:
"Returns a single application name"
return m.launch
def update_running_list():
global running_application_dict
running_application_dict = {}
running = {}
for cur_app in ui.apps(background=False):
running_application_dict[cur_app.name] = True
if app.platform == "windows":
# print("hit....")
# print(cur_app.exe)
running_application_dict[cur_app.exe.split(os.path.sep)[-1]] = True
running = actions.user.create_spoken_forms_from_list(
[curr_app.name for curr_app in ui.apps(background=False)],
words_to_exclude=words_to_exclude,
generate_subsequences=True,
)
# print(str(running_application_dict))
# todo: should the overrides remove the other spoken forms for an application?
for override in overrides:
if overrides[override] in running_application_dict:
running[override] = overrides[override]
lists = {
"self.running": running,
}
# batch update lists
ctx.lists.update(lists)
def update_overrides(name, flags):
"""Updates the overrides list"""
global overrides
overrides = {}
if name is None or name == override_file_path:
# print("update_overrides")
with open(override_file_path, "r") as f:
for line in f:
line = line.rstrip()
line = line.split(",")
if len(line) == 2:
overrides[line[0].lower()] = line[1].strip()
update_running_list()
@mod.action_class
class Actions:
def get_running_app(name: str) -> ui.App:
"""Get the first available running app with `name`."""
# We should use the capture result directly if it's already in the list
# of running applications. Otherwise, name is from <user.text> and we
# can be a bit fuzzier
if name not in running_application_dict:
if len(name) < 3:
raise RuntimeError(
f'Skipped getting app: "{name}" has less than 3 chars.'
)
for running_name, full_application_name in ctx.lists[
"self.running"
].items():
if running_name == name or running_name.lower().startswith(
name.lower()
):
name = full_application_name
break
for application in ui.apps(background=False):
if application.name == name or (
app.platform == "windows"
and application.exe.split(os.path.sep)[-1] == name
):
return application
raise RuntimeError(f'App not running: "{name}"')
def switcher_focus(name: str):
"""Focus a new application by name"""
app = actions.user.get_running_app(name)
actions.user.switcher_focus_app(app)
def switcher_focus_app(app: ui.App):
"""Focus application and wait until switch is made"""
app.focus()
t1 = time.monotonic()
while ui.active_app() != app:
if time.monotonic() - t1 > 1:
raise RuntimeError(f"Can't focus app: {app.name}")
actions.sleep(0.1)
def switcher_focus_window(window: ui.Window):
"""Focus window and wait until switch is made"""
window.focus()
t1 = time.monotonic()
while ui.active_window() != window:
if time.monotonic() - t1 > 1:
raise RuntimeError(f"Can't focus window: {window.title}")
actions.sleep(0.1)
def switcher_launch(path: str):
"""Launch a new application by path (all OSes), or AppUserModel_ID path on Windows"""
if app.platform != "windows":
ui.launch(path=path)
else:
is_valid_path = False
try:
current_path = Path(path)
is_valid_path = current_path.is_file()
except:
is_valid_path = False
if is_valid_path:
ui.launch(path=path)
else:
cmd = "explorer.exe shell:AppsFolder\\{}".format(path)
subprocess.Popen(cmd, shell=False)
def switcher_menu():
"""Open a menu of running apps to switch to"""
if app.platform == "windows":
actions.key("alt-ctrl-tab")
else:
print("Persistent Switcher Menu not supported on " + app.platform)
def switcher_toggle_running():
"""Shows/hides all running applications"""
if gui_running.showing:
gui_running.hide()
else:
gui_running.show()
def switcher_hide_running():
"""Hides list of running applications"""
gui_running.hide()
@imgui.open()
def gui_running(gui: imgui.GUI):
gui.text("Names of running applications")
gui.line()
for line in ctx.lists["self.running"]:
gui.text(line)
gui.spacer()
if gui.button("Running close"):
actions.user.switcher_hide_running()
def update_launch_list():
launch = {}
if app.platform == "mac":
for base in mac_application_directories:
if os.path.isdir(base):
for name in os.listdir(base):
path = os.path.join(base, name)
name = name.rsplit(".", 1)[0].lower()
launch[name] = path
elif app.platform == "windows":
launch = get_windows_apps()
# actions.user.talon_pretty_print(launch)
ctx.lists["self.launch"] = actions.user.create_spoken_forms_from_map(
launch, words_to_exclude
)
def ui_event(event, arg):
if event in ("app_launch", "app_close"):
update_running_list()
# Currently update_launch_list only does anything on mac, so we should make sure
# to initialize user launch to avoid getting "List not found: user.launch"
# errors on other platforms.
ctx.lists["user.launch"] = {}
ctx.lists["user.running"] = {}
# Talon starts faster if you don't use the `talon.ui` module during launch
def on_ready():
update_overrides(None, None)
fs.watch(overrides_directory, update_overrides)
update_launch_list()
update_running_list()
ui.register("", ui_event)
app.register("ready", on_ready)
| 31.441489
| 132
| 0.623837
|
dfbe2939f82851b58a94712d5f50a09c716052bb
| 5,606
|
py
|
Python
|
Generative/GENERATESAMPLES/util/utils.py
|
NREL/GANISP
|
3ce6979e26f837d05b8f7cfbe2b949f900b6026b
|
[
"BSD-3-Clause"
] | 5
|
2021-06-17T18:55:24.000Z
|
2022-02-14T15:09:33.000Z
|
Generative/GENERATESAMPLES/util/utils.py
|
NREL/GANISP
|
3ce6979e26f837d05b8f7cfbe2b949f900b6026b
|
[
"BSD-3-Clause"
] | null | null | null |
Generative/GENERATESAMPLES/util/utils.py
|
NREL/GANISP
|
3ce6979e26f837d05b8f7cfbe2b949f900b6026b
|
[
"BSD-3-Clause"
] | 1
|
2022-02-23T13:48:06.000Z
|
2022-02-23T13:48:06.000Z
|
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
def conv_layer_2d(x, filter_shape, stride, trainable=True):
filter_ = tf.get_variable(
name='weight',
shape=filter_shape,
dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer(),
trainable=trainable)
bias_ = tf.get_variable(
name='bias',
shape=[filter_shape[-1]],
dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer(),
trainable=trainable)
x = tf.nn.bias_add(tf.nn.conv2d(
input=x,
filter=filter_,
strides=[1, stride, stride, 1],
padding='SAME'), bias_)
return x
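# An illustrative call (a sketch; shapes and scoping are assumptions, TF1 graph mode):
# within a distinct tf.variable_scope per layer,
#   conv_layer_2d(x, filter_shape=[3, 3, in_channels, out_channels], stride=1)
# applies a 3x3 SAME-padded convolution plus a learned bias; the per-layer scope is
# what keeps the 'weight'/'bias' variable names from colliding between layers.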
def deconv_layer_2d(x, filter_shape, output_shape, stride, trainable=True):
filter_ = tf.get_variable(
name='weight',
shape=filter_shape,
dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer(),
trainable=trainable)
bias_ = tf.get_variable(
name='bias',
shape=[output_shape[-1]],
dtype=tf.float32,
initializer=tf.contrib.layers.xavier_initializer(),
trainable=trainable)
x = tf.nn.bias_add(tf.nn.conv2d_transpose(
value=x,
filter=filter_,
output_shape=output_shape,
strides=[1, stride, stride, 1],
padding='SAME'), bias_)
return x
def flatten_layer(x):
input_shape = x.get_shape().as_list()
if len(input_shape) == 5:
dim = input_shape[1] * input_shape[2] * input_shape[3]
transposed = tf.transpose(x, (0, 4, 1, 2, 3))
return tf.reshape(transposed, [-1, dim])
elif len(input_shape) == 4:
dim = input_shape[1] * input_shape[2]
transposed = tf.transpose(x, (0, 3, 1, 2))
return tf.reshape(transposed, [-1, dim])
def dense_layer(x, out_dim, trainable=True):
in_dim = x.get_shape().as_list()[-1]
W = tf.get_variable(
name='weight',
shape=[in_dim, out_dim],
dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=0.02),
trainable=trainable)
b = tf.get_variable(
name='bias',
shape=[out_dim],
dtype=tf.float32,
initializer=tf.constant_initializer(0.0),
trainable=trainable)
return tf.add(tf.matmul(x, W), b)
def plot_SR_data(idx, LR, SR, path):
r0 = int(SR.shape[2]/LR.shape[1])
r1 = int(SR.shape[3]/LR.shape[2])
LR_exp = np.expand_dims(LR.repeat(r0, axis=1).repeat(r1, axis=2), axis=1)
res_batch = SR - LR_exp
N_subs = np.minimum(3, SR.shape[1])
for i in range(LR.shape[0]):
vmin0 = np.minimum(np.min(LR[i,:,:,0]), np.min(SR[i,:,:,0]))
vmax0 = np.maximum(np.max(LR[i,:,:,0]), np.max(SR[i,:,:,0]))
vmin1 = np.minimum(np.min(LR[i,:,:,1]), np.min(SR[i,:,:,1]))
vmax1 = np.maximum(np.max(LR[i,:,:,1]), np.max(SR[i,:,:,1]))
fig_width = 9 + 2*N_subs
fig, ax = plt.subplots(2, 1+N_subs, figsize=(fig_width, 8))
im = ax[0, 0].imshow(LR[i, :, :, 0], vmin=vmin0, vmax=vmax0, cmap='viridis', origin='lower')
ax[0, 0].set_title('LR 0 Input', fontsize=9)
fig.colorbar(im, ax=ax[0, 0])
ax[0, 0].set_xticks([], [])
ax[0, 0].set_yticks([], [])
im = ax[1, 0].imshow(LR[i, :, :, 1], vmin=vmin1, vmax=vmax1, cmap='viridis', origin='lower')
ax[1, 0].set_title('LR 1 Input', fontsize=9)
fig.colorbar(im, ax=ax[1, 0])
ax[1, 0].set_xticks([], [])
ax[1, 0].set_yticks([], [])
for j in range(N_subs):
im = ax[0, j+1].imshow(SR[i, j, :, :, 0], vmin=vmin0, vmax=vmax0, cmap='viridis', origin='lower')
ax[0, j+1].set_title('SR 0 - {}'.format(j), fontsize=9)
fig.colorbar(im, ax=ax[0, j+1])
ax[0, j+1].set_xticks([], [])
ax[0, j+1].set_yticks([], [])
im = ax[1, j+1].imshow(SR[i, j, :, :, 1], vmin=vmin1, vmax=vmax1, cmap='viridis', origin='lower')
ax[1, j+1].set_title('SR 1 - {}'.format(j), fontsize=9)
fig.colorbar(im, ax=ax[1, j+1])
ax[1, j+1].set_xticks([], [])
ax[1, j+1].set_yticks([], [])
plt.savefig(path+'/{0:05d}fields.png'.format(idx[i]), dpi=200, bbox_inches='tight')
plt.close()
fig, ax = plt.subplots(2, 2, figsize=(8, 8))
im = ax[0, 0].imshow(np.mean(res_batch[i, ..., 0], axis=0), cmap='viridis', origin='lower')
ax[0, 0].set_title('Conditional Mean 0', fontsize=9)
fig.colorbar(im, ax=ax[0, 0])
ax[0, 0].set_xticks([], [])
ax[0, 0].set_yticks([], [])
im = ax[0, 1].imshow(np.std(res_batch[i, ..., 0], axis=0), cmap='viridis', origin='lower')
ax[0, 1].set_title('Conditional Std. Dev. 0', fontsize=9)
fig.colorbar(im, ax=ax[0, 1])
ax[0, 1].set_xticks([], [])
ax[0, 1].set_yticks([], [])
im = ax[1, 0].imshow(np.mean(res_batch[i, ..., 1], axis=0), cmap='viridis', origin='lower')
ax[1, 0].set_title('Conditional Mean 1', fontsize=9)
fig.colorbar(im, ax=ax[1, 0])
ax[1, 0].set_xticks([], [])
ax[1, 0].set_yticks([], [])
im = ax[1, 1].imshow(np.std(res_batch[i, ..., 1], axis=0), cmap='viridis', origin='lower')
ax[1, 1].set_title('Conditional Std. Dev. 1', fontsize=9)
fig.colorbar(im, ax=ax[1, 1])
ax[1, 1].set_xticks([], [])
ax[1, 1].set_yticks([], [])
plt.savefig(path+'/{0:05d}stats.png'.format(idx[i]), dpi=200, bbox_inches='tight')
plt.close()
| 36.402597
| 109
| 0.551374
|
84ec31f0c2ea713b9cbb5d83d7d1a8e405f602c8
| 2,516
|
py
|
Python
|
sympy/polys/tests/test_rationaltools.py
|
msgoff/sympy
|
1e7daef7514902f5e89718fa957b7b36c6669a10
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/polys/tests/test_rationaltools.py
|
msgoff/sympy
|
1e7daef7514902f5e89718fa957b7b36c6669a10
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/polys/tests/test_rationaltools.py
|
msgoff/sympy
|
1e7daef7514902f5e89718fa957b7b36c6669a10
|
[
"BSD-3-Clause"
] | null | null | null |
"""Tests for tools for manipulation of rational expressions. """
from sympy.polys.rationaltools import together
from sympy import S, symbols, Rational, sin, exp, Eq, Integral, Mul
from sympy.abc import x, y, z
A, B = symbols("A,B", commutative=False)
def test_together():
assert together(0) == 0
assert together(1) == 1
assert together(x * y * z) == x * y * z
assert together(x + y) == x + y
assert together(1 / x) == 1 / x
assert together(1 / x + 1) == (x + 1) / x
assert together(1 / x + 3) == (3 * x + 1) / x
assert together(1 / x + x) == (x ** 2 + 1) / x
assert together(1 / x + S.Half) == (x + 2) / (2 * x)
assert together(S.Half + x / 2) == Mul(S.Half, x + 1, evaluate=False)
assert together(1 / x + 2 / y) == (2 * x + y) / (y * x)
assert together(1 / (1 + 1 / x)) == x / (1 + x)
assert together(x / (1 + 1 / x)) == x ** 2 / (1 + x)
assert together(1 / x + 1 / y + 1 / z) == (x * y + x * z + y * z) / (x * y * z)
assert together(1 / (1 + x + 1 / y + 1 / z)) == y * z / (y + z + y * z + x * y * z)
assert together(1 / (x * y) + 1 / (x * y) ** 2) == y ** (-2) * x ** (-2) * (
1 + x * y
)
assert together(1 / (x * y) + 1 / (x * y) ** 4) == y ** (-4) * x ** (-4) * (
1 + x ** 3 * y ** 3
)
assert together(1 / (x ** 7 * y) + 1 / (x * y) ** 4) == y ** (-4) * x ** (-7) * (
x ** 3 + y ** 3
)
assert together(5 / (2 + 6 / (3 + 7 / (4 + 8 / (5 + 9 / x))))) == Rational(5, 2) * (
(171 + 119 * x) / (279 + 203 * x)
)
assert together(1 + 1 / (x + 1) ** 2) == (1 + (x + 1) ** 2) / (x + 1) ** 2
assert together(1 + 1 / (x * (1 + x))) == (1 + x * (1 + x)) / (x * (1 + x))
assert together(1 / (x * (x + 1)) + 1 / (x * (x + 2))) == (3 + 2 * x) / (
x * (1 + x) * (2 + x)
)
assert together(1 + 1 / (2 * x + 2) ** 2) == (4 * (x + 1) ** 2 + 1) / (
4 * (x + 1) ** 2
)
assert together(sin(1 / x + 1 / y)) == sin(1 / x + 1 / y)
assert together(sin(1 / x + 1 / y), deep=True) == sin((x + y) / (x * y))
assert together(1 / exp(x) + 1 / (x * exp(x))) == (1 + x) / (x * exp(x))
assert together(1 / exp(2 * x) + 1 / (x * exp(3 * x))) == (1 + exp(x) * x) / (
x * exp(3 * x)
)
assert together(Integral(1 / x + 1 / y, x)) == Integral((x + y) / (x * y), x)
assert together(Eq(1 / x + 1 / y, 1 + 1 / z)) == Eq((x + y) / (x * y), (z + 1) / z)
assert together((A * B) ** -1 + (B * A) ** -1) == (A * B) ** -1 + (B * A) ** -1
| 36.463768
| 88
| 0.408585
|
5f597dd265c4df4c66aa4892e961e128c123b8b9
| 4,367
|
py
|
Python
|
dags/utils/voting/load.py
|
makerdao-data/airflow-docker-image
|
498b8ff83c0e9651c3fd36a91a516ea80f32e8a7
|
[
"Apache-2.0"
] | null | null | null |
dags/utils/voting/load.py
|
makerdao-data/airflow-docker-image
|
498b8ff83c0e9651c3fd36a91a516ea80f32e8a7
|
[
"Apache-2.0"
] | 1
|
2022-03-22T13:57:44.000Z
|
2022-03-22T13:57:44.000Z
|
dags/utils/voting/load.py
|
makerdao-data/airflow-docker-image
|
498b8ff83c0e9651c3fd36a91a516ea80f32e8a7
|
[
"Apache-2.0"
] | null | null | null |
from decimal import Decimal
from datetime import datetime
from airflow.exceptions import AirflowFailException
import os, sys
sys.path.append('/opt/airflow/')
from dags.connectors.sf import sf
from dags.connectors.sf import clear_stage
from dags.adapters.snowflake.stage import transaction_clear_stage, transaction_write_to_stage
from dags.adapters.snowflake.table import transaction_write_to_table
def _load(chief, polls, api_polls, executives, votes, operations, **setup):
try:
clear_stage(f"{setup['votes_db']}.staging.votes_extracts")
sf.execute("BEGIN TRANSACTION; ")
c = []
for i in chief:
temp = i
temp[7] = Decimal(i[7])
c.append(temp)
if c:
pattern = transaction_write_to_stage(sf, c, f"{setup['votes_db']}.staging.votes_extracts")
if pattern:
transaction_write_to_table(
sf,
f"{setup['votes_db']}.staging.votes_extracts",
f"{setup['votes_db']}.staging.chief",
pattern,
)
transaction_clear_stage(sf, f"{setup['votes_db']}.staging.votes_extracts", pattern)
p = []
for i in polls:
temp = i
temp[7] = Decimal(i[7])
p.append(temp)
if p:
pattern = transaction_write_to_stage(sf, p, f"{setup['votes_db']}.staging.votes_extracts")
if pattern:
transaction_write_to_table(
sf,
f"{setup['votes_db']}.staging.votes_extracts",
f"{setup['votes_db']}.staging.polls",
pattern,
)
transaction_clear_stage(sf, f"{setup['votes_db']}.staging.votes_extracts", pattern)
if api_polls:
pattern = transaction_write_to_stage(sf, api_polls, f"{setup['votes_db']}.staging.votes_extracts")
if pattern:
transaction_write_to_table(
sf,
f"{setup['votes_db']}.staging.votes_extracts",
f"{setup['votes_db']}.internal.yays",
pattern,
)
transaction_clear_stage(sf, f"{setup['votes_db']}.staging.votes_extracts", pattern)
if executives:
pattern = transaction_write_to_stage(
sf, executives, f"{setup['votes_db']}.staging.votes_extracts"
)
if pattern:
transaction_write_to_table(
sf,
f"{setup['votes_db']}.staging.votes_extracts",
f"{setup['votes_db']}.internal.yays",
pattern,
)
transaction_clear_stage(sf, f"{setup['votes_db']}.staging.votes_extracts", pattern)
if operations:
pattern = transaction_write_to_stage(
sf, operations, f"{setup['votes_db']}.staging.votes_extracts"
)
if pattern:
transaction_write_to_table(
sf,
f"{setup['votes_db']}.staging.votes_extracts",
f"{setup['votes_db']}.operations.vote",
pattern,
)
transaction_clear_stage(sf, f"{setup['votes_db']}.staging.votes_extracts", pattern)
if votes:
pattern = transaction_write_to_stage(sf, votes, f"{setup['votes_db']}.staging.votes_extracts")
if pattern:
transaction_write_to_table(
sf,
f"{setup['votes_db']}.staging.votes_extracts",
f"{setup['votes_db']}.public.votes",
pattern,
)
transaction_clear_stage(sf, f"{setup['votes_db']}.staging.votes_extracts", pattern)
sf.execute(
f"""
INSERT INTO {setup['votes_db']}.internal.{setup['votes_scheduler']}(LOAD_ID, START_BLOCK, END_BLOCK, END_TIMESTAMP, STATUS)
VALUES ('{setup['load_id']}', {setup['start_block']} + 1, {setup['end_block']}, '{datetime.utcnow().__str__()[:19]}', 1);
"""
)
sf.execute("COMMIT; ")
    except Exception as e:
        sf.execute("ROLLBACK; ")
        raise AirflowFailException(f"FATAL: Loading data failed: {e}")
return True
| 36.697479
| 135
| 0.545455
|
da287915c83c610e1feb1a0cb6ebd97f65c429be
| 968
|
py
|
Python
|
src/doc/common/build_options.py
|
bopopescu/sagesmc
|
e8d1d31f6f598dba2d763baa2d2e804338f9e89e
|
[
"BSL-1.0"
] | 5
|
2015-01-04T07:15:06.000Z
|
2022-03-04T15:15:18.000Z
|
src/doc/common/build_options.py
|
bopopescu/sagesmc
|
e8d1d31f6f598dba2d763baa2d2e804338f9e89e
|
[
"BSL-1.0"
] | null | null | null |
src/doc/common/build_options.py
|
bopopescu/sagesmc
|
e8d1d31f6f598dba2d763baa2d2e804338f9e89e
|
[
"BSL-1.0"
] | 10
|
2016-09-28T13:12:40.000Z
|
2022-02-12T09:28:34.000Z
|
###############################################
# Options for building the Sage documentation #
###############################################
import os, re
SAGE_DOC = os.environ['SAGE_DOC']
LANGUAGES = [d for d in os.listdir(SAGE_DOC) if re.match('^[a-z][a-z]$', d)]
SPHINXOPTS = ""
PAPER = ""
OMIT = ["introspect"] # docs/dirs to omit when listing and building 'all'
if PAPER:
PAPEROPTS = "-D latex_paper_size=" + PAPER
else:
PAPEROPTS = ""
# Note that this needs to have the doctrees dir
ALLSPHINXOPTS = SPHINXOPTS + " " + PAPEROPTS + " "
WEBSITESPHINXOPTS = ""
# Number of threads to use for parallel-building the documentation.
NUM_THREADS = int(os.environ.get('SAGE_NUM_THREADS', 1))
# Minimize GAP/libGAP RAM usage in the builder, docbuild already uses too much
from sage.interfaces.gap import set_gap_memory_pool_size
set_gap_memory_pool_size(0) # will be rounded up to 1M
INCREMENTAL_BUILD = os.path.exists(os.path.join(SAGE_DOC, 'output'))
| 33.37931
| 78
| 0.654959
|
b1b4809be931dfcf5a92828f98447f166c0f3b1f
| 1,956
|
py
|
Python
|
sdk/identity/azure-identity/azure/identity/_credentials/azure_ml.py
|
jayqi/azure-sdk-for-python
|
d720e568b7dde29af68fe6a0f57c0dba8dff4b55
|
[
"MIT"
] | 1
|
2022-03-09T08:59:13.000Z
|
2022-03-09T08:59:13.000Z
|
sdk/identity/azure-identity/azure/identity/_credentials/azure_ml.py
|
jayqi/azure-sdk-for-python
|
d720e568b7dde29af68fe6a0f57c0dba8dff4b55
|
[
"MIT"
] | null | null | null |
sdk/identity/azure-identity/azure/identity/_credentials/azure_ml.py
|
jayqi/azure-sdk-for-python
|
d720e568b7dde29af68fe6a0f57c0dba8dff4b55
|
[
"MIT"
] | null | null | null |
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
import functools
import os
from typing import TYPE_CHECKING
from azure.core.pipeline.transport import HttpRequest
from .._constants import EnvironmentVariables
from .._internal.managed_identity_base import ManagedIdentityBase
from .._internal.managed_identity_client import ManagedIdentityClient
if TYPE_CHECKING:
from typing import Any, Optional
class AzureMLCredential(ManagedIdentityBase):
def get_client(self, **kwargs):
# type: (**Any) -> Optional[ManagedIdentityClient]
client_args = _get_client_args(**kwargs)
if client_args:
return ManagedIdentityClient(**client_args)
return None
def get_unavailable_message(self):
# type: () -> str
return "Azure ML managed identity configuration not found in environment"
def _get_client_args(**kwargs):
# type: (dict) -> Optional[dict]
identity_config = kwargs.pop("identity_config", None) or {}
url = os.environ.get(EnvironmentVariables.MSI_ENDPOINT)
secret = os.environ.get(EnvironmentVariables.MSI_SECRET)
if not (url and secret):
# Azure ML managed identity isn't available in this environment
return None
if kwargs.get("client_id"):
identity_config["clientid"] = kwargs.pop("client_id")
if kwargs.get("resource_id"):
identity_config["mi_res_id"] = kwargs.pop("resource_id")
return dict(
kwargs,
identity_config=identity_config,
base_headers={"secret": secret},
request_factory=functools.partial(_get_request, url),
)
def _get_request(url, scope, identity_config):
# type: (str, str, dict) -> HttpRequest
request = HttpRequest("GET", url)
request.format_parameters(dict({"api-version": "2017-09-01", "resource": scope}, **identity_config))
return request
| 32.6
| 104
| 0.681493
|
951fa9ea6aab36f8e200536db53d325a3991a74d
| 897
|
py
|
Python
|
execs/python/cpdbench_zero.py
|
SimonEismann/TCPDBench
|
e2e9158e40f38ca92472ead291f0b11fa5c8d1fd
|
[
"MIT"
] | 67
|
2020-03-17T04:26:16.000Z
|
2022-03-29T16:48:33.000Z
|
execs/python/cpdbench_zero.py
|
SimonEismann/TCPDBench
|
e2e9158e40f38ca92472ead291f0b11fa5c8d1fd
|
[
"MIT"
] | 6
|
2020-05-19T08:59:14.000Z
|
2021-10-05T10:40:50.000Z
|
execs/python/cpdbench_zero.py
|
SimonEismann/TCPDBench
|
e2e9158e40f38ca92472ead291f0b11fa5c8d1fd
|
[
"MIT"
] | 17
|
2020-05-02T21:16:06.000Z
|
2022-02-03T18:38:56.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A method that always returns no change points
Author: G.J.J. van den Burg
Date: 2020-05-07
License: MIT
Copyright: 2020, The Alan Turing Institute
"""
import argparse
import time
from cpdbench_utils import load_dataset, exit_success
def parse_args():
parser = argparse.ArgumentParser(description="Wrapper for None-detector")
parser.add_argument(
"-i", "--input", help="path to the input data file", required=True
)
parser.add_argument("-o", "--output", help="path to the output file")
return parser.parse_args()
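# Illustrative invocation (paths are placeholders):
#   python cpdbench_zero.py -i <input dataset file> -o <output file>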
def main():
args = parse_args()
data, mat = load_dataset(args.input)
start_time = time.time()
locations = []
stop_time = time.time()
runtime = stop_time - start_time
exit_success(data, args, {}, locations, runtime, __file__)
if __name__ == "__main__":
main()
| 19.5
| 77
| 0.671126
|
807ed6bebda16db2bc98cf42f86d2348edd57bb3
| 528
|
py
|
Python
|
parsers/kindle_parser.py
|
mkorneev/my_quotes_bot
|
cbf1fdc8bb71edeaa4282c29ffadff05ed41e966
|
[
"MIT"
] | null | null | null |
parsers/kindle_parser.py
|
mkorneev/my_quotes_bot
|
cbf1fdc8bb71edeaa4282c29ffadff05ed41e966
|
[
"MIT"
] | null | null | null |
parsers/kindle_parser.py
|
mkorneev/my_quotes_bot
|
cbf1fdc8bb71edeaa4282c29ffadff05ed41e966
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
Parser for Kindle clippings file
"""
import codecs
from collections import namedtuple
Quote = namedtuple('Quote', 'text')
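# parse() below assumes the usual five-line "My Clippings.txt" layout per entry
# (a sketch; exact metadata wording varies by device):
#   Book Title (Author)
#   - Your Highlight on Location 100-101 | Added on ...
#   <blank line>
#   the highlighted quote text
#   ==========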
def parse(path):
"""
Returns generator which yields Quotes
"""
with codecs.open(path, 'r') as f:
while f.readline():
f.readline()
f.readline()
line4 = f.readline()
f.readline()
# title, author = re.findall(r'^(.*) \((.*)\)$', line1)[0]
yield Quote(text=line4.strip().decode('utf-8'))
| 19.555556
| 70
| 0.545455
|
345a90bab938a43a541ce569d1cbc62ad27a3351
| 16,182
|
py
|
Python
|
test/functional/p2p_addr_relay.py
|
wilofice/dahomey
|
5cbc2406a27e68bbe30f85a7162b86f4741effab
|
[
"MIT"
] | 1
|
2022-03-19T13:35:37.000Z
|
2022-03-19T13:35:37.000Z
|
test/functional/p2p_addr_relay.py
|
wilofice/danxome
|
5cbc2406a27e68bbe30f85a7162b86f4741effab
|
[
"MIT"
] | null | null | null |
test/functional/p2p_addr_relay.py
|
wilofice/danxome
|
5cbc2406a27e68bbe30f85a7162b86f4741effab
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2020-2021 The Danxome Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test addr relay
"""
import random
import time
from test_framework.messages import (
CAddress,
msg_addr,
msg_getaddr,
msg_verack,
)
from test_framework.p2p import (
P2PInterface,
p2p_lock,
P2P_SERVICES,
)
from test_framework.test_framework import DanxomeTestFramework
from test_framework.util import assert_equal, assert_greater_than
class AddrReceiver(P2PInterface):
num_ipv4_received = 0
test_addr_contents = False
_tokens = 1
send_getaddr = True
def __init__(self, test_addr_contents=False, send_getaddr=True):
super().__init__()
self.test_addr_contents = test_addr_contents
self.send_getaddr = send_getaddr
def on_addr(self, message):
for addr in message.addrs:
self.num_ipv4_received += 1
if(self.test_addr_contents):
# relay_tests checks the content of the addr messages match
# expectations based on the message creation in setup_addr_msg
assert_equal(addr.nServices, 9)
if not 2022 <= addr.port < 8343:
raise AssertionError("Invalid addr.port of {} (2022-8342 expected)".format(addr.port))
assert addr.ip.startswith('123.123.')
def on_getaddr(self, message):
# When the node sends us a getaddr, it increments the addr relay tokens for the connection by 1000
self._tokens += 1000
@property
def tokens(self):
with p2p_lock:
return self._tokens
def increment_tokens(self, n):
# When we move mocktime forward, the node increments the addr relay tokens for its peers
with p2p_lock:
self._tokens += n
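    # Read together with rate_limit_tests below, this counter models the node's addr
    # processing budget for the connection: it appears to grow by roughly 0.1 tokens per
    # second of mocktime and by 1000 per getaddr the node sends us (an inference from
    # this test, not a stated protocol constant).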
def addr_received(self):
return self.num_ipv4_received != 0
def on_version(self, message):
self.send_message(msg_verack())
if (self.send_getaddr):
self.send_message(msg_getaddr())
def getaddr_received(self):
return self.message_count['getaddr'] > 0
class AddrTest(DanxomeTestFramework):
counter = 0
mocktime = int(time.time())
def set_test_params(self):
self.num_nodes = 1
self.extra_args = [["-whitelist=addr@127.0.0.1"]]
def run_test(self):
self.oversized_addr_test()
self.relay_tests()
self.inbound_blackhole_tests()
# This test populates the addrman, which can impact the node's behavior
# in subsequent tests
self.getaddr_tests()
self.blocksonly_mode_tests()
self.rate_limit_tests()
def setup_addr_msg(self, num, sequential_ips=True):
addrs = []
for i in range(num):
addr = CAddress()
addr.time = self.mocktime + random.randrange(-100, 100)
addr.nServices = P2P_SERVICES
if sequential_ips:
assert self.counter < 256 ** 2 # Don't allow the returned ip addresses to wrap.
addr.ip = f"123.123.{self.counter // 256}.{self.counter % 256}"
self.counter += 1
else:
addr.ip = f"{random.randrange(128,169)}.{random.randrange(1,255)}.{random.randrange(1,255)}.{random.randrange(1,255)}"
addr.port = 2022 + i
addrs.append(addr)
msg = msg_addr()
msg.addrs = addrs
return msg
def send_addr_msg(self, source, msg, receivers):
source.send_and_ping(msg)
# invoke m_next_addr_send timer:
# `addr` messages are sent on an exponential distribution with mean interval of 30s.
# Setting the mocktime 600s forward gives a probability of (1 - e^-(600/30)) that
# the event will occur (i.e. this fails once in ~500 million repeats).
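        # Worked out: 1 - e^(-600/30) = 1 - e^(-20) ≈ 1 - 2.1e-9, i.e. roughly one miss
        # per ~5e8 runs, which is where the "~500 million" figure above comes from.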
self.mocktime += 10 * 60
self.nodes[0].setmocktime(self.mocktime)
for peer in receivers:
peer.sync_send_with_ping()
def oversized_addr_test(self):
self.log.info('Send an addr message that is too large')
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
msg = self.setup_addr_msg(1010)
with self.nodes[0].assert_debug_log(['addr message size = 1010']):
addr_source.send_and_ping(msg)
self.nodes[0].disconnect_p2ps()
def relay_tests(self):
self.log.info('Test address relay')
self.log.info('Check that addr message content is relayed and added to addrman')
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
num_receivers = 7
receivers = []
for _ in range(num_receivers):
receivers.append(self.nodes[0].add_p2p_connection(AddrReceiver(test_addr_contents=True)))
# Keep this with length <= 10. Addresses from larger messages are not
# relayed.
num_ipv4_addrs = 10
msg = self.setup_addr_msg(num_ipv4_addrs)
with self.nodes[0].assert_debug_log(
[
'received: addr (301 bytes) peer=1',
]
):
self.send_addr_msg(addr_source, msg, receivers)
total_ipv4_received = sum(r.num_ipv4_received for r in receivers)
# Every IPv4 address must be relayed to two peers, other than the
# originating node (addr_source).
ipv4_branching_factor = 2
assert_equal(total_ipv4_received, num_ipv4_addrs * ipv4_branching_factor)
self.nodes[0].disconnect_p2ps()
self.log.info('Check relay of addresses received from outbound peers')
inbound_peer = self.nodes[0].add_p2p_connection(AddrReceiver(test_addr_contents=True, send_getaddr=False))
full_outbound_peer = self.nodes[0].add_outbound_p2p_connection(AddrReceiver(), p2p_idx=0, connection_type="outbound-full-relay")
msg = self.setup_addr_msg(2)
self.send_addr_msg(full_outbound_peer, msg, [inbound_peer])
self.log.info('Check that the first addr message received from an outbound peer is not relayed')
# Currently, there is a flag that prevents the first addr message received
# from a new outbound peer to be relayed to others. Originally meant to prevent
# large GETADDR responses from being relayed, it now typically affects the self-announcement
# of the outbound peer which is often sent before the GETADDR response.
assert_equal(inbound_peer.num_ipv4_received, 0)
# Send an empty ADDR message to initialize address relay on this connection.
inbound_peer.send_and_ping(msg_addr())
self.log.info('Check that subsequent addr messages sent from an outbound peer are relayed')
msg2 = self.setup_addr_msg(2)
self.send_addr_msg(full_outbound_peer, msg2, [inbound_peer])
assert_equal(inbound_peer.num_ipv4_received, 2)
self.log.info('Check address relay to outbound peers')
block_relay_peer = self.nodes[0].add_outbound_p2p_connection(AddrReceiver(), p2p_idx=1, connection_type="block-relay-only")
msg3 = self.setup_addr_msg(2)
self.send_addr_msg(inbound_peer, msg3, [full_outbound_peer, block_relay_peer])
self.log.info('Check that addresses are relayed to full outbound peers')
assert_equal(full_outbound_peer.num_ipv4_received, 2)
self.log.info('Check that addresses are not relayed to block-relay-only outbound peers')
assert_equal(block_relay_peer.num_ipv4_received, 0)
self.nodes[0].disconnect_p2ps()
def sum_addr_messages(self, msgs_dict):
return sum(bytes_received for (msg, bytes_received) in msgs_dict.items() if msg in ['addr', 'addrv2', 'getaddr'])
def inbound_blackhole_tests(self):
self.log.info('Check that we only relay addresses to inbound peers who have previously sent us addr related messages')
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
receiver_peer = self.nodes[0].add_p2p_connection(AddrReceiver())
blackhole_peer = self.nodes[0].add_p2p_connection(AddrReceiver(send_getaddr=False))
initial_addrs_received = receiver_peer.num_ipv4_received
peerinfo = self.nodes[0].getpeerinfo()
assert_equal(peerinfo[0]['addr_relay_enabled'], True) # addr_source
assert_equal(peerinfo[1]['addr_relay_enabled'], True) # receiver_peer
assert_equal(peerinfo[2]['addr_relay_enabled'], False) # blackhole_peer
# addr_source sends 2 addresses to node0
msg = self.setup_addr_msg(2)
addr_source.send_and_ping(msg)
self.mocktime += 30 * 60
self.nodes[0].setmocktime(self.mocktime)
receiver_peer.sync_with_ping()
blackhole_peer.sync_with_ping()
peerinfo = self.nodes[0].getpeerinfo()
# Confirm node received addr-related messages from receiver peer
assert_greater_than(self.sum_addr_messages(peerinfo[1]['bytesrecv_per_msg']), 0)
# And that peer received addresses
assert_equal(receiver_peer.num_ipv4_received - initial_addrs_received, 2)
# Confirm node has not received addr-related messages from blackhole peer
assert_equal(self.sum_addr_messages(peerinfo[2]['bytesrecv_per_msg']), 0)
# And that peer did not receive addresses
assert_equal(blackhole_peer.num_ipv4_received, 0)
self.log.info("After blackhole peer sends addr message, it becomes eligible for addr gossip")
blackhole_peer.send_and_ping(msg_addr())
# Confirm node has now received addr-related messages from blackhole peer
assert_greater_than(self.sum_addr_messages(peerinfo[1]['bytesrecv_per_msg']), 0)
assert_equal(self.nodes[0].getpeerinfo()[2]['addr_relay_enabled'], True)
msg = self.setup_addr_msg(2)
self.send_addr_msg(addr_source, msg, [receiver_peer, blackhole_peer])
# And that peer received addresses
assert_equal(blackhole_peer.num_ipv4_received, 2)
self.nodes[0].disconnect_p2ps()
def getaddr_tests(self):
# In the previous tests, the node answered GETADDR requests with an
# empty addrman. Due to GETADDR response caching (see
# CConnman::GetAddresses), the node would continue to provide 0 addrs
# in response until enough time has passed or the node is restarted.
self.restart_node(0)
self.log.info('Test getaddr behavior')
self.log.info('Check that we send a getaddr message upon connecting to an outbound-full-relay peer')
full_outbound_peer = self.nodes[0].add_outbound_p2p_connection(AddrReceiver(), p2p_idx=0, connection_type="outbound-full-relay")
full_outbound_peer.sync_with_ping()
assert full_outbound_peer.getaddr_received()
self.log.info('Check that we do not send a getaddr message upon connecting to a block-relay-only peer')
block_relay_peer = self.nodes[0].add_outbound_p2p_connection(AddrReceiver(), p2p_idx=1, connection_type="block-relay-only")
block_relay_peer.sync_with_ping()
assert_equal(block_relay_peer.getaddr_received(), False)
self.log.info('Check that we answer getaddr messages only from inbound peers')
inbound_peer = self.nodes[0].add_p2p_connection(AddrReceiver(send_getaddr=False))
inbound_peer.sync_with_ping()
# Add some addresses to addrman
for i in range(1000):
first_octet = i >> 8
second_octet = i % 256
a = f"{first_octet}.{second_octet}.1.1"
self.nodes[0].addpeeraddress(a, 2022)
full_outbound_peer.send_and_ping(msg_getaddr())
block_relay_peer.send_and_ping(msg_getaddr())
inbound_peer.send_and_ping(msg_getaddr())
# invoke m_next_addr_send timer, see under send_addr_msg() function for rationale
self.mocktime += 10 * 60
self.nodes[0].setmocktime(self.mocktime)
inbound_peer.wait_until(lambda: inbound_peer.addr_received() is True)
assert_equal(full_outbound_peer.num_ipv4_received, 0)
assert_equal(block_relay_peer.num_ipv4_received, 0)
assert inbound_peer.num_ipv4_received > 100
self.nodes[0].disconnect_p2ps()
def blocksonly_mode_tests(self):
self.log.info('Test addr relay in -blocksonly mode')
self.restart_node(0, ["-blocksonly", "-whitelist=addr@127.0.0.1"])
self.mocktime = int(time.time())
self.log.info('Check that we send getaddr messages')
full_outbound_peer = self.nodes[0].add_outbound_p2p_connection(AddrReceiver(), p2p_idx=0, connection_type="outbound-full-relay")
full_outbound_peer.sync_with_ping()
assert full_outbound_peer.getaddr_received()
self.log.info('Check that we relay address messages')
addr_source = self.nodes[0].add_p2p_connection(P2PInterface())
msg = self.setup_addr_msg(2)
self.send_addr_msg(addr_source, msg, [full_outbound_peer])
assert_equal(full_outbound_peer.num_ipv4_received, 2)
self.nodes[0].disconnect_p2ps()
def send_addrs_and_test_rate_limiting(self, peer, no_relay, *, new_addrs, total_addrs):
"""Send an addr message and check that the number of addresses processed and rate-limited is as expected"""
peer.send_and_ping(self.setup_addr_msg(new_addrs, sequential_ips=False))
peerinfo = self.nodes[0].getpeerinfo()[0]
addrs_processed = peerinfo['addr_processed']
addrs_rate_limited = peerinfo['addr_rate_limited']
self.log.debug(f"addrs_processed = {addrs_processed}, addrs_rate_limited = {addrs_rate_limited}")
if no_relay:
assert_equal(addrs_processed, 0)
assert_equal(addrs_rate_limited, 0)
else:
assert_equal(addrs_processed, min(total_addrs, peer.tokens))
assert_equal(addrs_rate_limited, max(0, total_addrs - peer.tokens))
def rate_limit_tests(self):
self.mocktime = int(time.time())
self.restart_node(0, [])
self.nodes[0].setmocktime(self.mocktime)
for conn_type, no_relay in [("outbound-full-relay", False), ("block-relay-only", True), ("inbound", False)]:
self.log.info(f'Test rate limiting of addr processing for {conn_type} peers')
if conn_type == "inbound":
peer = self.nodes[0].add_p2p_connection(AddrReceiver())
else:
peer = self.nodes[0].add_outbound_p2p_connection(AddrReceiver(), p2p_idx=0, connection_type=conn_type)
# Send 600 addresses. For all but the block-relay-only peer this should result in addresses being processed.
self.send_addrs_and_test_rate_limiting(peer, no_relay, new_addrs=600, total_addrs=600)
# Send 600 more addresses. For the outbound-full-relay peer (which we send a GETADDR, and thus will
# process up to 1001 incoming addresses), this means more addresses will be processed.
self.send_addrs_and_test_rate_limiting(peer, no_relay, new_addrs=600, total_addrs=1200)
            # Send 10 more. As we reached the processing limit for all nodes, no more addresses should be processed.
self.send_addrs_and_test_rate_limiting(peer, no_relay, new_addrs=10, total_addrs=1210)
# Advance the time by 100 seconds, permitting the processing of 10 more addresses.
# Send 200 and verify that 10 are processed.
self.mocktime += 100
self.nodes[0].setmocktime(self.mocktime)
peer.increment_tokens(10)
self.send_addrs_and_test_rate_limiting(peer, no_relay, new_addrs=200, total_addrs=1410)
# Advance the time by 1000 seconds, permitting the processing of 100 more addresses.
# Send 200 and verify that 100 are processed.
self.mocktime += 1000
self.nodes[0].setmocktime(self.mocktime)
peer.increment_tokens(100)
self.send_addrs_and_test_rate_limiting(peer, no_relay, new_addrs=200, total_addrs=1610)
self.nodes[0].disconnect_p2ps()
if __name__ == '__main__':
AddrTest().main()
| 43.972826
| 136
| 0.679953
|
4f108cae5082e44b7199955fbd9665d371aaec3a
| 1,779
|
py
|
Python
|
world/load_busstop.py
|
homata/geodjango-hands-on
|
3ecee17c0f14122a72aacc71f187011f0b112d08
|
[
"Apache-2.0"
] | 13
|
2018-12-02T07:40:08.000Z
|
2022-03-13T09:04:34.000Z
|
world/load_busstop.py
|
homata/geodjango-hands-on
|
3ecee17c0f14122a72aacc71f187011f0b112d08
|
[
"Apache-2.0"
] | 3
|
2020-06-05T19:24:35.000Z
|
2021-06-10T20:58:57.000Z
|
download/source_code/world/load_busstop.py
|
homata/geodjango-book
|
94842892bb5f4ea053b8366189e89a36bf7505c5
|
[
"Apache-2.0"
] | 6
|
2018-06-24T10:07:32.000Z
|
2019-08-12T09:46:18.000Z
|
# -*- coding: utf-8 -*-
import os
from django.contrib.gis.utils import LayerMapping
from world.models import Busstop
# Modelとファイルのカラムのマッピング
mapping = {
'p11_001' : 'P11_001' ,
'p11_002' : 'P11_002',
'p11_003_1' : 'P11_003_1',
'p11_003_2' : 'P11_003_2',
'p11_003_3' : 'P11_003_3',
'p11_003_4' : 'P11_003_4',
'p11_003_5' : 'P11_003_5',
'p11_003_6' : 'P11_003_6',
'p11_003_7' : 'P11_003_7',
'p11_003_8' : 'P11_003_8',
'p11_003_9' : 'P11_003_9',
'p11_003_10' : 'P11_003_10',
'p11_003_11' : 'P11_003_11',
'p11_003_12' : 'P11_003_12',
'p11_003_13' : 'P11_003_13',
'p11_003_14' : 'P11_003_14',
'p11_003_15' : 'P11_003_15',
'p11_003_16' : 'P11_003_16',
'p11_003_17' : 'P11_003_17',
'p11_003_18' : 'P11_003_18',
'p11_003_19' : 'P11_003_19',
'p11_004_1' : 'P11_004_1',
'p11_004_2' : 'P11_004_2',
'p11_004_3' : 'P11_004_3',
'p11_004_4' : 'P11_004_4',
'p11_004_5' : 'P11_004_5',
'p11_004_6' : 'P11_004_6',
'p11_004_7' : 'P11_004_7',
'p11_004_8' : 'P11_004_8',
'p11_004_9' : 'P11_004_9',
'p11_004_10' : 'P11_004_10',
'p11_004_11' : 'P11_004_11',
'p11_004_12' : 'P11_004_12',
'p11_004_13' : 'P11_004_13',
'p11_004_14' : 'P11_004_14',
'p11_004_15' : 'P11_004_15',
'p11_004_16' : 'P11_004_16',
'p11_004_17' : 'P11_004_17',
'p11_004_18' : 'P11_004_18',
'p11_004_19' : 'P11_004_19',
'geom' : 'POINT',
}
# File path
geojson_file = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data', 'busstop.geojson'))
# Run
def run(verbose=True):
lm = LayerMapping(Busstop, geojson_file, mapping, transform=False, encoding='UTF-8')
lm.save(strict=True, verbose=verbose)
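# A typical way to execute this loader (an assumption based on the standard GeoDjango
# tutorial layout, not something defined in this file) is from the Django shell, so that
# settings and the app registry are already configured:
#
#     $ python manage.py shell
#     >>> from world import load_busstop
#     >>> load_busstop.run(verbose=True)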
| 30.672414
| 99
| 0.602586
|
321c6689615a03961ec061d43a84e329afe8ef44
| 1,252
|
py
|
Python
|
inferelator/distributed/local_controller.py
|
meichenfang/inferelator
|
47f8ebcc5f303264a75814897c52026b47c57aef
|
[
"BSD-2-Clause"
] | 1
|
2019-05-13T23:12:48.000Z
|
2019-05-13T23:12:48.000Z
|
inferelator/distributed/local_controller.py
|
meichenfang/inferelator
|
47f8ebcc5f303264a75814897c52026b47c57aef
|
[
"BSD-2-Clause"
] | null | null | null |
inferelator/distributed/local_controller.py
|
meichenfang/inferelator
|
47f8ebcc5f303264a75814897c52026b47c57aef
|
[
"BSD-2-Clause"
] | null | null | null |
"""
LocalController just runs everything in a single process
"""
import collections.abc
from inferelator.distributed import AbstractController
from inferelator import utils
from inferelator.utils import Validator as check
class LocalController(AbstractController):
_controller_name = "local"
client = None
is_master = True
chunk = None
@classmethod
def connect(cls, *args, **kwargs):
return True
@classmethod
def sync_processes(cls, *args, **kwargs):
return True
@classmethod
def map(cls, func, *arg, **kwargs):
"""
Map a function across iterable(s) and return a list of results
:param func: function
Mappable function
:param args: iterable
Iterator(s)
"""
assert check.argument_callable(func)
        assert check.argument_list_type(arg, collections.abc.Iterable)
return list(map(func, *arg))
@classmethod
def set_processes(cls, process_count):
"""
        Set the number of worker processes to use (a no-op for the local controller)
:param process_count: int
:return:
"""
utils.Debug.vprint("Local does not support multiple cores", level=0)
@classmethod
def shutdown(cls):
return True
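# A minimal usage sketch (illustrative only): because LocalController runs everything in a
# single process, map() is simply the built-in map() materialised into a list.
if __name__ == "__main__":
    squares = LocalController.map(lambda x: x * x, [1, 2, 3])
    print(squares)  # expected: [1, 4, 9]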
| 23.185185
| 76
| 0.642173
|
57528a66597a164b2379b65dec10ea42d49d1a84
| 2,893
|
py
|
Python
|
tools/ci/tc/tests/test_taskgraph.py
|
BasixKOR/wpt
|
aa27d567c10dcdb2aea6884d5155dfaaa177a800
|
[
"BSD-3-Clause"
] | null | null | null |
tools/ci/tc/tests/test_taskgraph.py
|
BasixKOR/wpt
|
aa27d567c10dcdb2aea6884d5155dfaaa177a800
|
[
"BSD-3-Clause"
] | 59
|
2022-01-19T21:35:57.000Z
|
2022-03-30T21:35:27.000Z
|
tools/ci/tc/tests/test_taskgraph.py
|
BasixKOR/wpt
|
aa27d567c10dcdb2aea6884d5155dfaaa177a800
|
[
"BSD-3-Clause"
] | null | null | null |
# mypy: allow-untyped-defs
import pytest
import yaml
from tools.ci.tc import taskgraph
@pytest.mark.parametrize("data, update_data, expected", [
({"a": 1}, {"b": 2}, {"a": 1, "b": 2}),
({"a": 1}, {"a": 2}, {"a": 2}),
({"a": [1]}, {"a": [2]}, {"a": [1, 2]}),
({"a": {"b": 1, "c": 2}}, {"a": {"b": 2, "d": 3}}, {"a": {"b": 2, "c": 2, "d": 3}}),
({"a": {"b": [1]}}, {"a": {"b": [2]}}, {"a": {"b": [1, 2]}}),
]
)
def test_update_recursive(data, update_data, expected):
taskgraph.update_recursive(data, update_data)
assert data == expected
def test_use():
data = """
components:
component1:
a: 1
b: [1]
c: "c"
component2:
a: 2
b: [2]
d: "d"
tasks:
- task1:
use:
- component1
- component2
b: [3]
c: "e"
"""
tasks_data = yaml.safe_load(data)
assert taskgraph.load_tasks(tasks_data) == {
"task1": {
"a": 2,
"b": [1,2,3],
"c": "e",
"d": "d",
"name": "task1"
}
}
def test_var():
data = """
components:
component1:
a: ${vars.value}
tasks:
- task1:
use:
- component1
vars:
value: 1
"""
tasks_data = yaml.safe_load(data)
assert taskgraph.load_tasks(tasks_data) == {
"task1": {
"a": "1",
"vars": {"value": 1},
"name": "task1"
}
}
def test_map():
data = """
components: {}
tasks:
- $map:
for:
- vars:
a: 1
b: [1]
- vars:
a: 2
b: [2]
do:
- task1-${vars.a}:
a: ${vars.a}
b: [3]
- task2-${vars.a}:
a: ${vars.a}
b: [4]
"""
tasks_data = yaml.safe_load(data)
assert taskgraph.load_tasks(tasks_data) == {
"task1-1": {
"a": "1",
"b": [1, 3],
"vars": {"a": 1},
"name": "task1-1"
},
"task1-2": {
"a": "2",
"b": [2, 3],
"vars": {"a": 2},
"name": "task1-2"
},
"task2-1": {
"a": "1",
"b": [1, 4],
"vars": {"a": 1},
"name": "task2-1"
},
"task2-2": {
"a": "2",
"b": [2, 4],
"vars": {"a": 2},
"name": "task2-2"
},
}
def test_chunks():
data = """
components: {}
tasks:
- task1:
name: task1-${chunks.id}
chunks: 2
"""
tasks_data = yaml.safe_load(data)
assert taskgraph.load_tasks(tasks_data) == {
"task1-1": {
"name": "task1-1",
"chunks": {
"id": 1,
"total": 2
}
},
"task1-2": {
"name": "task1-2",
"chunks": {
"id": 2,
"total": 2
}
}
}
| 19.416107
| 88
| 0.366056
|
2c91e6a881287f23f7a3387269b7cba9f6d7488c
| 217
|
py
|
Python
|
networks/monitoring/nav/manager/container/target/start-cron-services.py
|
alexanderfefelov/docker-backpack
|
33aa9b41451fa54e4573c1fc0557a3b1f8fd37fa
|
[
"MIT"
] | 13
|
2020-10-23T13:10:49.000Z
|
2021-12-07T17:43:39.000Z
|
networks/monitoring/nav/manager/container/target/start-cron-services.py
|
alexanderfefelov/docker-backpack
|
33aa9b41451fa54e4573c1fc0557a3b1f8fd37fa
|
[
"MIT"
] | 101
|
2020-08-09T07:01:23.000Z
|
2021-06-11T11:59:29.000Z
|
networks/monitoring/nav/manager/container/target/start-cron-services.py
|
alexanderfefelov/docker-backpack
|
33aa9b41451fa54e4573c1fc0557a3b1f8fd37fa
|
[
"MIT"
] | 6
|
2020-08-11T09:59:39.000Z
|
2021-11-04T15:45:02.000Z
|
#!/usr/bin/env python3
from nav.startstop import CronService, ServiceRegistry
registry = ServiceRegistry()
for service in registry:
if type(registry[service]) is CronService:
registry[service].start()
| 19.727273
| 54
| 0.741935
|
fa401c88954339276304ae936fc6f58c6bc88be6
| 2,372
|
py
|
Python
|
malleus/api/domain/protos/timings_pb2.py
|
joelgerard/malleus
|
763850ef270a449829b89a998cdce8febf5020ef
|
[
"Apache-2.0"
] | null | null | null |
malleus/api/domain/protos/timings_pb2.py
|
joelgerard/malleus
|
763850ef270a449829b89a998cdce8febf5020ef
|
[
"Apache-2.0"
] | 2
|
2021-02-08T20:22:50.000Z
|
2021-06-01T22:07:40.000Z
|
malleus/api/domain/protos/timings_pb2.py
|
joelgerard/malleus
|
763850ef270a449829b89a998cdce8febf5020ef
|
[
"Apache-2.0"
] | null | null | null |
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: malleus/api/domain/protos/timings.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from malleus.api.domain.protos import timing_pb2 as malleus_dot_api_dot_domain_dot_protos_dot_timing__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='malleus/api/domain/protos/timings.proto',
package='malleus.api.domain',
syntax='proto3',
serialized_pb=_b('\n\'malleus/api/domain/protos/timings.proto\x12\x12malleus.api.domain\x1a&malleus/api/domain/protos/timing.proto\"6\n\x07Timings\x12+\n\x07timings\x18\x01 \x03(\x0b\x32\x1a.malleus.api.domain.Timingb\x06proto3')
,
dependencies=[malleus_dot_api_dot_domain_dot_protos_dot_timing__pb2.DESCRIPTOR,])
_TIMINGS = _descriptor.Descriptor(
name='Timings',
full_name='malleus.api.domain.Timings',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='timings', full_name='malleus.api.domain.Timings.timings', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=103,
serialized_end=157,
)
_TIMINGS.fields_by_name['timings'].message_type = malleus_dot_api_dot_domain_dot_protos_dot_timing__pb2._TIMING
DESCRIPTOR.message_types_by_name['Timings'] = _TIMINGS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Timings = _reflection.GeneratedProtocolMessageType('Timings', (_message.Message,), dict(
DESCRIPTOR = _TIMINGS,
__module__ = 'malleus.api.domain.protos.timings_pb2'
# @@protoc_insertion_point(class_scope:malleus.api.domain.Timings)
))
_sym_db.RegisterMessage(Timings)
# @@protoc_insertion_point(module_scope)
| 32.493151
| 231
| 0.778246
|
1b4529efb2fa7e45656a5575865cffb8a1b4f9ab
| 8,885
|
py
|
Python
|
predict.py
|
BerkayAydin/Coronavirus-Prediction-Tool
|
8ef91734a36259cc282dc517be4e28b084878963
|
[
"MIT"
] | null | null | null |
predict.py
|
BerkayAydin/Coronavirus-Prediction-Tool
|
8ef91734a36259cc282dc517be4e28b084878963
|
[
"MIT"
] | 1
|
2020-03-01T12:48:21.000Z
|
2020-03-01T12:48:21.000Z
|
predict.py
|
BerkayAydin/Coronavirus-Prediction-Tool
|
8ef91734a36259cc282dc517be4e28b084878963
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
from fbprophet import Prophet
from fbprophet.diagnostics import cross_validation
import plotly.express as px
import os
from datetime import datetime
c_url = 'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Confirmed.csv'
d_url = 'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Deaths.csv'
r_url = 'https://raw.githubusercontent.com/CSSEGISandData/COVID-19/master/csse_covid_19_data/csse_covid_19_time_series/time_series_19-covid-Recovered.csv'
def prepare_data(confirmed, deaths, recovered):
confs = {}
death = {}
recs = {}
for i in range(confirmed.shape[0]):
a = confirmed.iloc[i, 4:].reset_index()
a.columns = ['ds', 'y']
confs[confirmed.iloc[i, 0]] = a
for i in range(deaths.shape[0]):
a = deaths.iloc[i, 4:].reset_index()
a.columns = ['ds', 'y']
death[deaths.iloc[i, 0]] = a
for i in range(recovered.shape[0]):
a = recovered.iloc[i, 4:].reset_index()
a.columns = ['ds', 'y']
recs[recovered.iloc[i, 0]] = a
confs['Global'] = confirmed.iloc[:, 4:].sum(axis=0).reset_index()
confs['Global'].columns = ['ds', 'y']
death['Global'] = deaths.iloc[:, 4:].sum(axis=0).reset_index()
death['Global'].columns = ['ds', 'y']
recs['Global'] = recovered.iloc[:, 4:].sum(axis=0).reset_index()
recs['Global'].columns = ['ds', 'y']
return (confs, death, recs)
def predict_n_days(n, df):
with suppress_stdout_stderr():
m = Prophet(changepoint_prior_scale=0.5)
m.fit(df)
future = m.make_future_dataframe(periods=n)
forecast = m.predict(future)
return forecast
def generate_map(df, curr=False):
newDF = df.copy()
if curr:
dates = newDF.columns[4:]
else:
dates = newDF.columns[3:]
df2 = pd.DataFrame(columns=['Place', 'Lat', 'Long', 'Date', 'Size', 'Cases', 'Text', 'Color'])
for place in newDF["Location"].unique():
for date in dates:
Lat = df.loc[df["Location"] == place, "Lat"]
Long = df.loc[df["Location"] == place, "Long"]
number = int(np.ceil(df.loc[df["Location"] == place, date].item()))
text = "Number of cases: " + str(number) + " in " + str(place)
case = int(df.loc[df["Location"] == place, date].item())
size = (np.log(int(df.loc[df["Location"] == place, date].item()))/np.log(1e15))*500
if (np.isinf(size)):
size = 0
if (number < 3):
color = "< 3"
elif (3 <= number < 11):
color = "< 11"
elif (11 <= number < 21):
color = "< 21"
elif (21 <= number < 51):
color = "< 51"
elif (number > 50):
color = "50+"
temp = pd.DataFrame({"Place": [place],
"Lat": int(Lat),
"Long": int(Long),
"Date": date,
"Size": int(size),
"Cases": case,
"Text": str(text),
"Color": color})
df2 = df2.append(temp)
fig = px.scatter_geo(df2, lat="Lat", lon="Long", color="Color",
hover_name="Text", size=(list(df2["Size"])),
animation_frame="Date",
text="Text",
labels={},
locationmode='country names')
fig.update_geos(projection_type="natural earth",
showcountries=True)
return fig
def refresh(ref_type):
confirmed = pd.read_csv(c_url)
deaths = pd.read_csv(d_url)
recovered = pd.read_csv(r_url)
for df in [confirmed, deaths, recovered]:
df.loc[df["Province/State"].isna(), 'Province/State'] = df.loc[df["Province/State"].isna(), 'Country/Region']
(conf, death, recs) = prepare_data(confirmed, deaths, recovered)
fs = pd.DataFrame()
if ref_type == 'confirmed':
for (location, df) in conf.items():
forecast = predict_n_days(14, df)
forecast[['ds', 'yhat']][-14:]
forecast = forecast[['ds', 'yhat']][-14:].T
forecast.columns = map(lambda t: t.strftime('%-m/%-d/%y'), forecast.iloc[0])
forecast = forecast.drop(forecast.index[0])
forecast.insert(0, 'Location', location)
if location != 'Global':
forecast.insert(1, 'Lat', confirmed.loc[confirmed['Province/State'] == location, 'Lat'].item())
forecast.insert(2, 'Long', confirmed.loc[confirmed['Province/State'] == location, 'Long'].item())
else:
forecast.insert(1, 'Lat', None)
forecast.insert(2, 'Long', None)
fs = fs.append(forecast)
print('confirmed, ' + str(len(fs)))
fig = generate_map(fs.iloc[:-1])
fig.write_html('templates/fig_c.html')
elif ref_type == 'deaths':
for (location, df) in death.items():
forecast = predict_n_days(14, df)
forecast[['ds', 'yhat']][-14:]
forecast = forecast[['ds', 'yhat']][-14:].T
forecast.columns = map(lambda t: t.strftime(
'%-m/%-d/%y'), forecast.iloc[0])
forecast = forecast.drop(forecast.index[0])
forecast.insert(0, 'Location', location)
if location != 'Global':
forecast.insert(
1, 'Lat', confirmed.loc[confirmed['Province/State'] == location, 'Lat'].item())
forecast.insert(
2, 'Long', confirmed.loc[confirmed['Province/State'] == location, 'Long'].item())
else:
forecast.insert(1, 'Lat', None)
forecast.insert(2, 'Long', None)
fs = fs.append(forecast)
print('deaths, ' + str(len(fs)))
fig = generate_map(fs.iloc[:-1])
fig.write_html('templates/fig_d.html')
elif ref_type == 'recovered':
for (location, df) in recs.items():
forecast = predict_n_days(14, df)
forecast[['ds', 'yhat']][-14:]
forecast = forecast[['ds', 'yhat']][-14:].T
forecast.columns = map(lambda t: t.strftime(
'%-m/%-d/%y'), forecast.iloc[0])
forecast = forecast.drop(forecast.index[0])
forecast.insert(0, 'Location', location)
if location != 'Global':
forecast.insert(
1, 'Lat', confirmed.loc[confirmed['Province/State'] == location, 'Lat'].item())
forecast.insert(
2, 'Long', confirmed.loc[confirmed['Province/State'] == location, 'Long'].item())
else:
forecast.insert(1, 'Lat', None)
forecast.insert(2, 'Long', None)
fs = fs.append(forecast)
print('recovered, ' + str(len(fs)))
fig = generate_map(fs.iloc[:-1])
fig.write_html('templates/fig_r.html')
elif ref_type == 'curr_confirmed':
fig = generate_map(confirmed, curr=True)
fig.write_html('templates/curr_c.html')
elif ref_type == 'curr_deaths':
fig = generate_map(deaths, curr=True)
fig.write_html('templates/curr_d.html')
elif ref_type == 'curr_recovered':
fig = generate_map(recovered, curr=True)
fig.write_html('templates/curr_r.html')
# from https://stackoverflow.com/questions/11130156/suppress-stdout-stderr-print-from-python-functions
class suppress_stdout_stderr(object):
'''
A context manager for doing a "deep suppression" of stdout and stderr in
Python, i.e. will suppress all print, even if the print originates in a
compiled C/Fortran sub-function.
This will not suppress raised exceptions, since exceptions are printed
to stderr just before a script exits, and after the context manager has
exited (at least, I think that is why it lets exceptions through).
'''
def __init__(self):
# Open a pair of null files
self.null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)]
# Save the actual stdout (1) and stderr (2) file descriptors.
self.save_fds = (os.dup(1), os.dup(2))
def __enter__(self):
# Assign the null pointers to stdout and stderr.
os.dup2(self.null_fds[0], 1)
os.dup2(self.null_fds[1], 2)
def __exit__(self, *_):
# Re-assign the real stdout/stderr back to (1) and (2)
os.dup2(self.save_fds[0], 1)
os.dup2(self.save_fds[1], 2)
# Close the null files
os.close(self.null_fds[0])
os.close(self.null_fds[1])
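# A minimal sketch of driving the refresh pipeline (an illustrative assumption; the module
# itself defines no entry point here). Each call writes an HTML map into templates/:
#
#     if __name__ == '__main__':
#         for kind in ('curr_confirmed', 'confirmed', 'deaths', 'recovered'):
#             refresh(kind)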
| 42.109005
| 154
| 0.556218
|
e1d53892d0091389344910e46313e6b21bffb310
| 7,224
|
py
|
Python
|
trainer.py
|
senadkurtisi/pytorch-image-captioning
|
d7ebf2e6f9bec8bf199b5b0236ffbdfdb5152b46
|
[
"MIT"
] | 1
|
2022-02-08T08:58:38.000Z
|
2022-02-08T08:58:38.000Z
|
trainer.py
|
senadkurtisi/pytorch-image-captioning
|
d7ebf2e6f9bec8bf199b5b0236ffbdfdb5152b46
|
[
"MIT"
] | null | null | null |
trainer.py
|
senadkurtisi/pytorch-image-captioning
|
d7ebf2e6f9bec8bf199b5b0236ffbdfdb5152b46
|
[
"MIT"
] | 1
|
2022-01-10T07:27:15.000Z
|
2022-01-10T07:27:15.000Z
|
import time
import numpy as np
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import torchvision.models as models
from nltk.translate.bleu_score import corpus_bleu
from dataloader import Flickr8KDataset
from decoder import CaptionDecoder
from utils.decoding_utils import greedy_decoding
from utils.utils import save_checkpoint, log_gradient_norm, set_up_causal_mask
def evaluate(subset, encoder, decoder, config, device):
"""Evaluates (BLEU score) caption generation model on a given subset.
Arguments:
subset (Flickr8KDataset): Train/Val/Test subset
encoder (nn.Module): CNN which generates image features
decoder (nn.Module): Transformer Decoder which generates captions for images
config (object): Contains configuration for the evaluation pipeline
device (torch.device): Device on which to port used tensors
Returns:
        bleu (list of float): BLEU-1 to BLEU-4 corpus scores on the entire subset
"""
batch_size = config["batch_size"]["eval"]
max_len = config["max_len"]
bleu_w = config["bleu_weights"]
# Mapping from vocab index to string representation
idx2word = subset._idx2word
# Ids for special tokens
sos_id = subset._start_idx
eos_id = subset._end_idx
pad_id = subset._pad_idx
references_total = []
predictions_total = []
print("Evaluating model.")
for x_img, y_caption in subset.inference_batch(batch_size):
x_img = x_img.to(device)
# Extract image features
img_features = encoder(x_img)
img_features = img_features.view(img_features.size(0), img_features.size(1), -1)
img_features = img_features.permute(0, 2, 1)
img_features = img_features.detach()
# Get the caption prediction for each image in the mini-batch
predictions = greedy_decoding(decoder, img_features, sos_id, eos_id, pad_id, idx2word, max_len, device)
references_total += y_caption
predictions_total += predictions
# Evaluate BLEU score of the generated captions
bleu_1 = corpus_bleu(references_total, predictions_total, weights=bleu_w["bleu-1"]) * 100
bleu_2 = corpus_bleu(references_total, predictions_total, weights=bleu_w["bleu-2"]) * 100
bleu_3 = corpus_bleu(references_total, predictions_total, weights=bleu_w["bleu-3"]) * 100
bleu_4 = corpus_bleu(references_total, predictions_total, weights=bleu_w["bleu-4"]) * 100
bleu = [bleu_1, bleu_2, bleu_3, bleu_4]
return bleu
def train(config, writer, device):
"""Performs the training of the model.
Arguments:
config (object): Contains configuration of the pipeline
writer: tensorboardX writer object
device: device on which to map the model and data
"""
torch.manual_seed(config["seed"])
np.random.seed(config["seed"])
# Define dataloader hyper-parameters
train_hyperparams = {
"batch_size": config["batch_size"]["train"],
"shuffle": True,
"num_workers": 1,
"drop_last": True
}
# Create dataloaders
train_set = Flickr8KDataset(config, config["split_save"]["train"], training=True)
valid_set = Flickr8KDataset(config, config["split_save"]["validation"], training=False)
train_loader = DataLoader(train_set, **train_hyperparams)
#######################
# Set up the encoder
#######################
# Download pretrained CNN encoder
encoder = models.resnet50(pretrained=True)
# Extract only the convolutional backbone of the model
encoder = torch.nn.Sequential(*(list(encoder.children())[:-2]))
encoder = encoder.to(device)
# Freeze encoder layers
for param in encoder.parameters():
param.requires_grad = False
encoder.eval()
######################
# Set up the decoder
######################
# Instantiate the decoder
decoder = CaptionDecoder(config)
decoder = decoder.to(device)
if config["checkpoint"]["load"]:
checkpoint_path = config["checkpoint"]["path"]
decoder.load_state_dict(torch.load(checkpoint_path))
decoder.train()
# Set up causal mask for transformer decoder
causal_mask = set_up_causal_mask(config["max_len"], device)
# Load training configuration
train_config = config["train_config"]
learning_rate = train_config["learning_rate"]
# Prepare the model optimizer
optimizer = torch.optim.AdamW(
decoder.parameters(),
lr=train_config["learning_rate"],
weight_decay=train_config["l2_penalty"]
)
# Loss function
loss_fcn = nn.CrossEntropyLoss(label_smoothing=0.1)
start_time = time.strftime("%b-%d_%H-%M-%S")
train_step = 0
for epoch in range(train_config["num_of_epochs"]):
print("Epoch:", epoch)
decoder.train()
for x_img, x_words, y, tgt_padding_mask in train_loader:
optimizer.zero_grad()
train_step += 1
# Move the used tensors to defined device
x_img, x_words = x_img.to(device), x_words.to(device)
y = y.to(device)
tgt_padding_mask = tgt_padding_mask.to(device)
# Extract image features
with torch.no_grad():
img_features = encoder(x_img)
img_features = img_features.view(img_features.size(0), img_features.size(1), -1)
img_features = img_features.permute(0, 2, 1)
img_features = img_features.detach()
# Get the prediction of the decoder
y_pred = decoder(x_words, img_features, tgt_padding_mask, causal_mask)
tgt_padding_mask = torch.logical_not(tgt_padding_mask)
y_pred = y_pred[tgt_padding_mask]
y = y[tgt_padding_mask]
# Calculate the loss
loss = loss_fcn(y_pred, y.long())
# Update model weights
loss.backward()
log_gradient_norm(decoder, writer, train_step, "Before")
torch.nn.utils.clip_grad_norm_(decoder.parameters(), train_config["gradient_clipping"])
log_gradient_norm(decoder, writer, train_step, "After")
optimizer.step()
writer.add_scalar("Train/Step-Loss", loss.item(), train_step)
writer.add_scalar("Train/Learning-Rate", learning_rate, train_step)
# Save the model and optimizer state
save_checkpoint(decoder, optimizer, start_time, epoch)
# Evaluate model performance
if (epoch + 1) % train_config["eval_period"] == 0:
with torch.no_grad():
encoder.eval()
decoder.eval()
# Evaluate model performance on subsets
train_bleu = evaluate(train_set, encoder, decoder, config, device)
valid_bleu = evaluate(valid_set, encoder, decoder, config, device)
# Log the evaluated BLEU score
for i, t_b in enumerate(train_bleu):
writer.add_scalar(f"Train/BLEU-{i+1}", t_b, epoch)
for i, v_b in enumerate(valid_bleu):
writer.add_scalar(f"Valid/BLEU-{i+1}", v_b, epoch)
decoder.train()
print()
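# A minimal sketch of invoking the training loop (illustrative assumptions: the configuration
# is read from a JSON file containing the keys referenced above, and a tensorboardX
# SummaryWriter is used for logging):
#
#     import json
#     import torch
#     from tensorboardX import SummaryWriter
#
#     config = json.load(open("config.json"))
#     device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
#     train(config, SummaryWriter(), device)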
| 37.237113
| 111
| 0.650055
|
9d8a4a4e40f1eb2d3f83012f8f1376e81b80e192
| 256
|
py
|
Python
|
airloft/utils/models.py
|
Allaye/airloft
|
deb6274b9eb1de2de79d0b152d67411d1f747afe
|
[
"FTL"
] | 2
|
2022-03-06T10:34:12.000Z
|
2022-03-27T15:41:52.000Z
|
airloft/utils/models.py
|
Allaye/airloft
|
deb6274b9eb1de2de79d0b152d67411d1f747afe
|
[
"FTL"
] | null | null | null |
airloft/utils/models.py
|
Allaye/airloft
|
deb6274b9eb1de2de79d0b152d67411d1f747afe
|
[
"FTL"
] | null | null | null |
from django.db import models
class ModelTracker(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
ordering = ('-created_at',)
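# A minimal sketch (hypothetical model, not part of this app) of how the abstract tracker is
# meant to be reused: concrete subclasses inherit the timestamp fields and the default
# '-created_at' ordering.
#
#     class Booking(ModelTracker):
#         reference = models.CharField(max_length=32)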
| 23.272727
| 56
| 0.691406
|
c3067491e8252a49ad8a53461b763f0acdf3468f
| 10,575
|
py
|
Python
|
cert-rotate/cert-rotate.py
|
WoTTsecurity/google-core-iot
|
edb835107c1b3005181f438b0f9ba09eb62242f0
|
[
"MIT"
] | null | null | null |
cert-rotate/cert-rotate.py
|
WoTTsecurity/google-core-iot
|
edb835107c1b3005181f438b0f9ba09eb62242f0
|
[
"MIT"
] | 2
|
2019-07-11T16:00:57.000Z
|
2021-06-03T12:12:11.000Z
|
cert-rotate/cert-rotate.py
|
WoTTsecurity/google-core-iot
|
edb835107c1b3005181f438b0f9ba09eb62242f0
|
[
"MIT"
] | 1
|
2019-06-27T16:58:12.000Z
|
2019-06-27T16:58:12.000Z
|
import os
import pytz
import json
import requests
import datetime
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from google.oauth2 import service_account
from googleapiclient import discovery
from googleapiclient.errors import HttpError
# Google IoT core project settings
PROJECT_ID = os.getenv('PROJECT_ID')
CLOUD_REGION = os.getenv('CLOUD_REGION')
REGISTRY_ID = os.getenv('REGISTRY_ID')
SERVICE_ACCOUNT_JSON = os.getenv('GOOGLE_APPLICATION_CREDENTIALS', 'service_account.json')
# remove expired certs from the Google IoT registry while updating devices
REMOVE_EXPIRED_CERTS = True
# wott api token
WOTT_API_TOKEN = os.getenv('WOTT_API_TOKEN')
def check_var(name):
if globals()[name] is None:
print('Environment variable {} must be set'.format(name))
exit(1)
def _error_print(e, msg):
error = json.loads(e.content)
print("\n{}, Code: {}, Status: {}".format(msg, error["error"]["code"], error["error"]["status"]))
print("Message: {}\n".format(error["error"]["message"]))
def get_client(service_account_json):
"""Returns an authorized API client by discovering the IoT API and creating
a service object using the service account credentials JSON."""
api_scopes = ['https://www.googleapis.com/auth/cloud-platform']
api_version = 'v1'
discovery_api = 'https://cloudiot.googleapis.com/$discovery/rest'
service_name = 'cloudiotcore'
credentials = service_account.Credentials.from_service_account_file(service_account_json)
scoped_credentials = credentials.with_scopes(api_scopes)
discovery_url = '{}?version={}'.format(discovery_api, api_version)
try:
return discovery.build(
service_name,
api_version,
discoveryServiceUrl=discovery_url,
credentials=scoped_credentials
)
except HttpError as e:
_error_print(e, "Error while creating Google IoT Core client")
return None
def enroll_device(client, registry_path, device_id, certificate):
device_template = {
'id': device_id,
'credentials': [{
'publicKey': {
'format': 'ES256_X509_PEM',
'key': certificate,
},
'expirationTime': get_certificate_expiration_date(certificate).strftime('%Y-%m-%dT%H:%M:%SZ')
}]
}
try:
devices = client.projects().locations().registries().devices()
return devices.create(parent=registry_path, body=device_template).execute()
except HttpError as e:
_error_print(e, "Error while enrolling device")
return None
def patch_device(client, registry_path, device_id, credentials):
patch = {
'credentials': credentials
}
try:
device_name = '{}/devices/{}'.format(registry_path, device_id)
return client.projects().locations().registries().devices().patch(
name=device_name, updateMask='credentials', body=patch).execute()
except HttpError as e:
_error_print(e, "Error while patching device")
return None
def get_devices(client, registry_path):
"""Retrieve the devices."""
dev_list = get_device_list(client, registry_path)
if dev_list is None:
return None
device_list = []
devices = client.projects().locations().registries().devices()
for dev in dev_list:
device_name = '{}/devices/{}'.format(registry_path, dev.get('id'))
try:
device = devices.get(name=device_name).execute()
except HttpError as e:
            _error_print(e, "Error while retrieving IoT device")
continue
device_list.append(device)
return device_list
def get_device_list(client, registry_path):
"""
Get Google IoT Registry device list
:param client: Google IoT access client object
:param registry_path: Google IoT Registry path
:return: list of devices in registry (names and ids only)
"""
try:
devices = client.projects().locations().registries().devices(
).list(parent=registry_path).execute().get('devices', [])
return devices
except HttpError as e:
_error_print(e, "Error while retrieving devices")
return None
def get_wott_device_list(token):
try:
req = requests.get("https://api.wott.io/v0.2/list-devices",
headers={"Authorization": "Token {}".format(token), "Content-Type": "application/json"})
except requests.exceptions.RequestException as e:
print("{}".format(e))
return None
return req
def get_wott_device_cert(device_id):
try:
req = requests.get('https://api.wott.io/v0.2/device-cert/{}'.format(device_id))
except requests.exceptions.RequestException as e:
print("{}".format(e))
return None
return req
def get_certificate_expiration_date(cert_string):
"""
Returns the expiration date of the certificate.
"""
cert = x509.load_pem_x509_certificate(cert_string.encode(), default_backend())
return cert.not_valid_after.replace(tzinfo=pytz.utc)
def is_certificate_expired(cert_string):
return datetime.datetime.now(datetime.timezone.utc) > get_certificate_expiration_date(cert_string)
def parse_wott_devices(wott_dev_list):
devices = {}
print("{:>50}|{:>12}".format("device name", "cert. state"))
print("-" * 63)
for device in wott_dev_list:
device_id = device['device']['device_id']
cert_resp = get_wott_device_cert(device_id)
if cert_resp is None or not cert_resp.ok:
print("Could not get device cert for device {} from WoTT server".format(device_id))
cert = None
expired = True
else:
cert = cert_resp.text
expired = is_certificate_expired(cert)
gcloud_dev_id = 'a-' + device_id if device_id[:1].isdigit() else device_id
devices[device_id] = {
'wott': device,
'cert': cert,
'gc_id': gcloud_dev_id,
'done': False,
'expired': expired,
'expirationTime': get_certificate_expiration_date(cert).strftime('%Y-%m-%dT%H:%M:%SZ')
}
cert_state = 'Invalid' if cert is None else 'OK' if not expired else 'Expired'
print("{:>50}|{:>12}".format(device_id, cert_state))
print("-" * 8)
return devices
def print_giot_devices(giot_dev_list):
print("{:>50}|{}".format("device name", "cert. expiration"))
print("-" * 75)
for device in giot_dev_list:
for idx, credential in enumerate(device.get('credentials')):
if idx == 0:
print("{:>50}|{}".format(device.get('id'), credential.get('expirationTime')))
else:
print("{:^50}|{}".format('-- ... --', credential.get('expirationTime')))
print("-" * 8)
def main():
check_var('PROJECT_ID')
check_var('REGISTRY_ID')
check_var('CLOUD_REGION')
check_var('WOTT_API_TOKEN')
if not os.path.isfile(SERVICE_ACCOUNT_JSON):
print('File {} not found.\n'
'Perhaps the environment variable {} is not set or is not set correctly'.format(
SERVICE_ACCOUNT_JSON, 'GOOGLE_APPLICATION_CREDENTIALS'
))
exit(1)
def retrieve_giot_devices():
print('\nretrieving device list from google registry...')
dev_list = get_devices(client, registry_name)
if dev_list is None:
exit(1)
print("{} devices retrieved.".format(len(dev_list)))
print_giot_devices(dev_list)
return dev_list
project_id = PROJECT_ID
cloud_region = CLOUD_REGION
registry_id = REGISTRY_ID
service_account_json = SERVICE_ACCOUNT_JSON
registry_name = 'projects/{}/locations/{}/registries/{}'.format(
project_id, cloud_region, registry_id)
client = get_client(service_account_json)
print('retrieving device list from wott dashboard...')
dev_list_resp = get_wott_device_list(WOTT_API_TOKEN)
if dev_list_resp is None or not dev_list_resp.ok:
print("Could not get device list from WoTT server")
exit(1)
wott_dev_list = dev_list_resp.json()
print("{} devices retrieved.".format(len(wott_dev_list)))
devices = parse_wott_devices(wott_dev_list)
gc_dev_list = retrieve_giot_devices()
updated = 0
for device in gc_dev_list:
giot_dev_id = device.get('id')
wott_dev_id = giot_dev_id[2:] if giot_dev_id[:2] == 'a-' else giot_dev_id
if wott_dev_id in devices:
devices[wott_dev_id]['done'] = True # mark that this device was found and processed
credentials = device.get('credentials')
if credentials is None:
credentials = []
            # if the WoTT cert is expired and we do not need to remove expired Google certs, there is no need to patch
skip = devices[wott_dev_id]['expired'] and not REMOVE_EXPIRED_CERTS
for idx, cred in enumerate(credentials):
if cred['publicKey']['key'] == devices[wott_dev_id]['cert']: # cert is already here, skip that device
skip = True
break
if skip:
continue
if REMOVE_EXPIRED_CERTS:
credentials = [cred for cred in credentials if not is_certificate_expired(cred['publicKey']['key'])]
if not devices[wott_dev_id]['expired']:
credentials.append(
{
'publicKey': {
'format': 'ES256_X509_PEM',
'key': devices[wott_dev_id]['cert']
},
'expirationTime': devices[wott_dev_id]['expirationTime']
}
)
print("patch {} with new cert...".format(giot_dev_id))
if patch_device(client, registry_name, giot_dev_id, credentials) is not None:
updated += 1
for wott_dev_id, device in devices.items():
if not device['done'] and not device['expired']:
            # if this device was not processed in the previous cycle, it is a new one; skip expired and invalid ones.
print("Enroll {} as {}...".format(wott_dev_id, device['gc_id']))
if enroll_device(client, registry_name, device['gc_id'], device['cert']) is not None:
updated += 1
print("\n{} devices updated/enrolled.\n".format(updated))
if updated > 0:
retrieve_giot_devices()
if __name__ == '__main__':
main()
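# A minimal sketch of running the rotation locally (hypothetical values). The settings are
# read from the environment at import time, so they must be exported before the script
# starts:
#
#     $ export PROJECT_ID=my-gcp-project
#     $ export CLOUD_REGION=europe-west1
#     $ export REGISTRY_ID=my-registry
#     $ export WOTT_API_TOKEN=<token>
#     $ export GOOGLE_APPLICATION_CREDENTIALS=service_account.json
#     $ python cert-rotate.py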
| 33.046875
| 119
| 0.632057
|
ba9a44d335062e59653d90dd20a38fdcd564a054
| 3,951
|
py
|
Python
|
Tool.py
|
bachnguyenhuu/RENAT
|
f747996e1b79284ef70c51b71774098c200abc7f
|
[
"Apache-2.0"
] | 65
|
2018-01-23T00:25:52.000Z
|
2022-02-03T12:02:59.000Z
|
Tool.py
|
bachnguyenhuu/RENAT
|
f747996e1b79284ef70c51b71774098c200abc7f
|
[
"Apache-2.0"
] | 11
|
2018-01-25T05:29:14.000Z
|
2021-01-03T12:07:33.000Z
|
Tool.py
|
bachnguyenhuu/RENAT
|
f747996e1b79284ef70c51b71774098c200abc7f
|
[
"Apache-2.0"
] | 18
|
2018-01-25T03:09:00.000Z
|
2021-12-15T10:41:15.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2017-2020 NTT Communications
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess,time,signal
import Common
from robot.libraries.BuiltIn import BuiltIn
import robot.libraries.DateTime as DateTime
class Tool(object):
""" A collection of useful tools
Contains some useful tools for packet capture, crafting and firewall testing ...
    *Note*: be careful about the arguments of the commands; in some cases they
    could block the test.
    Some commands need sudo privileges to run. Below is a sample sudo settings
    file that allows the related commands to run as `root` without a password. *Note*:
    consider the security risks carefully before using this setting.
    Sample sudoers setting on a CentOS system:
| [root@walle renat]# cat /etc/sudoers.d/renat
| Cmnd_Alias CMD_ROBOT_ALLOW = /bin/kill,/usr/local/bin/nmap,/usr/sbin/hping3,/usr/sbin/tcpdump
| %techno ALL=NOPASSWD: CMD_ROBOT_ALLOW
| %jenkins ALL=NOPASSWD: CMD_ROBOT_ALLOW
"""
ROBOT_LIBRARY_SCOPE = 'TEST SUITE'
ROBOT_LIBRARY_VERSION = Common.version()
def __init__(self):
pass
    def merge_cap(self, result_file, *args):
""" Merges multi pcap files into one
"""
BuiltIn().log("Merges pcap files")
cmd_line = '/usr/sbin/mergecap ' + ' '.join(args) + ' -w ' + result_file
result = subprocess.check_output(cmd_line,stderr=subprocess.STDOUT,shell=True)
BuiltIn().log("Merged `%d` files to `%s`" % (len(args),result_file))
BuiltIn().log(result)
return result
def hping(self,*args):
""" Uses hping3 for multi purposes
"""
BuiltIn().log('Execute hping')
cmd_line = 'sudo -S /usr/sbin/hping3 ' + ' '.join(args)
result = subprocess.check_output(cmd_line,stderr=subprocess.STDOUT,shell=True)
BuiltIn().log(result)
BuiltIn().log('Executed hping')
return result
def nmap(self,params):
""" Uses nmap for multi purposes
"""
BuiltIn().log('Execute Nmap')
cmd_line = 'nmap ' + params
result = subprocess.check_output(cmd_line,stderr=subprocess.STDOUT,shell=True)
BuiltIn().log(result)
BuiltIn().log('Executed Nmap')
return result
def tcpdump_to_file(self,filename='capture.pcap',params='', timeout='10s'):
""" Uses tcpdump (for packet capture) and wait
        The keyword ignores the detailed output of the command.
        By default, the keyword only captures for 10s
"""
BuiltIn().log('Run tcpdump command')
result_file = '%s/%s' % (Common.get_result_path(),filename)
cmd = 'sudo /usr/sbin/tcpdump %s -w %s' % (params,result_file)
proc1 = subprocess.Popen(cmd,stderr=subprocess.STDOUT,stdout=subprocess.PIPE,shell=True,preexec_fn=os.setpgrp)
time.sleep(DateTime.convert_time(timeout))
output2 = subprocess.check_output('sudo /bin/kill %s' % proc1.pid,shell=True)
time.sleep(1)
output1 = b'\n'.join(proc1.stdout.readlines())
BuiltIn().log(output1)
BuiltIn().log(output2)
# change owner of the captured file
username = Common.current_username()
usergroup = Common.current_usergroup()
output = subprocess.check_output('sudo /bin/chown %s:%s %s' % (username,usergroup,result_file),shell=True)
BuiltIn().log('Executed tcpdump command `%s`' % cmd)
| 36.925234
| 118
| 0.667679
|
0656f50d5c6544e7da6cc25335a827bfd5294e46
| 623
|
py
|
Python
|
Homeworks/scripts/data_visualization.py
|
LetteraUnica/DSSC_DL_2021
|
97d83a59d33cfc1a6dcb20abb4209d7ee7fb6e63
|
[
"MIT"
] | null | null | null |
Homeworks/scripts/data_visualization.py
|
LetteraUnica/DSSC_DL_2021
|
97d83a59d33cfc1a6dcb20abb4209d7ee7fb6e63
|
[
"MIT"
] | null | null | null |
Homeworks/scripts/data_visualization.py
|
LetteraUnica/DSSC_DL_2021
|
97d83a59d33cfc1a6dcb20abb4209d7ee7fb6e63
|
[
"MIT"
] | null | null | null |
import pylab as pl
def plot_train_test(train_losses: list, test_losses: list):
"""
    Plots the training loss and the test misclassification rate in two separate graphs
"""
import seaborn as sns
sns.set_theme()
epochs = list(range(1, len(train_losses)+1))
fig, axes = pl.subplots(1, 2, figsize=(12,4))
axes[0].plot(epochs, train_losses, label="train loss")
axes[1].plot(epochs, test_losses, label="test misclassification rate")
axes[0].title.set_text("Training loss")
axes[1].title.set_text("Test misclassification rate")
axes[0].set_xlabel("Number of epochs")
axes[1].set_xlabel("Number of epochs")
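# A minimal sketch (synthetic numbers) of how the helper might be called; it assumes the
# seaborn dependency imported inside the function is available:
if __name__ == "__main__":
    plot_train_test([0.91, 0.62, 0.45, 0.38], [0.25, 0.21, 0.18, 0.17])
    pl.show()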
| 36.647059
| 74
| 0.688604
|
7d23109baf68f4be9d12aa307957addfdc4b4913
| 4,088
|
py
|
Python
|
cw/aero_file/linear_nd_interpolation_coefficient_model.py
|
aarondewindt/cw
|
6be9770da0f0fc34ea47d7ab83e6929c4823e98a
|
[
"MIT"
] | 1
|
2021-10-06T07:20:43.000Z
|
2021-10-06T07:20:43.000Z
|
cw/aero_file/linear_nd_interpolation_coefficient_model.py
|
aarondewindt/cw
|
6be9770da0f0fc34ea47d7ab83e6929c4823e98a
|
[
"MIT"
] | 3
|
2019-02-18T13:49:39.000Z
|
2020-12-28T04:13:27.000Z
|
cw/aero_file/linear_nd_interpolation_coefficient_model.py
|
aarondewindt/cw
|
6be9770da0f0fc34ea47d7ab83e6929c4823e98a
|
[
"MIT"
] | null | null | null |
from cw.aero_file.coefficient_model_base import CoefficientModelBase
from cw.cached import cached
import scipy.interpolate as interpolate
import numpy as np
class LinearNDInterpolationCoefficientModel(CoefficientModelBase):
"""
Aerodynamic coefficient model based on scipy's
:class:`scipy.interpolate.LinearNDInterpolator`.
:param list parameter_names: List of strings with the names of the parameters
corresponding to each dimension.
:param numpy.ndarray points: Data point coordinates, or a precomputed scipy
Delaunay triangulation.
:param numpy.ndarray values: Data values.
:param number fill_value: Value used to fill in for requested points outside
of the convex hull of the input points. If not provided, then the default is nan.
:param bool rescale: Rescale points to unit cube before performing interpolation.
This is useful if some of the input dimensions have incommensurable units and
differ by many orders of magnitude.
"""
model_name = "linear_nd_interpolation"
def __init__(self, parameter_names, points, values, fill_value=float('nan'), rescale=False):
self.__parameter_names = list(parameter_names)
self.points = np.array(points)
self.values = np.array(values)
self.fill_value = fill_value
self.rescale = rescale
self.interpolator = interpolate.LinearNDInterpolator(
points,
values,
fill_value,
rescale
)
@cached
def parameter_names(self):
"""
        List of strings with the names of the parameters for each dimension (alpha, mach, etc.).
"""
return self.__parameter_names
def get_coefficient(self, *args, **kwargs):
"""
Returns the coefficients corresponding to the conditions defined by the
parameters passed. These parameters depend on the table stored and parameters defined.
.. note::
You must either pass all parameters named or unnamed. Mixing named and unnamed
parameters is not supported.
"""
if args:
# Call the interpolator and pass the values in args.
return self.interpolator(np.array(args))
else:
# 1. Get a list with the values for each parameter in the same order as the
# one defined in self.parameter_names. This line will also raise an
# exception if kwargs does not contain all parameters defined in
# self.parameter_names
# 2. Call the interpolator and return the resulting value.
# TODO: fix this
raise NotImplementedError()
return self.interpolator(tuple([kwargs[param_name] for param_name in self.parameter_names]))
def dump_data(self):
"""
Returns a dictionary containing the parameters to be saved
to the data file. These are the same parameters that the constructor takes in as input.
"""
return {
"parameter_names": list(self.parameter_names),
"points": self.points.tolist(),
"values": self.values.tolist(),
"fill_value": self.fill_value,
"rescale": self.rescale
}
def point_value_tables(self, points=None):
"""
Returns two tables with the point coordinates (parameter values), and coefficient values.
Each row in the table corresponds to a specific data point. It can be used as the input
for the CoKringing aerodynamics code.
:param list points: Optional list of points in the point value tables.
:return: Tuple with two :class:`numpy.ndarray` instances with the point coordinates and
coefficient value tables respectively.
"""
# If the points are given, run the default implementation of this function. This
# will call get_coefficient for each point and return the tables.
if points is not None:
return super().point_value_tables(points)
return self.points, self.values
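# A minimal usage sketch (synthetic numbers, illustrative only): interpolate a coefficient
# over a 2-D (alpha, mach) grid and query it at an intermediate point.
if __name__ == "__main__":
    model = LinearNDInterpolationCoefficientModel(
        parameter_names=["alpha", "mach"],
        points=[[0.0, 0.3], [5.0, 0.3], [0.0, 0.8], [5.0, 0.8]],
        values=[0.010, 0.050, 0.020, 0.070],
    )
    print(model.get_coefficient(2.5, 0.55))  # linearly interpolated coefficient value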
| 40.88
| 104
| 0.666341
|
a0d98f7752778e342678a5073034ca6ba00ee9e0
| 8,143
|
py
|
Python
|
135.py
|
Jamal135/135Project
|
0176ff10b1a8e57bdb2ca7a45ed963f3ca2ea332
|
[
"MIT"
] | 3
|
2021-06-07T15:04:10.000Z
|
2021-11-17T16:37:48.000Z
|
135.py
|
Jamal135/135Project
|
0176ff10b1a8e57bdb2ca7a45ed963f3ca2ea332
|
[
"MIT"
] | 4
|
2021-11-04T02:36:55.000Z
|
2021-12-06T13:39:49.000Z
|
135.py
|
Jamal135/135Project
|
0176ff10b1a8e57bdb2ca7a45ed963f3ca2ea332
|
[
"MIT"
] | 1
|
2021-11-04T14:10:22.000Z
|
2021-11-04T14:10:22.000Z
|
# python 135.py
import validation
from functions.encryption._147cipher_ import encrypt_147, decrypt_147
from functions.encryption._135cipher_ import encrypt_135, decrypt_135
from functions.encryption._101cipher_ import encrypt_101, decrypt_101
from functions.datatools._basetool_ import base_convert
from functions.datatools._counttool_ import count_analysis
from flask import Flask, render_template, send_from_directory, jsonify
from flask_wtf.csrf import CSRFProtect
from os import path, urandom
# Set up CSRF protection.
csrf = CSRFProtect()
app = Flask(__name__)
app.config['SECRET_KEY'] = urandom(32)
app.config['WTF_CSRF_TIME_LIMIT'] = None
csrf.init_app(app)
# Specify icon.
@app.route('/favicon.ico')
def favicon():
return send_from_directory(path.join(app.root_path, 'static'),
'favicon.ico', mimetype='image/vnd.microsoft.icon')
# --Main pages--
@app.route("/", methods=["GET"])
@app.route("/home", methods=["GET"])
def home():
return render_template('other/home.html', title='Home')
@app.route("/about", methods=["GET"])
def about():
return render_template('other/about.html', title="About")
# --Encryption pages--
@app.route("/encryption", methods=["GET"])
@app.route("/encryption/viewall", methods=["GET"])
def encryption_viewall():
return render_template('encryption/viewall.html', title="Encryption")
# 135Cipher.
@app.route("/encryption/135cipher", methods=['GET'])
def cipher135():
return render_template('encryption/135cipher.html', title="135Cipher")
@app.route("/encryption/135cipher/result", methods=['POST'])
def cipher135_result():
form = validation.Cipher135Form()
if form.validate_on_submit():
key = form.key.data
text = form.text.data
random = form.random.data
        random_input = "+" if random else "-"
try:
if form.encrypt.data:
return jsonify(encrypt_135(key, text, random_input))
elif form.decrypt.data:
return jsonify(decrypt_135(key, text))
except:
return jsonify("Process Execution Failed")
else:
errors = form.errors
for form_value in errors:
errors[form_value] = errors[form_value][0]
errors["error"] = True
return jsonify(errors)
@app.route("/encryption/135cipher/about", methods=["GET"])
def cipher135_about():
return render_template('encryption/135cipher-about.html', title="135Cipher")
# 147Cipher.
@app.route("/encryption/147cipher", methods=['GET'])
def cipher147():
return render_template('encryption/147cipher.html', title="147Cipher")
@app.route("/encryption/147cipher/result", methods=['POST'])
def cipher147_result():
form = validation.Cipher147Form()
if form.validate_on_submit():
key = form.key.data
text = form.text.data
nonce = form.nonce.data
encoding = form.encoding.data
try:
if form.encrypt.data:
return jsonify(encrypt_147(key, text, encoding, nonce))
elif form.decrypt.data:
return jsonify(decrypt_147(key, text, encoding))
except:
return jsonify("Process Execution Failed")
else:
errors = form.errors
for form_value in errors:
errors[form_value] = errors[form_value][0]
errors["error"] = True
return jsonify(errors)
@app.route("/encryption/147cipher/about", methods=["GET"])
def cipher147_about():
return render_template('encryption/147cipher-about.html', title="147Cipher")
# 101Cipher.
@app.route("/encryption/101cipher", methods=["GET"])
def cipher101():
return render_template('encryption/101cipher.html', title="101Cipher")
@app.route("/encryption/101cipher/result", methods=['POST'])
def cipher101_result():
form = validation.Cipher101Form()
if form.validate_on_submit():
key = form.key.data
number = form.number.data
try:
if form.encrypt.data:
return jsonify(encrypt_101(key, number))
elif form.decrypt.data:
return jsonify(decrypt_101(key, number))
except:
return jsonify("Process Execution Failed")
else:
errors = form.errors
for form_value in errors:
errors[form_value] = errors[form_value][0]
errors["error"] = True
return jsonify(errors)
@app.route("/encryption/101cipher/about", methods=["GET"])
def cipher101_about():
return render_template('encryption/101cipher-about.html', title="101Cipher")
# --Steganography pages--
@app.route("/steganography", methods=["GET"])
@app.route("/steganography/viewall", methods=["GET"])
def steganography_viewall():
return render_template('steganography/viewall.html', title="Steganography")
# 122Picture.
@app.route("/steganography/122stego", methods=['GET', 'POST'])
def stego122():
form = validation.Stego122Form()
return render_template('steganography/122stego.html', title="122Stego", form=None)
@app.route("/steganography/122stego/about", methods=["GET"])
def stego122_about():
return render_template('steganography/122stego-about.html', title="122Stego")
# --Tool pages--
@app.route("/datatools", methods=["GET"])
@app.route("/datatools/viewall", methods=["GET"])
def datatools_viewall():
return render_template('datatools/viewall.html', title="Data Tools")
# Base tool.
@app.route("/datatools/basetool", methods=["GET"])
def basetool():
return render_template('datatools/basetool.html', title="Base Tool")
@app.route("/datatools/basetool/result", methods=["POST"])
def basetool_result():
form = validation.BasetoolForm()
if form.validate_on_submit():
inbase = form.inbase.data
outbase = form.outbase.data
number = form.number.data
insequence = form.insequence.data
outsequence = form.outsequence.data
try:
return jsonify(base_convert(number, inbase, outbase, insequence, outsequence))
except:
return jsonify("Process Execution Failed")
else:
errors = form.errors
for form_value in errors:
errors[form_value] = errors[form_value][0]
errors["error"] = True
return jsonify(errors)
@app.route("/datatools/basetool/about", methods=["GET"])
def basetool_about():
return render_template('datatools/basetool-about.html', title="Base Tool")
# Count tool.
@app.route("/datatools/counttool", methods=["GET"])
def counttool():
return render_template('datatools/counttool.html', title="Count Tool")
@app.route("/datatools/counttool/result", methods=["POST"])
def counttool_result():
form = validation.CounttoolForm()
if form.validate_on_submit():
text = form.text.data
spaces = form.spaces.data
capitals = form.capitals.data
try:
return jsonify(count_analysis(text, spaces, capitals))
except:
return jsonify("Process Execution Failed")
else:
errors = form.errors
for form_value in errors:
errors[form_value] = errors[form_value][0]
errors["error"] = True
return jsonify(errors)
@app.route("/datatools/counttool/about", methods=["GET"])
def counttool_about():
return render_template('datatools/counttool-about.html', title="Count Tool")
@app.route("/datatools/imagetool", methods=["GET"])
def imagetool():
return render_template('datatools/imagetool.html', title="Image Tool")
# --Other pages--
@app.route("/disclaimer", methods=["GET"])
def disclaimer():
return render_template('other/disclaimer.html', title="Disclaimer")
@app.route("/settings", methods=["GET"])
def settings():
return render_template('other/settings.html', title="Settings")
@app.route("/privacy", methods=["GET"])
def privacy():
return render_template('other/privacy.html', title="Privacy")
# --Error handling--
@app.errorhandler(404)
def page_not_found(e):
return render_template('other/home.html', title='Home'), 404
# Debug mode.
if __name__ == "__main__":
app.run(debug=True)
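# A minimal smoke-test sketch (illustrative, not part of the original app) using Flask's
# built-in test client; GET routes require no CSRF token, so they can be exercised directly:
#
#     with app.test_client() as client:
#         assert client.get("/home").status_code == 200
#         assert client.get("/encryption/135cipher").status_code == 200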
| 34.214286
| 90
| 0.671865
|
5742166b5af6d1ee4fddb42db877a9ce76b05fc0
| 5,007
|
py
|
Python
|
airflow/providers/amazon/aws/operators/sagemaker_transform.py
|
wileeam/airflow
|
f46be8152a4d89c57db4ca46f5b3339e4876b723
|
[
"Apache-2.0"
] | 1
|
2020-02-17T17:40:14.000Z
|
2020-02-17T17:40:14.000Z
|
airflow/providers/amazon/aws/operators/sagemaker_transform.py
|
devlocalca/airflow
|
58c3542ed25061320ce61dbe0adf451a44c738dd
|
[
"Apache-2.0"
] | 2
|
2021-05-12T12:41:51.000Z
|
2021-09-29T17:47:43.000Z
|
airflow/providers/amazon/aws/operators/sagemaker_transform.py
|
devlocalca/airflow
|
58c3542ed25061320ce61dbe0adf451a44c738dd
|
[
"Apache-2.0"
] | null | null | null |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.exceptions import AirflowException
from airflow.providers.amazon.aws.hooks.aws_hook import AwsHook
from airflow.providers.amazon.aws.operators.sagemaker_base import SageMakerBaseOperator
from airflow.utils.decorators import apply_defaults
class SageMakerTransformOperator(SageMakerBaseOperator):
"""
Initiate a SageMaker transform job.
    This operator returns the ARN of the model created in Amazon SageMaker.
:param config: The configuration necessary to start a transform job (templated).
        If you need to create a SageMaker transform job based on an existing SageMaker model::
config = transform_config
If you need to create both SageMaker model and SageMaker Transform job::
config = {
'Model': model_config,
'Transform': transform_config
}
For details of the configuration parameter of transform_config see
:py:meth:`SageMaker.Client.create_transform_job`
For details of the configuration parameter of model_config, See:
:py:meth:`SageMaker.Client.create_model`
:type config: dict
:param aws_conn_id: The AWS connection ID to use.
:type aws_conn_id: str
:param wait_for_completion: Set to True to wait until the transform job finishes.
:type wait_for_completion: bool
:param check_interval: If wait is set to True, the time interval, in seconds,
that this operation waits to check the status of the transform job.
:type check_interval: int
:param max_ingestion_time: If wait is set to True, the operation fails
if the transform job doesn't finish within max_ingestion_time seconds. If you
set this parameter to None, the operation does not timeout.
:type max_ingestion_time: int
"""
@apply_defaults
def __init__(self,
config,
wait_for_completion=True,
check_interval=30,
max_ingestion_time=None,
*args, **kwargs):
super().__init__(config=config,
*args, **kwargs)
self.config = config
self.wait_for_completion = wait_for_completion
self.check_interval = check_interval
self.max_ingestion_time = max_ingestion_time
self.create_integer_fields()
def create_integer_fields(self):
"""Set fields which should be casted to integers."""
self.integer_fields = [
['Transform', 'TransformResources', 'InstanceCount'],
['Transform', 'MaxConcurrentTransforms'],
['Transform', 'MaxPayloadInMB']
]
if 'Transform' not in self.config:
for field in self.integer_fields:
field.pop(0)
def expand_role(self):
if 'Model' not in self.config:
return
config = self.config['Model']
if 'ExecutionRoleArn' in config:
hook = AwsHook(self.aws_conn_id)
config['ExecutionRoleArn'] = hook.expand_role(config['ExecutionRoleArn'])
def execute(self, context):
self.preprocess_config()
model_config = self.config.get('Model')
transform_config = self.config.get('Transform', self.config)
if model_config:
self.log.info('Creating SageMaker Model %s for transform job', model_config['ModelName'])
self.hook.create_model(model_config)
self.log.info('Creating SageMaker transform Job %s.', transform_config['TransformJobName'])
response = self.hook.create_transform_job(
transform_config,
wait_for_completion=self.wait_for_completion,
check_interval=self.check_interval,
max_ingestion_time=self.max_ingestion_time)
if response['ResponseMetadata']['HTTPStatusCode'] != 200:
raise AirflowException('Sagemaker transform Job creation failed: %s' % response)
else:
return {
'Model': self.hook.describe_model(
transform_config['ModelName']
),
'Transform': self.hook.describe_transform_job(
transform_config['TransformJobName']
)
}
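# A minimal usage sketch (hypothetical DAG and placeholder config; the real transform_config
# keys follow SageMaker.Client.create_transform_job as described in the docstring above):
#
#     from datetime import datetime
#     from airflow import DAG
#
#     with DAG(dag_id="sagemaker_transform_example",
#              start_date=datetime(2021, 1, 1),
#              schedule_interval=None) as dag:
#         SageMakerTransformOperator(
#             task_id="transform",
#             config={"Transform": transform_config},  # or {"Model": ..., "Transform": ...}
#             aws_conn_id="aws_default",
#             wait_for_completion=True,
#             check_interval=60,
#         )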
| 40.056
| 101
| 0.668664
|
2381aab98f1dfd526eff5eaf3b9dca2bdf602761
| 34,015
|
py
|
Python
|
turfpy/transformation.py
|
omanges/turfpy
|
eda665bb014a8371c42f655f626d6c6b6bf18c65
|
[
"MIT"
] | 82
|
2020-03-21T13:29:02.000Z
|
2022-03-31T11:36:26.000Z
|
turfpy/transformation.py
|
devick/turfpy
|
eda665bb014a8371c42f655f626d6c6b6bf18c65
|
[
"MIT"
] | 51
|
2020-03-21T14:53:46.000Z
|
2022-02-25T07:44:37.000Z
|
turfpy/transformation.py
|
devick/turfpy
|
eda665bb014a8371c42f655f626d6c6b6bf18c65
|
[
"MIT"
] | 22
|
2020-03-21T13:49:12.000Z
|
2022-03-18T05:15:37.000Z
|
"""
This module implements some of the spatial analysis techniques and processes used to
understand the patterns and relationships of geographic features.
This is mainly inspired by turf.js.
link: http://turfjs.org/
"""
import copy
import itertools
import math
from math import floor, sqrt
from typing import List, Optional, Union
import numpy as np
from geojson import Feature, FeatureCollection, LineString, MultiLineString
from geojson import Point as GeoPoint
from geojson import Polygon
from scipy.spatial import Delaunay, Voronoi
from shapely import geometry as geometry
from shapely.geometry import LineString as ShapelyLineString
from shapely.geometry import MultiPoint, MultiPolygon, Point, mapping, shape
from shapely.ops import cascaded_union, clip_by_rect, polygonize, unary_union
from turfpy.helper import get_coord, get_coords, get_geom, get_type, length_to_degrees
from turfpy.measurement import (
bbox,
bbox_polygon,
center,
centroid,
destination,
rhumb_bearing,
rhumb_destination,
rhumb_distance,
)
from turfpy.meta import coord_each, feature_each, flatten_each
from .dev_lib.earcut import earcut
from .dev_lib.spline import Spline
def circle(
center: Feature, radius: int, steps: int = 64, units: str = "km", **kwargs
) -> Polygon:
"""
Takes a Point and calculates the circle polygon given a radius in degrees,
radians, miles, or kilometers; and steps for precision.
:param center: A `Point` object representing center point of circle.
:param radius: An int representing radius of the circle.
:param steps: An int representing number of steps.
:param units: A string representing units of distance e.g. 'mi', 'km',
'deg' and 'rad'.
:param kwargs: A dict representing additional properties.
:return: A polygon feature object.
Example:
>>> from turfpy.transformation import circle
>>> from geojson import Feature, Point
>>> circle(center=Feature(geometry=Point((-75.343, 39.984))), radius=5, steps=10)
"""
coordinates = []
options = dict(steps=steps, units=units)
options.update(kwargs)
for i in range(steps):
bearing = i * -360 / steps
pt = destination(center, radius, bearing, options=options)
cords = pt.geometry.coordinates
coordinates.append(cords)
coordinates.append(coordinates[0])
return Feature(geometry=Polygon([coordinates], **kwargs))
def bbox_clip(geojson: Feature, bbox: list) -> Feature:
"""
Takes a Feature or geometry and a bbox and clips the feature to the bbox
:param geojson: Geojson data
:param bbox: Bounding Box which is used to clip the geojson
:return: Clipped geojson
Example:
>>> from turfpy.transformation import bbox_clip
>>> from geojson import Feature
>>> f = Feature(geometry={"coordinates": [[[2, 2], [8, 4],
>>> [12, 8], [3, 7], [2, 2]]], "type": "Polygon"})
>>> bbox = [0, 0, 10, 10]
>>> clip = bbox_clip(f, bbox)
"""
bb_polygon = bbox_polygon(bbox)
bb_clip = intersect([geojson, bb_polygon])
if not bb_clip:
return bb_clip
if "properties" in geojson:
bb_clip.properties = geojson["properties"]
return bb_clip
def intersect(features: Union[List[Feature], FeatureCollection]) -> Feature:
"""
Takes polygons and finds their intersection
:param features: List of features of Feature Collection
:return: Intersection Geojson Feature
Example:
>>> from turfpy.transformation import intersect
>>> from geojson import Feature
>>> f = Feature(geometry={"coordinates": [
>>> [[-122.801742, 45.48565], [-122.801742, 45.60491],
>>> [-122.584762, 45.60491], [-122.584762, 45.48565],
>>> [-122.801742, 45.48565]]], "type": "Polygon"})
>>> b = Feature(geometry={"coordinates": [
>>> [[-122.520217, 45.535693], [-122.64038, 45.553967],
>>> [-122.720031, 45.526554], [-122.669906, 45.507309],
>>> [-122.723464, 45.446643], [-122.532577, 45.408574],
>>> [-122.487258, 45.477466], [-122.520217, 45.535693]
>>> ]], "type": "Polygon"})
>>> inter = intersect([f, b])
"""
properties_list = []
if isinstance(features, list):
shapes = []
for f in features:
poly = get_geom(f)
s = shape(poly)
shapes.append(s)
if "properties" in f.keys():
properties_list.append(f["properties"])
else:
if "features" not in features.keys():
raise Exception("Invalid FeatureCollection")
if "properties" in features.keys():
properties_list.append(features["properties"])
shapes = []
for f in features["features"]:
poly = get_geom(f)
s = shape(poly)
shapes.append(s)
if "properties" in f.keys():
properties_list.append(f["properties"])
intersection = shapes[0]
for shape_value in shapes:
intersection = shape_value.intersection(intersection)
intersection = mapping(intersection)
if (
len(intersection.get("coordinates", [])) == 0
and len(intersection.get("geometries", [])) == 0
):
return None
properties = merge_dict(properties_list)
intersection_feature = Feature(geometry=intersection, properties=properties)
return intersection_feature
def bezier_spline(line: Feature, resolution=10000, sharpness=0.85) -> Feature:
"""
Takes a line and returns a curved version by applying a Bezier spline algorithm
:param line: LineString Feature which is used to draw the curve
:param resolution: time in milliseconds between points
:param sharpness: a measure of how curvy the path should be between splines
:return: Curve as LineString Feature
Example:
>>> from geojson import LineString, Feature
>>> from turfpy.transformation import bezier_spline
>>> ls = LineString([(-76.091308, 18.427501),
>>> (-76.695556, 18.729501),
>>> (-76.552734, 19.40443),
>>> (-74.61914, 19.134789),
>>> (-73.652343, 20.07657),
>>> (-73.157958, 20.210656)])
>>> f = Feature(geometry=ls)
>>> bezier_spline(f)
"""
coords = []
points = []
geom = get_geom(line)
for c in geom["coordinates"]:
points.append({"x": c[0], "y": c[1]})
spline = Spline(points_data=points, resolution=resolution, sharpness=sharpness)
i = 0
while i < spline.duration:
pos = spline.pos(i)
if floor(i / 100) % 2 == 0:
coords.append((pos["x"], pos["y"]))
i = i + 10
return Feature(geometry=LineString(coords))
def merge_dict(dicts: list):
super_dict: dict = {}
for d in dicts:
for k, v in d.items():
if k not in super_dict.keys():
super_dict[k] = v
else:
if isinstance(super_dict[k], list):
if v not in super_dict[k]:
super_dict[k].append(v)
else:
if super_dict[k] != v:
super_dict[k] = [super_dict[k], v]
return super_dict
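# merge_dict collapses the property dicts of several features into one: keys that
# agree are kept once, keys that conflict have their values collected into a list.
# A minimal illustration (hypothetical values, not from the library's tests):
# >>> merge_dict([{"fill": "#00f", "name": "a"}, {"fill": "#00f", "name": "b"}])
# {'fill': '#00f', 'name': ['a', 'b']}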
def union(
features: Union[List[Feature], FeatureCollection]
) -> Union[Feature, FeatureCollection]:
"""
Given list of features or ``FeatureCollection`` return union of those.
:param features: A list of GeoJSON features or FeatureCollection.
:return: A GeoJSON Feature or FeatureCollection.
Example:
>>> from turfpy.transformation import union
>>> from geojson import Feature, Polygon, FeatureCollection
>>> f1 = Feature(geometry=Polygon([[
... [-82.574787, 35.594087],
... [-82.574787, 35.615581],
... [-82.545261, 35.615581],
... [-82.545261, 35.594087],
... [-82.574787, 35.594087]
... ]]), properties={"fill": "#00f"})
>>> f2 = Feature(geometry=Polygon([[
... [-82.560024, 35.585153],
... [-82.560024, 35.602602],
... [-82.52964, 35.602602],
... [-82.52964, 35.585153],
... [-82.560024, 35.585153]]]), properties={"fill": "#00f"})
>>> union(FeatureCollection([f1, f2], properties={"combine": "yes"}))
"""
shapes = []
properties_list = []
if isinstance(features, list):
for f in features:
if f.type != "Feature":
raise Exception("Not a valid feature")
geom = get_geom(f)
s = shape(geom)
shapes.append(s)
if "properties" in f.keys():
properties_list.append(f["properties"])
else:
if "features" not in features.keys():
raise Exception("Invalid FeatureCollection")
if "properties" in features.keys():
properties_list.append(features["properties"])
for f in features["features"]:
geom = get_geom(f)
s = shape(geom)
shapes.append(s)
if "properties" in f.keys():
properties_list.append(f["properties"])
result = cascaded_union(shapes)
result = mapping(result)
properties = merge_dict(properties_list)
if result["type"] == "GeometryCollection":
features = []
for geom in result["geometries"]:
features.append(Feature(geometry=geom))
return FeatureCollection(features, properties=properties)
return Feature(geometry=result, properties=properties)
def _alpha_shape(points, alpha):
"""
Compute the alpha shape (concave hull) of a set of points.
:param points: Iterable container of points.
:param alpha: alpha value to influence the gooeyness of the border. Smaller
numbers don't fall inward as much as larger numbers. Too large,
and you lose everything!
"""
if len(points) < 4:
# When you have a triangle, there is no sense in computing an alpha
# shape.
return geometry.MultiPoint(list(points)).convex_hull
def add_edge(edges, edge_points, coords, i, j):
"""Add a line between the i-th and j-th points, if not in the list already"""
if (i, j) in edges or (j, i) in edges:
# already added
return
edges.add((i, j))
edge_points.append(coords[[i, j]])
coords = np.array([point.coords[0] for point in points])
tri = Delaunay(coords)
edges = set()
edge_points = []
# loop over triangles:
# ia, ib, ic = indices of corner points of the triangle
for ia, ib, ic in tri.vertices:
pa = coords[ia]
pb = coords[ib]
pc = coords[ic]
# Lengths of sides of triangle
a = math.sqrt((pa[0] - pb[0]) ** 2 + (pa[1] - pb[1]) ** 2)
b = math.sqrt((pb[0] - pc[0]) ** 2 + (pb[1] - pc[1]) ** 2)
c = math.sqrt((pc[0] - pa[0]) ** 2 + (pc[1] - pa[1]) ** 2)
# Semiperimeter of triangle
s = (a + b + c) / 2.0
# Area of triangle by Heron's formula
area = math.sqrt(s * (s - a) * (s - b) * (s - c))
if area > 0:
circum_r = a * b * c / (4.0 * area)
else:
circum_r = 0
# Here's the radius filter.
# print circum_r
if circum_r < 1.0 / alpha:
add_edge(edges, edge_points, coords, ia, ib)
add_edge(edges, edge_points, coords, ib, ic)
add_edge(edges, edge_points, coords, ic, ia)
m = geometry.MultiLineString(edge_points)
triangles = list(polygonize(m))
return cascaded_union(triangles), edge_points
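# The filter above keeps a triangle only when its circumradius is below 1/alpha, so
# larger alpha values discard more long, thin triangles and produce a tighter hull.
# A minimal sketch of calling the helper directly (hypothetical shapely Points):
# >>> pts = [Point(0, 0), Point(1, 0), Point(1, 1), Point(0, 1), Point(0.5, 0.5)]
# >>> hull, edge_points = _alpha_shape(pts, alpha=0.5)  # hull is a shapely geometry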
def get_points(features):
points = []
if "type" not in features.keys():
raise Exception("Invalid Feature")
if features["type"] == "Feature":
get_ext_points(geometry.shape(features["geometry"]), points)
else:
if "features" not in features.keys():
raise Exception("Invalid FeatureCollection")
for feature in features["features"]:
get_ext_points(geometry.shape(feature["geometry"]), points)
return points
def get_ext_points(geom, points):
if geom.type == "Point":
for p in geom.coords:
points.append(Point(p))
elif geom.type == "MultiPoint":
for p in geom.geoms:
points.append(p)
elif geom.type == "LineString":
for p in geom.coords:
points.append(Point(p))
elif geom.type == "MultiLineString":
for g in geom.geoms:
for p in g.coords:
points.append(Point(p))
elif geom.type == "Polygon":
for p in geom.exterior.coords:
points.append(Point(p))
elif geom.type == "MultiPolygon":
for g in geom.geoms:
for p in g.exterior.coords:
points.append(Point(p))
else:
raise Exception("Invalid Geometry")
def concave(features: Union[Feature, FeatureCollection], alpha=2):
"""Generate concave hull for the given feature or Feature Collection.
:param features: It can be a feature or Feature Collection
    :param alpha: Alpha determines the shape of the concave hull;
        greater values make the shape tighter
:return: Feature of concave hull polygon
Example:
>>> from turfpy.transformation import concave
>>> from geojson import FeatureCollection, Feature, Point
>>> f1 = Feature(geometry=Point((-63.601226, 44.642643)))
>>> f2 = Feature(geometry=Point((-63.591442, 44.651436)))
>>> f3 = Feature(geometry=Point((-63.580799, 44.648749)))
>>> f4 = Feature(geometry=Point((-63.573589, 44.641788)))
>>> f5 = Feature(geometry=Point((-63.587665, 44.64533)))
>>> f6 = Feature(geometry=Point((-63.595218, 44.64765)))
>>> fc = [f1, f2, f3, f4, f5, f6]
>>> concave(FeatureCollection(fc), alpha=100)
"""
points = get_points(features)
concave_hull, edges = _alpha_shape(points, alpha)
return Feature(geometry=mapping(concave_hull))
def convex(features: Union[Feature, FeatureCollection]):
"""Generate convex hull for the given feature or Feature Collection
:param features: It can be a feature or Feature Collection
:return: Feature of convex hull polygon
Example:
>>> from turfpy.transformation import convex
>>> from geojson import FeatureCollection, Feature, Point
>>> f1 = Feature(geometry=Point((10.195312, 43.755225)))
>>> f2 = Feature(geometry=Point((10.404052, 43.8424511)))
>>> f3 = Feature(geometry=Point((10.579833, 43.659924)))
>>> f4 = Feature(geometry=Point((10.360107, 43.516688)))
>>> f5 = Feature(geometry=Point((10.14038, 43.588348)))
>>> f6 = Feature(geometry=Point((10.195312, 43.755225)))
>>> fc = [f1, f2, f3, f4, f5, f6]
>>> convex(FeatureCollection(fc))
"""
points = get_points(features)
point_collection = geometry.MultiPoint(list(points))
return Feature(geometry=mapping(point_collection.convex_hull))
def dissolve(
features: Union[List[Feature], FeatureCollection], property_name: str = None
) -> FeatureCollection:
"""
Take FeatureCollection or list of features to dissolve based on
property_name provided.
:param features: A list of GeoJSON features or FeatureCollection.
:param property_name: Name of property based on which to dissolve.
:return: A GeoJSON Feature or FeatureCollection.
Example:
>>> from geojson import Polygon, Feature, FeatureCollection
>>> from turfpy.transformation import dissolve
>>> f1 = Feature(geometry=Polygon([[
>>> [0, 0],
>>> [0, 1],
>>> [1, 1],
>>> [1, 0],
>>> [0, 0]]]), properties={"combine": "yes", "fill": "#00f"})
>>> f2 = Feature(geometry=Polygon([[
>>> [0, -1],
>>> [0, 0],
>>> [1, 0],
>>> [1, -1],
>>> [0,-1]]]), properties={"combine": "yes"})
>>> f3 = Feature(geometry=Polygon([[
>>> [1,-1],
>>> [1, 0],
>>> [2, 0],
>>> [2, -1],
>>> [1, -1]]]), properties={"combine": "no"})
>>> dissolve(FeatureCollection([f1, f2, f3]), property_name='combine')
"""
if isinstance(features, list):
features = FeatureCollection(features)
if "features" not in features.keys():
raise Exception("Invalid FeatureCollection")
dissolve_feature_list = []
if property_name:
for k, g in itertools.groupby(
features["features"], key=lambda x: x["properties"].get(property_name)
):
fc = FeatureCollection(list(g))
# if "properties" in features.keys():
# fc['properties'] = features['properties']
result = union(fc)
if result["type"] == "FeatureCollection":
for f in result["features"]:
dissolve_feature_list.append(f)
else:
dissolve_feature_list.append(result)
else:
return union(features)
if "properties" in features.keys():
return FeatureCollection(dissolve_feature_list, properties=features["properties"])
else:
return FeatureCollection(dissolve_feature_list)
def difference(feature_1: Feature, feature_2: Feature) -> Feature:
"""
Find the difference between given two features.
:param feature_1: A GeoJSON feature
:param feature_2: A GeoJSON feature
:return: A GeoJSON feature
Example:
>>> from geojson import Polygon, Feature
>>> from turfpy.transformation import difference
>>> f1 = Feature(geometry=Polygon([[
>>> [128, -26],
>>> [141, -26],
>>> [141, -21],
>>> [128, -21],
>>> [128, -26]]]), properties={"combine": "yes", "fill": "#00f"})
>>> f2 = Feature(geometry=Polygon([[
>>> [126, -28],
>>> [140, -28],
>>> [140, -20],
>>> [126, -20],
>>> [126, -28]]]), properties={"combine": "yes"})
>>> difference(f1, f2)
"""
properties_list = []
if "properties" in feature_1.keys():
properties_list.append(feature_1["properties"])
if "properties" in feature_2.keys():
properties_list.append(feature_2["properties"])
shape_1 = shape(get_geom(feature_1))
shape_2 = shape(get_geom(feature_2))
difference_result = shape_1.difference(shape_2)
difference_result = mapping(difference_result)
if len(difference_result["coordinates"]) == 0:
return None
properties = merge_dict(properties_list)
difference_feature = Feature(geometry=difference_result, properties=properties)
return difference_feature
def transform_rotate(
feature: Union[List[Feature], FeatureCollection],
angle: float,
pivot: list = None,
mutate: bool = False,
):
"""
    Rotates any geojson Feature or Geometry by a specified angle,
around its centroid or a given pivot
point; all rotations follow the right-hand rule.
:param feature: Geojson to be rotated.
:param angle: angle of rotation (along the vertical axis),
from North in decimal degrees, negative clockwise
:param pivot: point around which the rotation will be performed
:param mutate: allows GeoJSON input to be mutated
(significant performance increase if True)
:return: the rotated GeoJSON
Example :-
>>> from turfpy.transformation import transform_rotate
>>> from geojson import Polygon, Feature
>>> f = Feature(geometry=Polygon([[[0,29],[3.5,29],[2.5,32],[0,29]]]))
>>> pivot = [0, 25]
>>> transform_rotate(f, 10, pivot)
"""
if not feature:
raise Exception("geojson is required")
if angle == 0:
return feature
if not pivot:
pivot = centroid(feature)["geometry"]["coordinates"]
if not mutate:
feature = copy.deepcopy(feature)
def _callback_coord_each(
coord, coord_index, feature_index, multi_feature_index, geometry_index
):
nonlocal pivot, angle
initial_angle = rhumb_bearing(GeoPoint(pivot), GeoPoint(coord))
final_angle = initial_angle + angle
distance = rhumb_distance(GeoPoint(pivot), GeoPoint(coord))
new_coords = get_coord(rhumb_destination(GeoPoint(pivot), distance, final_angle))
coord[0] = new_coords[0]
coord[1] = new_coords[1]
coord_each(feature, _callback_coord_each)
return feature
def transform_translate(
feature: Union[List[Feature], FeatureCollection],
distance: float,
direction: float,
units: str = "km",
z_translation: float = 0,
mutate: bool = False,
):
"""
    Moves any geojson Feature or Geometry
    by a specified distance along a
    Rhumb Line in the provided direction angle.
:param feature: Geojson data that is to be translated
:param distance: length of the motion;
negative values determine motion in opposite direction
:param direction: of the motion; angle
from North in decimal degrees, positive clockwise
:param units: units for the distance and z_translation
:param z_translation: length of the vertical motion, same unit of distance
:param mutate: allows GeoJSON input to be mutated
(significant performance increase if true)
:return: the translated GeoJSON
Example :-
>>> from turfpy.transformation import transform_translate
>>> from geojson import Polygon, Feature
>>> f = Feature(geometry=Polygon([[[0,29],[3.5,29],[2.5,32],[0,29]]]))
>>> transform_translate(f, 100, 35, mutate=True)
"""
if not feature:
raise Exception("geojson is required")
if not distance:
raise Exception("distance is required")
if distance == 0 and z_translation == 0:
return feature
if not direction:
raise Exception("direction is required")
if distance < 0:
distance = -distance
direction = direction + 180
if not mutate:
feature = copy.deepcopy(feature)
def _callback_coord_each(
coord, coord_index, feature_index, multi_feature_index, geometry_index
):
nonlocal distance, direction, units, z_translation
new_coords = get_coord(
rhumb_destination(GeoPoint(coord), distance, direction, {"units": units})
)
coord[0] = new_coords[0]
coord[1] = new_coords[1]
if z_translation and len(coord) == 3:
coord[2] += z_translation
coord_each(feature, _callback_coord_each)
return feature
def transform_scale(
features,
factor: float,
origin: Union[str, list] = "centroid",
mutate: bool = False,
):
"""
Scale a GeoJSON from a given
point by a factor of scaling
    (e.g. factor=2 doubles the GeoJSON's linear dimensions).
If a FeatureCollection is provided, the origin
point will be calculated based on each individual Feature.
:param features: GeoJSON to be scaled
:param factor: of scaling, positive or negative values greater than 0
:param origin: Point from which the scaling will occur
(string options: sw/se/nw/ne/center/centroid)
:param mutate: allows GeoJSON input to be mutated
(significant performance increase if true)
:return: Scaled Geojson
Example :-
>>> from turfpy.transformation import transform_scale
>>> from geojson import Polygon, Feature
>>> f = Feature(geometry=Polygon([[[0,29],[3.5,29],[2.5,32],[0,29]]]))
>>> transform_scale(f, 3, origin=[0, 29])
"""
if not features:
raise Exception("geojson is required")
if not factor:
raise Exception("invalid factor")
if not mutate:
features = copy.deepcopy(features)
if features["type"] == "FeatureCollection":
def _callback_feature_each(feature, feature_index):
nonlocal factor, origin, features
features["features"][feature_index] = scale(feature, factor, origin)
feature_each(features, _callback_feature_each)
return features
return scale(features, factor, origin)
def scale(feature, factor, origin):
is_point = get_type(feature) == "Point"
origin = define_origin(feature, origin)
if factor == 1 or is_point:
return feature
def _callback_coord_each(
coord, coord_index, feature_index, multi_feature_index, geometry_index
):
nonlocal factor, origin
original_distance = rhumb_distance(GeoPoint(origin), GeoPoint(coord))
bearing = rhumb_bearing(GeoPoint(origin), GeoPoint(coord))
new_distance = original_distance * factor
new_coord = get_coord(rhumb_destination(GeoPoint(origin), new_distance, bearing))
coord[0] = new_coord[0]
coord[1] = new_coord[1]
if len(coord) == 3:
coord[2] = coord[2] * factor
coord_each(feature, _callback_coord_each)
return feature
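# scale() is the per-feature worker behind transform_scale: every coordinate is moved
# along the rhumb line from the chosen origin so that its distance from the origin is
# multiplied by `factor` (Point geometries are returned unchanged). A minimal sketch,
# assuming the same Polygon feature `f` used in the transform_scale docstring:
# >>> scaled = scale(f, 3, [0, 29])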
def define_origin(geojson, origin):
if not origin:
origin = "centroid"
if isinstance(origin, list):
return get_coord(origin)
bb = bbox(geojson)
west = bb[0]
south = bb[1]
east = bb[2]
north = bb[3]
if (
origin == "sw"
or origin == "southwest"
or origin == "westsouth"
or origin == "bottomleft"
):
return [west, south]
elif (
origin == "se"
or origin == "southeast"
or origin == "eastsouth"
or origin == "bottomright"
):
return [east, south]
elif (
origin == "nw"
or origin == "northwest"
or origin == "westnorth"
or origin == "topleft"
):
return [west, north]
elif (
origin == "ne"
or origin == "northeast"
or origin == "eastnorth"
or origin == "topright"
):
return [east, north]
elif origin == "center":
return center(geojson)["geometry"]["coordinates"]
elif origin is None or origin == "centroid":
return centroid(geojson)["geometry"]["coordinates"]
else:
raise Exception("invalid origin")
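# A quick reference for the origin options handled above (hypothetical feature `f`):
# >>> define_origin(f, [0, 29])      # an explicit coordinate pair is used as-is
# >>> define_origin(f, "sw")         # south-west corner of the feature's bbox
# >>> define_origin(f, "center")     # midpoint of the bbox
# >>> define_origin(f, "centroid")   # default: centroid of the feature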
def tesselate(poly: Feature) -> FeatureCollection:
"""Tesselates a Feature into a FeatureCollection of triangles using earcut.
:param poly: A GeoJSON feature ``class:geojson.Polygon``.
:return: A GeoJSON FeatureCollection of triangular polygons.
Example:
>>> from geojson import Feature
>>> from turfpy.transformation import tesselate
>>> polygon = Feature(geometry={"coordinates": [[[11, 0], [22, 4], [31, 0], [31, 11],
... [21, 15], [11, 11], [11, 0]]], "type": "Polygon"})
>>> tesselate(polygon)
"""
if (
poly["geometry"]["type"] != "Polygon"
and poly["geometry"]["type"] != "MultiPolygon"
):
raise ValueError("Geometry must be Polygon or MultiPolygon")
fc = FeatureCollection([])
if poly.geometry.type == "Polygon":
fc["features"] = __process_polygon(poly.geometry.coordinates)
else:
for co in poly.geometry.coordinates:
fc["features"].extend(__process_polygon(co))
return fc
def __process_polygon(coordinates):
data = __flatten_coords(coordinates)
dim = 2
result = earcut(data["vertices"], data["holes"], dim)
features = []
vertices = []
for i, val in enumerate(result):
index = val
vertices.append(
[data["vertices"][index * dim], data["vertices"][index * dim + 1]]
)
i = 0
while i < len(vertices):
coords = vertices[i : i + 3]
coords.append(vertices[i])
features.append(Feature(geometry={"coordinates": [coords], "type": "Polygon"}))
i += 3
return features
def __flatten_coords(data):
dim = len(data[0][0])
result = {"vertices": [], "holes": [], "dimensions": dim}
hole_index = 0
for i, val in enumerate(data):
for j, _ in enumerate(val):
for d in range(dim):
result["vertices"].append(data[i][j][d])
if i > 0:
hole_index += len(data[i - 1])
result["holes"].append(hole_index)
return result
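# __flatten_coords turns GeoJSON polygon rings into the flat layout earcut expects:
# a single interleaved vertex list plus the start indices of any hole rings.
# A minimal illustration (hypothetical square ring with no holes):
# >>> __flatten_coords([[[0, 0], [1, 0], [1, 1], [0, 1]]])
# {'vertices': [0, 0, 1, 0, 1, 1, 0, 1], 'holes': [], 'dimensions': 2}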
def line_offset(geojson: Feature, distance: float, unit: str = "km") -> Feature:
"""
Takes a linestring or multilinestring and returns
    a line offset by the specified distance.
:param geojson: input GeoJSON
:param distance: distance to offset the line (can be of negative value)
:param unit: Units in which distance to be calculated, values can be 'deg', 'rad',
'mi', 'km', default is 'km'
:return: Line feature offset from the input line
Example:
>>> from geojson import MultiLineString, Feature
>>> from turfpy.transformation import line_offset
>>> ls = Feature(geometry=MultiLineString([
... [(3.75, 9.25), (-130.95, 1.52)],
... [(23.15, -34.25), (-1.35, -4.65), (3.45, 77.95)]
... ]))
>>> line_offset(ls, 2, unit='mi')
"""
if not geojson:
raise Exception("geojson is required")
if not distance:
raise Exception("distance is required")
type = get_type(geojson)
properties = geojson.get("properties", {})
if type == "LineString":
return line_offset_feature(geojson, distance, unit)
elif type == "MultiLineString":
coords = []
def callback_flatten_each(feature, feature_index, multi_feature_index):
nonlocal coords
coords.append(
line_offset_feature(feature, distance, unit).geometry.coordinates
)
return True
flatten_each(geojson, callback_flatten_each)
return Feature(geometry=MultiLineString(coords), properties=properties)
def line_offset_feature(line, distance, units):
segments = []
offset_degrees = length_to_degrees(distance, units)
coords = get_coords(line)
final_coords = []
for index, current_coords in enumerate(coords):
if index != len(coords) - 1:
segment = _process_segment(current_coords, coords[index + 1], offset_degrees)
segments.append(segment)
if index > 0:
seg2_coords = segments[index - 1]
intersects = _intersection(segment, seg2_coords)
if intersects:
seg2_coords[1] = intersects
segment[0] = intersects
final_coords.append(seg2_coords[0])
if index == len(coords) - 2:
final_coords.append(segment[0])
final_coords.append(segment[1])
if len(coords) == 2:
final_coords.append(segment[0])
final_coords.append(segment[1])
return Feature(
geometry=LineString(final_coords), properties=line.get("properties", {})
)
def _process_segment(point1, point2, offset):
L = sqrt(
(point1[0] - point2[0]) * (point1[0] - point2[0])
+ (point1[1] - point2[1]) * (point1[1] - point2[1])
)
out1x = point1[0] + offset * (point2[1] - point1[1]) / L
out2x = point2[0] + offset * (point2[1] - point1[1]) / L
out1y = point1[1] + offset * (point1[0] - point2[0]) / L
out2y = point2[1] + offset * (point1[0] - point2[0]) / L
return [[out1x, out1y], [out2x, out2y]]
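# _process_segment shifts the segment (point1, point2) sideways by `offset` degrees
# along its normal; the sign of the offset picks the side. A worked example with a
# horizontal unit segment (values follow directly from the formulas above):
# >>> _process_segment([0, 0], [1, 0], 0.1)
# [[0.0, -0.1], [1.0, -0.1]]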
def _intersection(a, b):
if _is_parallel(a, b):
return False
return _intersect_segments(a, b)
def _is_parallel(a, b):
r = _ab(a)
s = _ab(b)
return _cross_product(r, s) == 0
def _ab(segment):
start = segment[0]
end = segment[1]
return [end[0] - start[0], end[1] - start[1]]
def _cross_product(v1, v2):
return (v1[0] * v2[1]) - (v2[0] * v1[1])
def _intersect_segments(a, b):
p = a[0]
r = _ab(a)
q = b[0]
s = _ab(b)
cross = _cross_product(r, s)
qmp = _sub(q, p)
numerator = _cross_product(qmp, s)
t = numerator / cross
intersection = _add(p, _scalar_mult(t, r))
return intersection
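# The helpers above implement the standard parametric test: with segments p + t*r and
# q + u*s, the crossing point is p + t*r where t = cross(q - p, s) / cross(r, s).
# A worked example (two crossing diagonals meeting at [1, 1]):
# >>> _intersect_segments([[0, 0], [2, 2]], [[0, 2], [2, 0]])
# [1.0, 1.0]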
def _add(v1, v2):
return [v1[0] + v2[0], v1[1] + v2[1]]
def _sub(v1, v2):
return [v1[0] - v2[0], v1[1] - v2[1]]
def _scalar_mult(s, v):
return [s * v[0], s * v[1]]
def voronoi(
points: Union[FeatureCollection, List], bbox: Optional[list] = None
) -> Feature:
"""Takes a FeatureCollection of points, and a bounding box,
and returns a FeatureCollection of Voronoi polygons.
:param points: To find the Voronoi polygons around. Points should be either
FeatureCollection of points or list of points.
:param bbox: A bounding box to clip.
:return: A GeoJSON Feature.
Example:
>>> from turfpy.transformation import voronoi
>>> points = [
... [-66.9703, 40.3183],
... [-63.7763, 40.4500],
... [-65.4196, 42.13985310302137],
... [-69.5813, 43.95405461286195],
... [-65.66337553550034, 55.97088945355232],
... [-60.280418548905, 56.240669185466146],
... [-68.5129561347689, 50.12984589640148],
... [-64.2393519226657, 59.66235385923687],
... ]
>>> bbox = [-70, 40, -60, 60]
>>> voronoi(points, bbox)
"""
if isinstance(points, FeatureCollection):
coords = []
for feature in points["features"]:
            coords.append(feature["geometry"]["coordinates"])
points = np.array(coords)
elif isinstance(points, list):
points = np.array(points)
else:
        raise ValueError(
            "points should be either a FeatureCollection of points or a list of points"
        )
vor = Voronoi(points)
lines = [
ShapelyLineString(vor.vertices[line])
for line in vor.ridge_vertices
if -1 not in line
]
convex_hull = MultiPoint([Point(i) for i in points]).convex_hull.buffer(2)
result = MultiPolygon([poly.intersection(convex_hull) for poly in polygonize(lines)])
result = MultiPolygon(
[p for p in result] + [p for p in convex_hull.difference(unary_union(result))]
)
if bbox is not None:
w, s, e, n = bbox
        clipped_result = clip_by_rect(result, w, s, e, n)
        return Feature(geometry=clipped_result)
return Feature(geometry=result)
| 31.849251
| 90
| 0.609731
|
eb25c193c1d9d6adfae383eeb4b768722e5e7892
| 685
|
py
|
Python
|
pluto/data/traffic/storage/collections/equity_metadata.py
|
chalant/pluto
|
e7bfd35a2c1fc0e0753bd2f840b0a4385b5124fc
|
[
"Apache-2.0"
] | null | null | null |
pluto/data/traffic/storage/collections/equity_metadata.py
|
chalant/pluto
|
e7bfd35a2c1fc0e0753bd2f840b0a4385b5124fc
|
[
"Apache-2.0"
] | null | null | null |
pluto/data/traffic/storage/collections/equity_metadata.py
|
chalant/pluto
|
e7bfd35a2c1fc0e0753bd2f840b0a4385b5124fc
|
[
"Apache-2.0"
] | null | null | null |
from pluto.data.traffic.storage.database.mongo_utils import get_collection
_META_DATA = get_collection('AvailableTickers')
_TICKERS = get_collection('Tickers')
'''keeps track of what to download and maintain... and the symbols in general...'''
class MetaData(object):
def __init__(self):
self._observers = []
    def sid(self, symbol):
        '''If there are no sids yet, generate a sid for each symbol.'''
        pass
    def update(self, symbol, **kwargs):
        pass
    def reset(self, symbol):
        '''Resets data for a symbol; the ingester might need to trigger this.'''
        for observer in self._observers:
            observer.reset(symbol)
    def subscribe(self, equity_collection):
        self._observers.append(equity_collection)
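# A minimal sketch of the intended observer wiring (the collection object here is
# hypothetical; anything exposing a reset(symbol) method would do):
# >>> meta = MetaData()
# >>> meta.subscribe(equity_collection)   # collection now receives reset notifications
# >>> meta.reset('AAPL')                  # forwards the reset to every subscriber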
| 24.464286
| 83
| 0.732847
|
91a95719089a488be6e23bc09b470e3c69d1df79
| 2,849
|
py
|
Python
|
pymoo/optimize.py
|
AIasd/pymoo
|
08705ca866367d9fab675c30ffe585c837df9654
|
[
"Apache-2.0"
] | 11
|
2018-05-22T17:38:02.000Z
|
2022-02-28T03:34:33.000Z
|
pymoo/optimize.py
|
AIasd/pymoo
|
08705ca866367d9fab675c30ffe585c837df9654
|
[
"Apache-2.0"
] | 15
|
2022-01-03T19:36:36.000Z
|
2022-03-30T03:57:58.000Z
|
pymoo/optimize.py
|
AIasd/pymoo
|
08705ca866367d9fab675c30ffe585c837df9654
|
[
"Apache-2.0"
] | 3
|
2021-11-22T08:01:47.000Z
|
2022-03-11T08:53:58.000Z
|
import copy
from pymoo.factory import get_termination
from pymoo.model.termination import Termination
from pymoo.util.termination.default import MultiObjectiveDefaultTermination, SingleObjectiveDefaultTermination
def minimize(problem,
algorithm,
termination=None,
**kwargs):
"""
    Minimization of a function of one or more variables, objectives and constraints.
    This is used as a convenience function to execute several algorithms with default settings which turned
    out to work for a test suite. However, evolutionary computation utilizes the idea of customizing a
    meta-algorithm. Customizing the algorithm using the object-oriented interface is recommended to improve
    convergence.
Parameters
----------
problem : :class:`~pymoo.model.problem.Problem`
A problem object which is defined using pymoo.
algorithm : :class:`~pymoo.model.algorithm.Algorithm`
The algorithm object that should be used for the optimization.
termination : :class:`~pymoo.model.termination.Termination` or tuple
The termination criterion that is used to stop the algorithm.
seed : integer
The random seed to be used.
verbose : bool
Whether output should be printed or not.
display : :class:`~pymoo.util.display.Display`
Each algorithm has a default display object for printouts. However, it can be overwritten if desired.
callback : :class:`~pymoo.model.callback.Callback`
A callback object which is called each iteration of the algorithm.
save_history : bool
Whether the history should be stored or not.
Returns
-------
res : :class:`~pymoo.model.result.Result`
The optimization result represented as an object.
"""
# create a copy of the algorithm object to ensure no side-effects
algorithm = copy.deepcopy(algorithm)
# get the termination if provided as a tuple - create an object
if termination is not None and not isinstance(termination, Termination):
if isinstance(termination, str):
termination = get_termination(termination)
else:
termination = get_termination(*termination)
# initialize the algorithm object given a problem
algorithm.initialize(problem, termination=termination, **kwargs)
# if no termination could be found add the default termination either for single or multi objective
if algorithm.termination is None:
if problem.n_obj > 1:
algorithm.termination = MultiObjectiveDefaultTermination()
else:
algorithm.termination = SingleObjectiveDefaultTermination()
# actually execute the algorithm
res = algorithm.solve()
# store the deep copied algorithm in the result object
res.algorithm = algorithm
return res
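# A minimal usage sketch (assumes pymoo's bundled test problems and factory helpers;
# exact names may differ between pymoo versions):
# >>> from pymoo.factory import get_problem, get_algorithm
# >>> res = minimize(get_problem("zdt1"), get_algorithm("nsga2"), ("n_gen", 50), seed=1)
# >>> res.F   # objective values of the final population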
| 34.743902
| 111
| 0.710425
|
f61ac3aaf04f777d94d5a5c1e19344f17f9296a5
| 1,337
|
py
|
Python
|
rosshow/viewers/nav_msgs/OccupancyGridViewer.py
|
dheera/rosstorm
|
463f6065c481ed1dfae2dc5c9b66a00356eb6fe9
|
[
"BSD-3-Clause"
] | 868
|
2017-07-14T08:37:50.000Z
|
2022-03-27T15:30:22.000Z
|
rosshow/viewers/nav_msgs/OccupancyGridViewer.py
|
dheera/rosstorm
|
463f6065c481ed1dfae2dc5c9b66a00356eb6fe9
|
[
"BSD-3-Clause"
] | 19
|
2017-07-14T12:02:19.000Z
|
2022-01-24T16:50:22.000Z
|
rosshow/viewers/nav_msgs/OccupancyGridViewer.py
|
dheera/rosstorm
|
463f6065c481ed1dfae2dc5c9b66a00356eb6fe9
|
[
"BSD-3-Clause"
] | 70
|
2017-07-14T22:26:20.000Z
|
2022-03-22T07:13:12.000Z
|
import numpy as np
import time
import PIL.Image
import rosshow.termgraphics as termgraphics
from rosshow.viewers.generic.GenericImageViewer import GenericImageViewer
class OccupancyGridViewer(GenericImageViewer):
def __init__(self, canvas, title = ""):
def msg_decoder(msg):
"""
            Decodes a nav_msgs/OccupancyGrid ROS message into a numpy H x W x 3 RGB visualization.
            Values <0 are shown in yellow.
            Values >100 are shown in red.
            Values 0-100 are occupancy probabilities, shown on a white-to-black ramp.
"""
occupancy_map = np.array(self.msg.data, dtype=np.int16).reshape(self.msg.info.height, self.msg.info.width)[::-1, :]
color_prob_zero = np.array([0, 0, 0], dtype=np.uint8)
color_prob_one = np.array([255, 255, 255], dtype=np.uint8)
map_image = ((100 - occupancy_map) * 10 // 4).astype(np.uint8) # *10//4 is int approx to *255.0/100.0
map_image = np.stack((map_image,)*3, axis = -1) # greyscale to rgb
# display <0 in yellow
map_image[occupancy_map < 0] = [255, 127, 0]
# display >100 in red
map_image[occupancy_map > 100] = [255, 0, 0]
return map_image
GenericImageViewer.__init__(self, canvas, msg_decoder = msg_decoder, title = title)
| 37.138889
| 127
| 0.623785
|
cdbae39c891c0f744c3d055b2a74237a5bc2dfc8
| 2,459
|
py
|
Python
|
scripts/visualisations/compare_auc.py
|
pirakd/DeepProp
|
e43f6e12220da38a3bda51918bd75bb7c48dec31
|
[
"MIT"
] | null | null | null |
scripts/visualisations/compare_auc.py
|
pirakd/DeepProp
|
e43f6e12220da38a3bda51918bd75bb7c48dec31
|
[
"MIT"
] | null | null | null |
scripts/visualisations/compare_auc.py
|
pirakd/DeepProp
|
e43f6e12220da38a3bda51918bd75bb7c48dec31
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
from os import path, makedirs
from utils import get_root_path, get_time
import json
from cycler import cycler
import seaborn
root_path = get_root_path()
output_folder = 'output'
output_file_path = path.join(get_root_path(), output_folder, path.basename(__file__).split('.')[0], get_time())
makedirs(output_file_path, exist_ok=True)
deep_results_file_path = path.join(root_path, 'input', 'results', 'drug_kegg_0')
d2d_results_file_path = path.join(root_path, 'input', 'results', 'd2d_dummy')
with open(path.join(deep_results_file_path, 'results'), 'r' ) as f:
deep_results_dict = json.load(f)['deep']
with open(path.join(deep_results_file_path, 'args'), 'r' ) as f:
args = json.load(f)
with open(path.join(d2d_results_file_path, 'results'), 'r' ) as f:
all_results = json.load(f)
d2d_results = all_results['d2d']
d2d_2_results = all_results['d2d_2']
n_folds = len(deep_results_dict['folds_stats'])
n_experiments = 'all'
for source_type in deep_results_dict['final'].keys():
plt.plot([0, 1], [0.5, 0.5], '--', color=(0.8, 0.8, 0.8), label='random')
plt.plot(deep_results_dict['final'][source_type]['recall'], deep_results_dict['final'][source_type]['precision'],
'o--', label='DeepProp (%0.2f)' % deep_results_dict['final'][source_type]['auc'], lw=2,
markersize=3) # , markeredgecolor = 'dimgrey')
plt.plot(d2d_results['final'][source_type]['recall'], d2d_results['final'][source_type]['precision'], 'o--', label='D2D (%0.2f)' % d2d_results['final'][source_type]['auc'], lw=2,
markersize=3) # , markeredgecolor = 'dimgrey')
plt.plot(d2d_2_results['final'][source_type]['recall'], d2d_2_results['final'][source_type]['precision'], 'o--', label='D2D Deconstructed (%0.2f)' % d2d_2_results['final'][source_type]['auc'], lw=2,
markersize=3) # , markeredgecolor = 'dimgrey')
plt.xlim([0, 1])
plt.ylim([0, 1])
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.legend(loc="lower right")
plt.title('{}, {}, {} sources, {} folds, {}'.format(' '.join(args['data']['directed_interactions_filename']),
args['data']['sources_filename'].split('_')[-1],
n_experiments, n_folds, source_type))
plt.savefig(path.join(output_file_path, 'auc_curve_{}'.format(source_type)))
plt.close()
| 46.396226
| 202
| 0.649044
|
4dc4bb5177bd0a8ce48a9749a454b6b009f3bdaf
| 16,307
|
py
|
Python
|
sentry_sdk/serializer.py
|
annu-ps31/sentry-python
|
3966b4a9744bfcb8c53dcca1b615bbadf4935aec
|
[
"BSD-2-Clause"
] | 1,213
|
2018-06-19T00:51:01.000Z
|
2022-03-31T06:37:16.000Z
|
sentry_sdk/serializer.py
|
annu-ps31/sentry-python
|
3966b4a9744bfcb8c53dcca1b615bbadf4935aec
|
[
"BSD-2-Clause"
] | 1,020
|
2018-07-16T12:50:36.000Z
|
2022-03-31T20:42:49.000Z
|
sentry_sdk/serializer.py
|
annu-ps31/sentry-python
|
3966b4a9744bfcb8c53dcca1b615bbadf4935aec
|
[
"BSD-2-Clause"
] | 340
|
2018-07-16T12:47:27.000Z
|
2022-03-22T10:13:21.000Z
|
import sys
import math
from datetime import datetime
from sentry_sdk.utils import (
AnnotatedValue,
capture_internal_exception,
disable_capture_event,
format_timestamp,
json_dumps,
safe_repr,
strip_string,
)
import sentry_sdk.utils
from sentry_sdk._compat import text_type, PY2, string_types, number_types, iteritems
from sentry_sdk._types import MYPY
if MYPY:
from datetime import timedelta
from types import TracebackType
from typing import Any
from typing import Callable
from typing import ContextManager
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union
from sentry_sdk._types import NotImplementedType, Event
Span = Dict[str, Any]
ReprProcessor = Callable[[Any, Dict[str, Any]], Union[NotImplementedType, str]]
Segment = Union[str, int]
if PY2:
# Importing ABCs from collections is deprecated, and will stop working in 3.8
# https://github.com/python/cpython/blob/master/Lib/collections/__init__.py#L49
from collections import Mapping, Sequence, Set
serializable_str_types = string_types
else:
# New in 3.3
# https://docs.python.org/3/library/collections.abc.html
from collections.abc import Mapping, Sequence, Set
# Bytes are technically not strings in Python 3, but we can serialize them
serializable_str_types = (str, bytes)
# Maximum length of JSON-serialized event payloads that can be safely sent
# before the server may reject the event due to its size. This is not intended
# to reflect actual values defined server-side, but rather only be an upper
# bound for events sent by the SDK.
#
# Can be overwritten if wanting to send more bytes, e.g. with a custom server.
# When changing this, keep in mind that events may be a little bit larger than
# this value due to attached metadata, so keep the number conservative.
MAX_EVENT_BYTES = 10 ** 6
MAX_DATABAG_DEPTH = 5
MAX_DATABAG_BREADTH = 10
CYCLE_MARKER = u"<cyclic>"
global_repr_processors = [] # type: List[ReprProcessor]
def add_global_repr_processor(processor):
# type: (ReprProcessor) -> None
global_repr_processors.append(processor)
class Memo(object):
__slots__ = ("_ids", "_objs")
def __init__(self):
# type: () -> None
self._ids = {} # type: Dict[int, Any]
self._objs = [] # type: List[Any]
def memoize(self, obj):
# type: (Any) -> ContextManager[bool]
self._objs.append(obj)
return self
def __enter__(self):
# type: () -> bool
obj = self._objs[-1]
if id(obj) in self._ids:
return True
else:
self._ids[id(obj)] = obj
return False
def __exit__(
self,
ty, # type: Optional[Type[BaseException]]
value, # type: Optional[BaseException]
tb, # type: Optional[TracebackType]
):
# type: (...) -> None
self._ids.pop(id(self._objs.pop()), None)
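# A minimal sketch of how Memo breaks cycles during recursion (illustrative only):
# >>> memo = Memo()
# >>> obj = {"self": None}
# >>> with memo.memoize(obj) as seen:            # False: first visit to obj
# ...     with memo.memoize(obj) as seen_again:  # True: obj re-entered -> CYCLE_MARKER
# ...         pass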
def serialize(event, smart_transaction_trimming=False, **kwargs):
# type: (Event, bool, **Any) -> Event
memo = Memo()
path = [] # type: List[Segment]
meta_stack = [] # type: List[Dict[str, Any]]
span_description_bytes = [] # type: List[int]
def _annotate(**meta):
# type: (**Any) -> None
while len(meta_stack) <= len(path):
try:
segment = path[len(meta_stack) - 1]
node = meta_stack[-1].setdefault(text_type(segment), {})
except IndexError:
node = {}
meta_stack.append(node)
meta_stack[-1].setdefault("", {}).update(meta)
def _should_repr_strings():
# type: () -> Optional[bool]
"""
By default non-serializable objects are going through
safe_repr(). For certain places in the event (local vars) we
want to repr() even things that are JSON-serializable to
make their type more apparent. For example, it's useful to
see the difference between a unicode-string and a bytestring
when viewing a stacktrace.
For container-types we still don't do anything different.
Generally we just try to make the Sentry UI present exactly
what a pretty-printed repr would look like.
:returns: `True` if we are somewhere in frame variables, and `False` if
we are in a position where we will never encounter frame variables
when recursing (for example, we're in `event.extra`). `None` if we
are not (yet) in frame variables, but might encounter them when
recursing (e.g. we're in `event.exception`)
"""
try:
p0 = path[0]
if p0 == "stacktrace" and path[1] == "frames" and path[3] == "vars":
return True
if (
p0 in ("threads", "exception")
and path[1] == "values"
and path[3] == "stacktrace"
and path[4] == "frames"
and path[6] == "vars"
):
return True
except IndexError:
return None
return False
def _is_databag():
# type: () -> Optional[bool]
"""
A databag is any value that we need to trim.
:returns: Works like `_should_repr_strings()`. `True` for "yes",
        `False` for "no", `None` for "maybe soon".
"""
try:
rv = _should_repr_strings()
if rv in (True, None):
return rv
p0 = path[0]
if p0 == "request" and path[1] == "data":
return True
if p0 == "breadcrumbs" and path[1] == "values":
path[2]
return True
if p0 == "extra":
return True
except IndexError:
return None
return False
def _serialize_node(
obj, # type: Any
is_databag=None, # type: Optional[bool]
should_repr_strings=None, # type: Optional[bool]
segment=None, # type: Optional[Segment]
remaining_breadth=None, # type: Optional[int]
remaining_depth=None, # type: Optional[int]
):
# type: (...) -> Any
if segment is not None:
path.append(segment)
try:
with memo.memoize(obj) as result:
if result:
return CYCLE_MARKER
return _serialize_node_impl(
obj,
is_databag=is_databag,
should_repr_strings=should_repr_strings,
remaining_depth=remaining_depth,
remaining_breadth=remaining_breadth,
)
except BaseException:
capture_internal_exception(sys.exc_info())
if is_databag:
return u"<failed to serialize, use init(debug=True) to see error logs>"
return None
finally:
if segment is not None:
path.pop()
del meta_stack[len(path) + 1 :]
def _flatten_annotated(obj):
# type: (Any) -> Any
if isinstance(obj, AnnotatedValue):
_annotate(**obj.metadata)
obj = obj.value
return obj
def _serialize_node_impl(
obj, is_databag, should_repr_strings, remaining_depth, remaining_breadth
):
# type: (Any, Optional[bool], Optional[bool], Optional[int], Optional[int]) -> Any
if should_repr_strings is None:
should_repr_strings = _should_repr_strings()
if is_databag is None:
is_databag = _is_databag()
if is_databag and remaining_depth is None:
remaining_depth = MAX_DATABAG_DEPTH
if is_databag and remaining_breadth is None:
remaining_breadth = MAX_DATABAG_BREADTH
obj = _flatten_annotated(obj)
if remaining_depth is not None and remaining_depth <= 0:
_annotate(rem=[["!limit", "x"]])
if is_databag:
return _flatten_annotated(strip_string(safe_repr(obj)))
return None
if is_databag and global_repr_processors:
hints = {"memo": memo, "remaining_depth": remaining_depth}
for processor in global_repr_processors:
result = processor(obj, hints)
if result is not NotImplemented:
return _flatten_annotated(result)
if obj is None or isinstance(obj, (bool, number_types)):
if should_repr_strings or (
isinstance(obj, float) and (math.isinf(obj) or math.isnan(obj))
):
return safe_repr(obj)
else:
return obj
elif isinstance(obj, datetime):
return (
text_type(format_timestamp(obj))
if not should_repr_strings
else safe_repr(obj)
)
elif isinstance(obj, Mapping):
# Create temporary copy here to avoid calling too much code that
# might mutate our dictionary while we're still iterating over it.
obj = dict(iteritems(obj))
rv_dict = {} # type: Dict[str, Any]
i = 0
for k, v in iteritems(obj):
if remaining_breadth is not None and i >= remaining_breadth:
_annotate(len=len(obj))
break
str_k = text_type(k)
v = _serialize_node(
v,
segment=str_k,
should_repr_strings=should_repr_strings,
is_databag=is_databag,
remaining_depth=remaining_depth - 1
if remaining_depth is not None
else None,
remaining_breadth=remaining_breadth,
)
rv_dict[str_k] = v
i += 1
return rv_dict
elif not isinstance(obj, serializable_str_types) and isinstance(
obj, (Set, Sequence)
):
rv_list = []
for i, v in enumerate(obj):
if remaining_breadth is not None and i >= remaining_breadth:
_annotate(len=len(obj))
break
rv_list.append(
_serialize_node(
v,
segment=i,
should_repr_strings=should_repr_strings,
is_databag=is_databag,
remaining_depth=remaining_depth - 1
if remaining_depth is not None
else None,
remaining_breadth=remaining_breadth,
)
)
return rv_list
if should_repr_strings:
obj = safe_repr(obj)
else:
if isinstance(obj, bytes):
obj = obj.decode("utf-8", "replace")
if not isinstance(obj, string_types):
obj = safe_repr(obj)
# Allow span descriptions to be longer than other strings.
#
# For database auto-instrumented spans, the description contains
# potentially long SQL queries that are most useful when not truncated.
# Because arbitrarily large events may be discarded by the server as a
# protection mechanism, we dynamically limit the description length
# later in _truncate_span_descriptions.
if (
smart_transaction_trimming
and len(path) == 3
and path[0] == "spans"
and path[-1] == "description"
):
span_description_bytes.append(len(obj))
return obj
return _flatten_annotated(strip_string(obj))
def _truncate_span_descriptions(serialized_event, event, excess_bytes):
# type: (Event, Event, int) -> None
"""
Modifies serialized_event in-place trying to remove excess_bytes from
span descriptions. The original event is used read-only to access the
    span timestamps (represented as RFC3339-formatted strings in
serialized_event).
It uses heuristics to prioritize preserving the description of spans
that might be the most interesting ones in terms of understanding and
optimizing performance.
"""
# When truncating a description, preserve a small prefix.
min_length = 10
def shortest_duration_longest_description_first(args):
# type: (Tuple[int, Span]) -> Tuple[timedelta, int]
i, serialized_span = args
span = event["spans"][i]
now = datetime.utcnow()
start = span.get("start_timestamp") or now
end = span.get("timestamp") or now
duration = end - start
description = serialized_span.get("description") or ""
return (duration, -len(description))
# Note: for simplicity we sort spans by exact duration and description
# length. If ever needed, we could have a more involved heuristic, e.g.
# replacing exact durations with "buckets" and/or looking at other span
# properties.
path.append("spans")
for i, span in sorted(
enumerate(serialized_event.get("spans") or []),
key=shortest_duration_longest_description_first,
):
description = span.get("description") or ""
if len(description) <= min_length:
continue
excess_bytes -= len(description) - min_length
path.extend([i, "description"])
# Note: the last time we call strip_string we could preserve a few
# more bytes up to a total length of MAX_EVENT_BYTES. Since that's
# not strictly required, we leave it out for now for simplicity.
span["description"] = _flatten_annotated(
strip_string(description, max_length=min_length)
)
del path[-2:]
del meta_stack[len(path) + 1 :]
if excess_bytes <= 0:
break
path.pop()
del meta_stack[len(path) + 1 :]
disable_capture_event.set(True)
try:
rv = _serialize_node(event, **kwargs)
if meta_stack and isinstance(rv, dict):
rv["_meta"] = meta_stack[0]
sum_span_description_bytes = sum(span_description_bytes)
if smart_transaction_trimming and sum_span_description_bytes > 0:
span_count = len(event.get("spans") or [])
# This is an upper bound of how many bytes all descriptions would
# consume if the usual string truncation in _serialize_node_impl
# would have taken place, not accounting for the metadata attached
# as event["_meta"].
descriptions_budget_bytes = span_count * sentry_sdk.utils.MAX_STRING_LENGTH
# If by not truncating descriptions we ended up with more bytes than
# per the usual string truncation, check if the event is too large
# and we need to truncate some descriptions.
#
# This is guarded with an if statement to avoid JSON-encoding the
# event unnecessarily.
if sum_span_description_bytes > descriptions_budget_bytes:
original_bytes = len(json_dumps(rv))
excess_bytes = original_bytes - MAX_EVENT_BYTES
if excess_bytes > 0:
# Event is too large, will likely be discarded by the
# server. Trim it down before sending.
_truncate_span_descriptions(rv, event, excess_bytes)
# Span descriptions truncated, set or reset _meta.
#
# We run the same code earlier because we want to account
# for _meta when calculating original_bytes, the number of
# bytes in the JSON-encoded event.
if meta_stack and isinstance(rv, dict):
rv["_meta"] = meta_stack[0]
return rv
finally:
disable_capture_event.set(False)
| 35.144397
| 90
| 0.579935
|
e9e8ef7ff914b562b0a3541a1fb075f6241b7aea
| 13,814
|
py
|
Python
|
examples/ex1-london/ex1-london.py
|
hidekb/PyRossGeo
|
0d245a547add212f27be00bf234235cbd1db65f9
|
[
"MIT"
] | 12
|
2020-05-12T09:18:48.000Z
|
2020-10-23T13:29:24.000Z
|
examples/ex1-london/ex1-london.py
|
hidekb/PyRossGeo
|
0d245a547add212f27be00bf234235cbd1db65f9
|
[
"MIT"
] | null | null | null |
examples/ex1-london/ex1-london.py
|
hidekb/PyRossGeo
|
0d245a547add212f27be00bf234235cbd1db65f9
|
[
"MIT"
] | 5
|
2020-05-15T15:53:08.000Z
|
2020-07-20T23:31:38.000Z
|
# ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.5'
# jupytext_version: 1.4.2
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# +
# %%capture
# Compile and import local pyrossgeo module
import os, sys
owd = os.getcwd()
os.chdir('../../')
sys.path.insert(0,'../../')
# !python setup.py build_ext --inplace
os.chdir(owd)
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
import pyrossgeo
import pandas as pd
import json
# -
# # Generate the configuration files
# ### Define model
# +
model = {
"settings" : {
"classes" : ["S", "E", "A", "I", "R"],
"stochastic_threshold_from_below" : [1000, 1000, 1000, 1000, 1000],
"stochastic_threshold_from_above" : [500, 500, 500, 500, 500],
"infection_scaling" : "powerlaw",
"infection_scaling_parameters" : [0, 0.004, 0.5] # a + b * rho^c
},
"S" : {
"linear" : [],
"infection" : [ ["I", "-betaI"], ["A", "-betaA"] ]
},
"E" : {
"linear" : [ ["E", "-gammaE"] ],
"infection" : [ ["I", "betaI"], ["A", "betaA"] ]
},
"A" : {
"linear" : [ ["E", "gammaE"], ["A", "-gammaA"] ],
"infection" : []
},
"I" : {
"linear" : [ ["A", "gammaA"], ["I", "-gammaI"] ],
"infection" : []
},
"R" : {
"linear" : [ ["I", "gammaI"] ],
"infection" : []
}
}
model_classes = model['settings']['classes']
model_dim = len(model_classes)
# -
# ### Configuration generation parameters
#
# Here we define some parameters with which all the configuration files will be generated. Edit these if you want to change the simulation.
# +
sim_config_path = 'london_simulation'
min_num_moving = 20 # Remove all commuting edges where less than `min_num_moving` are moving
# Decide which classes are allowed to commute
allow_class = [
('S', True),
('E', True),
('A', True),
('Ia1', True),
('Ia2', True),
('Ia3', True),
('Is1', True),
('Is2', False),
('Is3', False),
('R', True),
]
# Decide where to seed with infecteds
seed_pop = [
(0, 1, 'E', 100), # Home, age group, model class, seed quantity
(10, 2, 'E', 100),
(23, 0, 'E', 100),
(622, 4, 'E', 100),
(232, 4, 'E', 100)
]
# Node parameters
n_betaI = 0.02
n_betaA = 0.02
n_gammaE = 1/3.0
n_gammaA = 1/3.0
n_gammaI = 1/3.0
# Cnode parameters
cn_betaI = n_betaI
cn_betaA = n_betaA
cn_gammaE = n_gammaE
cn_gammaA = n_gammaA
cn_gammaI = n_gammaI
# Time steps
t_start = 0
t_end = 24*60*100
_, dts = pyrossgeo.utils.get_dt_schedule([
(0, 1*60),
(7*60, 2),
(10*60, 2*60),
(17*60, 2),
(19*60, 2*60)
], end_time=24*60)
# -
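# A note on the schedule above (an assumption about intent, read off the tuples): the
# solver uses 1-2 hour steps overnight and midday, but 2-minute steps during the
# 07:00-10:00 and 17:00-19:00 windows, when the commuter-network terms change fastest.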
# ### Format the commuting network
# +
cn = pd.read_csv("%s/commuter_networks.csv" % sim_config_path)
#### Set which classes are allowed to commute
# Drop the current allow_O columns
cn = cn.iloc[:,:10]
# Set allow settings
for O, allow_O in allow_class:
cn[ "Allow %s" % O ] = 1 if allow_O else 0
# Allow people to return home
cn.loc[ cn['Home'] == cn['To'],"Allow %s" % allow_class[0][0]:] = 1
#### Remove commuting edges where fewer than `min_num_moving` people are commuting
delete_rows = []
for i, row in cn.loc[ cn['Home'] == cn['From'] ].iterrows():
if row['# to move'] < min_num_moving:
delete_rows.append(i)
delete_rows.append(i+1) # Delete the returning commuting edge as well
cn = cn.reset_index()
cn = cn.drop(delete_rows)
cn = cn.drop(columns='index')
cn.loc[cn['ct1'] == cn['ct2'], 'ct2'] += 0.1
cn.head()
# -
# ### Populate the network
# Our `node_populations.csv` currently only has the total population for each age group at each node. In order to use it for the simulation, we must populate it with the model classes, as well as seed some infections.
tot_pop = pd.read_csv("%s/node_populations.csv" % sim_config_path)
tot_pop.head()
# +
# Create all model classes, and set everyone to be susceptible
npop = pd.DataFrame()
npop['Home'] = tot_pop['Home']
npop['Location'] = tot_pop['Location']
for _cn, _cd in tot_pop.iloc[:,2:].iteritems():
for O in model['settings']['classes']:
npop["%s%s" % (O, _cn[1:])] = 0
npop["%s%s" % ("S", _cn[1:])] = _cd
# Seed with infecteds
for home, age, O, seed_quantity in seed_pop:
row_i = npop[npop['Home'] == home].index[0]
col_i = 2 + age*model_dim
S = npop.iloc[row_i,col_i]
npop.iloc[row_i, col_i + model_classes.index('E')] = seed_quantity
npop.iloc[row_i, col_i] -= seed_quantity
# -
# ### Setting the node and cnode parameters
# We need to add rows giving the model parameters in `node_parameters.csv` and `cnode_parameters.csv`, which currently only contain the areas of each geographical node:
nparam = pd.read_csv('london_simulation/node_parameters.csv')
cnparam = pd.read_csv('london_simulation/cnode_parameters.csv')
nparam.head()
# +
cnparam['betaI'] = cn_betaI
cnparam['betaA'] = cn_betaA
cnparam['gammaE'] = cn_gammaE
cnparam['gammaA'] = cn_gammaA
cnparam['gammaI'] = cn_gammaI
nparam = nparam.append({
'Home' : 'ALL',
'Location' : 'ALL',
'Age' : 'ALL',
'betaI' : n_betaI,
'betaA' : n_betaA,
'gammaE' : n_gammaE,
'gammaA' : n_gammaA,
'gammaI' : n_gammaI,
}, ignore_index=True)
nparam.iloc[-2:-1,:]
# -
# ### Contact matrices
#
# Define the contact matrices
# +
C_home = np.array( [
[5.0,4.83,4.69,4.58,4.48,4.4,4.33,4.28,4.23],
[4.83,5.0,4.83,4.69,4.58,4.48,4.4,4.33,4.28],
[4.69,4.83,5.0,4.83,4.69,4.58,4.48,4.4,4.33],
[4.58,4.69,4.83,5.0,4.83,4.69,4.58,4.48,4.4],
[4.48,4.58,4.69,4.83,5.0,4.83,4.69,4.58,4.48],
[4.4,4.48,4.58,4.69,4.83,5.0,4.83,4.69,4.58],
[4.33,4.4,4.48,4.58,4.69,4.83,5.0,4.83,4.69],
[4.28,4.33,4.4,4.48,4.58,4.69,4.83,5.0,4.83],
[4.23,4.28,4.33,4.4,4.48,4.58,4.69,4.83,5.0],
] )
C_school = np.array( [
[8.0,7.83,7.69,0.25,0.19,0.15,0.12,0.1,0.09],
[7.83,8.0,7.83,0.26,0.19,0.15,0.12,0.1,0.09],
[7.69,7.83,8.0,0.26,0.19,0.15,0.12,0.11,0.09],
[0.25,0.26,0.26,0.27,0.2,0.15,0.13,0.11,0.09],
[0.19,0.19,0.19,0.2,0.2,0.16,0.13,0.11,0.09],
[0.15,0.15,0.15,0.15,0.16,0.16,0.13,0.11,0.09],
[0.12,0.12,0.12,0.13,0.13,0.13,0.13,0.11,0.1],
[0.1,0.1,0.11,0.11,0.11,0.11,0.11,0.11,0.1],
[0.09,0.09,0.09,0.09,0.09,0.09,0.1,0.1,0.1]
])
C_work = np.array( [
[0.08,0.07,0.07,0.07,0.07,0.07,0.07,0.07,0.07],
[0.07,0.09,0.08,0.08,0.08,0.08,0.08,0.08,0.08],
[0.07,0.08,0.1,0.1,0.09,0.09,0.09,0.09,0.09],
[0.07,0.08,0.1,0.12,0.12,0.11,0.11,0.11,0.11],
[0.07,0.08,0.09,0.12,0.15,0.15,0.14,0.14,0.14],
[0.07,0.08,0.09,0.11,0.15,0.2,0.19,0.19,0.19],
[0.07,0.08,0.09,0.11,0.14,0.19,6.0,5.83,5.69],
[0.07,0.08,0.09,0.11,0.14,0.19,5.83,6.0,5.83],
[0.07,0.08,0.09,0.11,0.14,0.19,5.69,5.83,6.0]
])
C_transport = np.array( [
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0],
[10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0,10.0]
])
contact_matrices = {
'C' : C_home + C_school + C_work,
'C_commute' : C_transport
}
# +
ncm = pd.DataFrame(columns=['Home', 'Location'] + model['settings']['classes'])
ncm = ncm.append({
'Home' : 'ALL',
'Location' : 'ALL',
'A' : 'C',
'I' : 'C'
}, ignore_index=True)
# +
cncm = pd.DataFrame(columns=['Home', 'From', 'To'] + model['settings']['classes'])
cncm = cncm.append({
'Home' : 'ALL',
'From' : 'ALL',
'To' : 'ALL',
'A' : 'C_commute',
'I' : 'C_commute'
}, ignore_index=True)
# -
# ## Run simulation
# +
sim = pyrossgeo.Simulation()
X_state = sim.initialize(
model_dat = model,
commuter_networks_dat = cn,
node_populations_dat = npop,
node_parameters_dat = nparam,
cnode_parameters_dat = cnparam,
contact_matrices_dat = contact_matrices,
node_cmatrices_dat = ncm,
cnode_cmatrices_dat = cncm
)
sim_data = sim.simulate(X_state, t_start, t_end, dts, steps_per_save=len(dts), steps_per_print=len(dts))
ts, node_data, cnode_data, location_data, community_data, network_data = pyrossgeo.utils.extract_simulation_data(sim_data)
ts_days = ts / (24*60)
ts_hours = ts / 60
# -
# ## Plot the result
# Plot the evolution of the whole network
# +
plt.figure( figsize=(8,3) )
S = np.sum(network_data[:,:,0], axis=1)
E = np.sum(network_data[:,:,1], axis=1)
A = np.sum(network_data[:,:,2], axis=1)
I = np.sum(network_data[:,:,3], axis=1)
R = np.sum(network_data[:,:,4], axis=1)
plt.plot(ts_days, S, label="S")
plt.plot(ts_days, E, label="E")
plt.plot(ts_days, A, label="A")
plt.plot(ts_days, I, label="I")
plt.plot(ts_days, R, label="R")
plt.legend(loc='upper right', fontsize=12)
plt.xlabel('Days')
# -
# ### Plotting the result using GeoPandas
# Assemble geo data and define helper functions. Edit `plot_frame` to change the format of the video.
# +
import pickle
import tempfile
import geopandas as gpd
from geopandas.plotting import plot_polygon_collection
from matplotlib import animation
# Simulation data
N_ = np.sum(location_data[:,:,:,:], axis=(1,2))
S_ = np.sum(location_data[:,:,0,:], axis=1)
E_ = np.sum(location_data[:,:,1,:], axis=1)
A_ = np.sum(location_data[:,:,2,:], axis=1)
I_ = np.sum(location_data[:,:,3,:], axis=1)
R_ = np.sum(location_data[:,:,4,:], axis=1)
s_ = S_ / N_
e_ = E_ / N_
a_ = A_ / N_
i_ = I_ / N_
r_ = R_ / N_
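# Per-location fractions of each compartment; these are what get mapped onto
# the choropleths below (plotted with vmin=0, vmax=1).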
ts_days = pyrossgeo.utils.extract_ts(sim_data) / (24*60)
epi_data = np.sum(np.array([ # Used to plot pandemic curves
S_,E_,A_,I_,R_
]), axis=2)
# Load geometry
geometry_node_key = 'msoa11cd'
geometry = gpd.read_file("../geodata/london_geo/london_msoa_shapes/Middle_Layer_Super_Output_Areas_December_2011_Boundaries_EW_BGC.shp")
loc_table = pd.read_csv('london_simulation/loc_table.csv')
loc_table_loc_col = loc_table.columns[0]
loc_table_loc_key_col = loc_table.columns[1]
geometry = geometry[ geometry[geometry_node_key].isin(loc_table.iloc[:,1]) ] # Remove locations in geometry that are not in loc_table
geometry = geometry.merge(loc_table, left_on=geometry_node_key, right_on=loc_table_loc_key_col) # Add location indices
geometry = geometry.sort_values(by=loc_table_loc_col) # Sort them by location indices
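# After the merge and sort, row i of `geometry` corresponds to location index i,
# so its columns can be filled directly from s_[ti,:], e_[ti,:], etc.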
# Edit this function to adjust the layout of the video
def plot_frame(ti, close_plot=False, tmp_save=None):
fig, axes = plt.subplots(ncols=3, nrows=2, gridspec_kw={'width_ratios':[1, 1, 1.3]}, figsize=(18, 14))
geometry['S'] = s_[ti,:]
geometry['E'] = e_[ti,:]
geometry['A'] = a_[ti,:]
geometry['I'] = i_[ti,:]
geometry['R'] = r_[ti,:]
plot_geo(geometry, axes[0,0], vmin=0, vmax=1, value_key='S', title="Susceptible", legend=False)
plot_geo(geometry, axes[0,1], vmin=0, vmax=1, value_key='E', title="Exposed", legend=False)
plot_geo(geometry, axes[0,2], vmin=0, vmax=1, value_key='A', title="Activated", legend=True)
plot_geo(geometry, axes[1,0], vmin=0, vmax=1, value_key='I', title="Infected", legend=False)
plot_geo(geometry, axes[1,1], vmin=0, vmax=1, value_key='R', title="Recovered", legend=False)
plot_epi(axes[1,2], ti, ts_days, epi_data, ['S','E','A','I','R'])
fig.tight_layout(rect=[0, 0.03, 1, 0.92])
fig.suptitle("SEAIR Model - Day %s" % ti, fontsize=18)
if not tmp_save is None:
plt.savefig(tmp_save.name + '/%s.png' % ti)
if close_plot:
plt.close(fig)
if not tmp_save is None:
return tmp_save.name + '/%s.png' % ti
# Helper functions for plotting
def plot_geo(geometry, ax, vmin, vmax, value_key='val', title="", legend=True, legend_label='', cax=None, axis_on=False):
if legend:
if cax is None:
geometry.plot(column=value_key, ax=ax, vmin=vmin, vmax=vmax, legend=True, legend_kwds={'label': legend_label})
else:
geometry.plot(column=value_key, ax=ax, cax=cax, vmin=vmin, vmax=vmax, legend=True, legend_kwds={'label': legend_label})
else:
geometry.plot(column=value_key, ax=ax, cax=cax, vmin=vmin, vmax=vmax, legend=False)
ax.set_title(title)
if not axis_on:
ax.set_axis_off()
def plot_epi(ax, ti, ts, epi_data, epi_data_labels):
for oi in range(epi_data.shape[0]):
ax.plot(ts[:ti], epi_data[oi,:ti], label=epi_data_labels[oi])
ax.legend(loc='center left')
ax.set_xlim(np.min(ts_days), np.max(ts_days))
ax.set_ylim(0, np.max(epi_data))
# -
# Plot the pandemic at a given day
# +
day = 50
geometry['S'] = s_[day,:]
geometry['E'] = e_[day,:]
geometry['A'] = a_[day,:]
geometry['I'] = i_[day,:]
geometry['R'] = r_[day,:]
fig, ax = plt.subplots(figsize=(7, 5))
plot_geo(geometry, ax, vmin=0, vmax=1, value_key='S', title='Susceptibles at day %s' % day)
# +
day = 50
plot_frame(day)
# -
# Create a video of the pandemic
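# Each frame is rendered by `plot_frame` into a temporary directory as a PNG and
# the PNGs are then stitched together with OpenCV. `fps` sets the playback speed,
# the fourcc string selects the codec, and the result is written to
# `sim_video.mp4` in the working directory. The `ti % 1` check prints progress
# for every frame; raise the modulus to print less often.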
# +
tmp_dir = tempfile.TemporaryDirectory()
frames_paths = []
for ti in range(len(ts)):
if ti % 1 == 0:
print("Frame %s of %s" % (ti, len(ts)))
frame_path = plot_frame(ti, close_plot=True, tmp_save=tmp_dir)
frames_paths.append(frame_path)
import cv2
video_name = 'sim_video.mp4'
frame = cv2.imread(frames_paths[0])
height, width, layers = frame.shape
fps = 6
#codec=cv2.VideoWriter_fourcc('D', 'I', 'V', 'X')
codec=cv2.VideoWriter_fourcc(*'DIVX')
video = cv2.VideoWriter(video_name, codec, fps, (width,height))
for frame_path in frames_paths:
video.write(cv2.imread(frame_path))
cv2.destroyAllWindows()
video.release()
| 26.927875
| 217
| 0.617417
|
42f36a2ff8cb08238011480c34325960099f25a7
| 1,239
|
py
|
Python
|
tests/conftest.py
|
thekashifmalik/dynaconf
|
2cb4ad53e9dff7ed2582a48ec975fa86780ab911
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
thekashifmalik/dynaconf
|
2cb4ad53e9dff7ed2582a48ec975fa86780ab911
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
thekashifmalik/dynaconf
|
2cb4ad53e9dff7ed2582a48ec975fa86780ab911
|
[
"MIT"
] | null | null | null |
# coding: utf-8
import os
import pytest
from dynaconf.base import LazySettings
@pytest.fixture(scope='module')
def settings():
"""Settings fixture with some defaults"""
mode = 'TRAVIS' if os.environ.get('TRAVIS') else 'TEST'
loaders = ['dynaconf.loaders.env_loader']
os.environ['DYNA%s_HOSTNAME' % mode] = 'host.com'
os.environ['DYNA%s_PORT' % mode] = '@int 5000'
os.environ['DYNA%s_VALUE' % mode] = '@float 42.1'
os.environ['DYNA%s_ALIST' % mode] = '@json ["item1", "item2", "item3"]'
os.environ['DYNA%s_ADICT' % mode] = '@json {"key": "value"}'
os.environ['DYNA%s_DEBUG' % mode] = '@bool true'
os.environ['DYNA%s_TODELETE' % mode] = '@bool true'
os.environ['PROJECT1_HOSTNAME'] = 'otherhost.com'
sets = LazySettings(
LOADERS_FOR_DYNACONF=loaders,
NAMESPACE_FOR_DYNACONF="DYNA%s" % mode,
boxed_data={
'HOST': 'server.com',
'port': 8080,
'PARAMS': {
'username': 'admin',
'PASSWORD': 'secret',
'token': {
'TYPE': 1,
'value': 2
}
}
}
)
sets.SIMPLE_BOOL = False
sets.configure()
return sets
| 30.975
| 75
| 0.546408
|
79dc41de90e500800353b6c12a5e9fe392da8184
| 598
|
py
|
Python
|
rtl/tests/icebreaker-top/tests.py
|
andars/4ft4
|
9417585b62b77f0bda72091bcc064eb2b8b623a8
|
[
"MIT"
] | null | null | null |
rtl/tests/icebreaker-top/tests.py
|
andars/4ft4
|
9417585b62b77f0bda72091bcc064eb2b8b623a8
|
[
"MIT"
] | null | null | null |
rtl/tests/icebreaker-top/tests.py
|
andars/4ft4
|
9417585b62b77f0bda72091bcc064eb2b8b623a8
|
[
"MIT"
] | null | null | null |
import cocotb
from cocotb.clock import Clock
from cocotb.triggers import ClockCycles
@cocotb.test()
async def test_icebreaker_top(dut):
    # start the clock with a 10 ns period (100 MHz)
clock = Clock(dut.clock, round(1e3/100), units="ns")
cocotb.start_soon(clock.start())
# reset
dut.reset.value = 1
await ClockCycles(dut.clock, 32)
dut.reset.value = 0
dut._log.info("begin test of icebreaker_top")
# give the cpu some time to run
await ClockCycles(dut.clock, 8 * 16)
# check the ROM io output port (connected to `leds`)
dut._log.info(dut.leds)
assert dut.leds == 0xc
| 24.916667
| 56
| 0.682274
|
0a1e7a9d481c669f585cdff7a6a7697df5befedb
| 2,010
|
py
|
Python
|
pigLatin.py
|
valentinsoare/currentlyLearningPythonProjectsBased
|
740acdecec1de2ac827eb46cc72a060bb0bdf954
|
[
"MIT"
] | 1
|
2021-08-17T02:27:18.000Z
|
2021-08-17T02:27:18.000Z
|
pigLatin.py
|
valentinsoare/currentlyLearningPythonProjectsBased
|
740acdecec1de2ac827eb46cc72a060bb0bdf954
|
[
"MIT"
] | null | null | null |
pigLatin.py
|
valentinsoare/currentlyLearningPythonProjectsBased
|
740acdecec1de2ac827eb46cc72a060bb0bdf954
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3
import re
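# Pig-latin encoder: the first letter of each word is moved to the end and 'ay'
# is appended. Trailing punctuation and initial capitalization are preserved,
# and tokens containing digits are passed through unchanged.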
def check_if_punctuation(given_word):
checking = re.search(r'(([a-z]+)(\W))', given_word)
if checking:
punctuation_checking = 1
return punctuation_checking, checking.group(1), checking.group(2), checking.group(3)
else:
punctuation_checking = 0
return punctuation_checking, given_word, given_word, ''
def check_word_with_capitalized_letter(given_word):
if given_word[0].isupper():
return True
else:
return False
def prepare_for_commas(given_word):
comma_condition, entire_word, word, punctuation = check_if_punctuation(given_word.lower())
word_length = len(word)
if comma_condition == 1:
final_word = word[1] + word[2:word_length] + word[0] + 'ay' + punctuation
else:
final_word = word[1] + word[2:word_length] + word[0] + 'ay'
return final_word
def generate_pig_latin_word(given_word):
if check_word_with_capitalized_letter(given_word):
given_word = prepare_for_commas(given_word).lower()
final_piglatin_word = given_word[0].capitalize() + given_word[1:(len(given_word))]
else:
given_word = given_word.lower()
final_piglatin_word = prepare_for_commas(given_word)
return final_piglatin_word
def create_phrase(input_word):
phrase = ''
check = re.search(r'\w', input_word)
if check:
check_if_digit = re.search(r'\d', input_word)
if check_if_digit:
phrase += input_word + ' '
else:
phrase += generate_pig_latin_word(input_word) + ' '
else:
phrase += input_word
return phrase
def running_main(list_of_words):
phrase = ''
list_of_words_length = len(list_of_words)
for i in range(list_of_words_length):
phrase += create_phrase(list_of_words[i])
print(f'\n\033[1m-> Coded Phrase:\033[0m \033[1;34m{phrase}\033[0m\n')
given_text = input('\n\033[1m*Please enter your phrase: \033[0m')
running_main(given_text.split())
| 26.103896
| 94
| 0.669652
|
d7bbafbd905d7099cafc9f37751a06f98b857486
| 4,625
|
py
|
Python
|
examples/nlp/language_modeling/megatron_gpt_eval.py
|
Jorjeous/NeMo
|
cafc21ee6a0c7781fb08e9821c327b1ece1f83e3
|
[
"Apache-2.0"
] | null | null | null |
examples/nlp/language_modeling/megatron_gpt_eval.py
|
Jorjeous/NeMo
|
cafc21ee6a0c7781fb08e9821c327b1ece1f83e3
|
[
"Apache-2.0"
] | null | null | null |
examples/nlp/language_modeling/megatron_gpt_eval.py
|
Jorjeous/NeMo
|
cafc21ee6a0c7781fb08e9821c327b1ece1f83e3
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from argparse import ArgumentParser
import torch
from pytorch_lightning.trainer.trainer import Trainer
from torch.utils.data import DataLoader
from nemo.collections.nlp.data.language_modeling.megatron.gpt_request_dataset import GPTRequestDataset
from nemo.collections.nlp.models.language_modeling.megatron_gpt_model import MegatronGPTModel
from nemo.collections.nlp.modules.common.megatron.megatron_utils import compute_model_parallel_rank
from nemo.collections.nlp.parts.nlp_overrides import NLPDDPPlugin
from nemo.utils import logging
from nemo.utils.app_state import AppState
"""
Usage:
    a. If you need to run the model on a few prompts from a file:
python megatron_gpt_eval.py \
--model_file=PATH_TO_MODEL \
--path_to_file=PATH_TO_FILE \
--tokens_to_generate=32 \
--prompt .
    b. If you need to run the model on a prompt from the CLI:
python megatron_gpt_eval.py \
--model_file=PATH_TO_MODEL \
--tokens_to_generate=32 \
--prompt=YOUR_PROMPT
"""
assert torch.cuda.is_available()
def main():
parser = ArgumentParser()
parser.add_argument("--model_file", type=str, default="", required=True, help="Pass path to model's .nemo file")
parser.add_argument(
"--path_to_file", type=str, default="", required=False, help="Path to file with prompts (a text to complete)"
)
parser.add_argument(
"--prompt", type=str, default="", required=True, help="Prompt for the model (a text to complete)"
)
parser.add_argument(
"--tokens_to_generate", type=int, default="64", required=False, help="How many tokens to add to prompt"
)
parser.add_argument(
"--stop_after_sentence",
type=bool,
default="True",
required=False,
help="True/False: whether to stop after full sentence has been generated.",
)
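    # Note: argparse applies type=bool to the raw string, and bool() of any
    # non-empty string (including "False") is True, so passing an explicit
    # value other than the empty string always yields True here.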
parser.add_argument(
"--tensor_model_parallel_size", type=int, default=1, required=False,
)
parser.add_argument("--precision", default=32, help="PyTorch Lightning Trainer precision flag")
args = parser.parse_args()
# cast precision to int if 32 or 16
if args.precision in ["32", "16"]:
args.precision = int(float(args.precision))
# trainer required for restoring model parallel models
trainer = Trainer(plugins=NLPDDPPlugin(), gpus=args.tensor_model_parallel_size, precision=args.precision)
app_state = AppState()
if args.tensor_model_parallel_size is not None and args.tensor_model_parallel_size > 1:
app_state.model_parallel_size = args.tensor_model_parallel_size
app_state.model_parallel_rank = compute_model_parallel_rank(trainer.local_rank, app_state.model_parallel_size)
model = MegatronGPTModel.restore_from(restore_path=args.model_file, trainer=trainer)
model.freeze()
# defining type of request
if args.path_to_file != "":
data = []
prompts = open(args.path_to_file, 'r')
for prompt in prompts.readlines():
request = {
"prompt": prompt.split('\n')[0],
"tokens_to_generate": args.tokens_to_generate,
"stop_after_sentence": args.stop_after_sentence,
}
data.append(request)
dataset = GPTRequestDataset(data, model.tokenizer)
request_dl = DataLoader(dataset)
response = trainer.predict(model, request_dl)
else:
request = [
{
"prompt": args.prompt,
"tokens_to_generate": args.tokens_to_generate,
"stop_after_sentence": args.stop_after_sentence,
}
]
dataset = GPTRequestDataset(request, model.tokenizer)
request_dl = DataLoader(dataset)
response = trainer.predict(model, request_dl)
print("***************************")
print(response)
print("***************************")
if __name__ == '__main__':
main() # noqa pylint: disable=no-value-for-parameter
| 36.706349
| 118
| 0.678054
|
166cf6654f03b1821f66e00e419f15fb6b261535
| 2,718
|
py
|
Python
|
keras_cv/layers/preprocessing/solarization_test.py
|
kartik4949/keras-cv
|
4c300f564d8ec99cd1351c445e1803ee6664915a
|
[
"Apache-2.0"
] | null | null | null |
keras_cv/layers/preprocessing/solarization_test.py
|
kartik4949/keras-cv
|
4c300f564d8ec99cd1351c445e1803ee6664915a
|
[
"Apache-2.0"
] | null | null | null |
keras_cv/layers/preprocessing/solarization_test.py
|
kartik4949/keras-cv
|
4c300f564d8ec99cd1351c445e1803ee6664915a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2022 The KerasCV Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tensorflow as tf
from absl.testing import parameterized
from keras_cv.layers.preprocessing.solarization import Solarization
class SolarizationTest(tf.test.TestCase, parameterized.TestCase):
@parameterized.named_parameters(
("0_255", 0, 255),
("64_191", 64, 191),
("127_128", 127, 128),
("191_64", 191, 64),
("255_0", 255, 0),
)
def test_output_values(self, input_value, expected_value):
solarization = Solarization(value_range=(0, 255))
self._test_input_output(
layer=solarization,
input_value=input_value,
expected_value=expected_value,
dtype=tf.uint8,
)
@parameterized.named_parameters(
("0_245", 0, 245),
("255_0", 255, 0),
)
def test_solarization_with_addition(self, input_value, output_value):
solarization = Solarization(addition_factor=(10.0, 10.0), value_range=(0, 255))
self._test_input_output(
layer=solarization,
input_value=input_value,
expected_value=output_value,
dtype=tf.float32,
)
@parameterized.named_parameters(
("0_0", 0, 0),
("64_64", 64, 64),
("127_127", 127, 127),
("191_64", 191, 64),
("255_0", 255, 0),
)
def test_only_values_above_threshold_are_solarized(self, input_value, output_value):
solarization = Solarization(threshold_factor=(128, 128), value_range=(0, 255))
self._test_input_output(
layer=solarization,
input_value=input_value,
expected_value=output_value,
dtype=tf.uint8,
)
def _test_input_output(self, layer, input_value, expected_value, dtype):
input = tf.ones(shape=(2, 224, 224, 3), dtype=dtype) * input_value
expected_output = tf.clip_by_value(
(
tf.ones(shape=(2, 224, 224, 3), dtype=layer.compute_dtype)
* expected_value
),
0,
255,
)
output = layer(input)
self.assertAllClose(output, expected_output)
| 33.146341
| 88
| 0.633186
|
4b3ea336f2fa847fc2da5799030a9f017450f04b
| 1,561
|
py
|
Python
|
jogging-lawrence/src/graphics.py
|
alanxoc3/stupid-games
|
674a884eb428be5b6da47d0a41946929da79ae99
|
[
"MIT"
] | 1
|
2020-08-11T10:53:15.000Z
|
2020-08-11T10:53:15.000Z
|
jogging-lawrence/src/graphics.py
|
alanxoc3/stupid-games
|
674a884eb428be5b6da47d0a41946929da79ae99
|
[
"MIT"
] | null | null | null |
jogging-lawrence/src/graphics.py
|
alanxoc3/stupid-games
|
674a884eb428be5b6da47d0a41946929da79ae99
|
[
"MIT"
] | 1
|
2020-08-11T10:53:16.000Z
|
2020-08-11T10:53:16.000Z
|
import pygame
images = {}
_width = None
_height = None
_screen = None
_backBuf = None
_entities = []
_map = None
def reset():
del _entities[:]
# Adds an entity to _entities, which holds everything that needs to be drawn.
# Each entity needs sprite_sheet, frame, weight,
# and FRAMES (an array of x and y coords giving the positions on the sprite sheet),
# as well as an x and y position.
def register(entity):
global _entities
if entity not in _entities:
_entities.append(entity)
# Creates graphics object. sets its width, height and title of the window of the game
def init(width, height, title = 'Jogging Lawrence'):
global _width, _height, _screen, _backBuf
_width = width * 2
_height = height * 2
pygame.display.init()
# creates window
_screen = pygame.display.set_mode((_width, _height), pygame.DOUBLEBUF)
_backBuf = pygame.Surface((width, height))
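    # The back buffer is kept at the native resolution; update() scales it 2x
    # onto the window surface each frame.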
# sets title of the window
pygame.display.set_caption(title)
# Draws all the entities.
def update():
global _screen, _entities, _backBuf
_backBuf.fill((0, 0, 0))
for entity in _entities:
entity.draw()
pygame.transform.scale(_backBuf, (_width, _height), _screen)
pygame.display.flip()
def blit(surface, pos):
global _backBuf
surface = pygame.transform.scale(surface, (surface.get_width(), surface.get_height()))
_backBuf.blit(surface, (pos[0], pos[1]))
def drawRect(rect):
pygame.draw.rect(_backBuf, (0, 0, 0), rect)
def load_image(path):
global images
if path in images:
return images[path]
image = pygame.image.load(path)
image.convert()
images[path] = image
return image
| 24.390625
| 90
| 0.72902
|
7891dfbf0df8f05e4d1c3bc3b09e53b60c6795a2
| 9,688
|
py
|
Python
|
nmt.py
|
rjhd2/sotc_graphics
|
e9d2b259fb09d6a7bc9262d33de25b1a113fb9b1
|
[
"BSD-3-Clause"
] | null | null | null |
nmt.py
|
rjhd2/sotc_graphics
|
e9d2b259fb09d6a7bc9262d33de25b1a113fb9b1
|
[
"BSD-3-Clause"
] | null | null | null |
nmt.py
|
rjhd2/sotc_graphics
|
e9d2b259fb09d6a7bc9262d33de25b1a113fb9b1
|
[
"BSD-3-Clause"
] | 1
|
2020-10-01T05:15:03.000Z
|
2020-10-01T05:15:03.000Z
|
#!/usr/bin/env python
#************************************************************************
#
# Plot figures and output numbers for NMAT section.
# For BAMS SotC 2020
#
#************************************************************************
# SVN Info
# $Rev:: 24 $: Revision of last commit
# $Author:: rdunn $: Author of last commit
# $Date:: 2019-03-11 12:46:29 +0000 (Mon, 11 Mar #$: Date of last commit
#************************************************************************
# START
#************************************************************************
import os
import datetime as dt
import copy
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
from matplotlib.ticker import MultipleLocator
import matplotlib.dates as mdates
import iris
import cartopy
import utils # RJHD utilities
import settings
DATALOC = "{}/{}/data/NMT/".format(settings.ROOTLOC, settings.YEAR)
LEGEND_LOC = 'lower right'
#************************************************************************
def read_csv_1(filename, dataset, region):
all_data = np.genfromtxt(filename, delimiter=",", skip_header=1, dtype=(str))
locs, = np.where(np.logical_and(all_data[:, 1] == dataset, all_data[:, 3] == region))
times = all_data[locs, 0].astype(float)
data = all_data[locs, 2]
locs, = np.where(data == "NA")
data[locs] = -99.9
data = np.ma.array(data).astype(float)
data.mask = np.zeros(data.shape)
data.mask[locs] = True
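    # "NA" entries are replaced with a sentinel value and masked so they are
    # ignored downstream.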
return utils.Timeseries(dataset, times, data) # read_csv_1
#************************************************************************
def read_csv_2(filename, dataset, region):
all_data = np.genfromtxt(filename, delimiter=",", skip_header=1, dtype=(str))
locs, = np.where(np.logical_and(all_data[:, 2] == dataset, all_data[:, 1] == region))
times = all_data[locs, 0].astype(float)
data = all_data[locs, 3].astype(float)
return utils.Timeseries(dataset, times, data) # read_csv_2
#************************************************************************
def run_all_plots():
#************************************************************************
# Timeseries 1 figure
if True:
fig, (ax1, ax2, ax3, ax4) = plt.subplots(4, figsize=(8, 10), sharex=True)
filename = "fig_1_data_v2.csv"
filename = "fig_1_data.csv"
# Globe
classnmat = read_csv_1(DATALOC + filename, "CLASSnmat v2", "Global")
uahnmat = read_csv_1(DATALOC + filename, "UAHNMAT v1", "Global")
hadsst = read_csv_1(DATALOC + filename, "HadSST4", "Global")
utils.plot_ts_panel(ax1, [classnmat, uahnmat, hadsst], "-", "temperature", loc=LEGEND_LOC)
ax1.text(0.02, 0.88, "(a) Global", transform=ax1.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
# Northern Extra-Tropics
classnmat = read_csv_1(DATALOC + filename, "CLASSnmat v2", "Northern Extra-Tropics")
uahnmat = read_csv_1(DATALOC + filename, "UAHNMAT v1", "Northern Extra-Tropics")
hadsst = read_csv_1(DATALOC + filename, "HadSST4", "Northern Extra-Tropics")
utils.plot_ts_panel(ax2, [classnmat, uahnmat, hadsst], "-", "temperature", loc="")
ax2.text(0.02, 0.88, "(b) Northern Extra-Tropics", transform=ax2.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
# Tropics
classnmat = read_csv_1(DATALOC + filename, "CLASSnmat v2", "Tropics")
uahnmat = read_csv_1(DATALOC + filename, "UAHNMAT v1", "Tropics")
hadsst = read_csv_1(DATALOC + filename, "HadSST4", "Tropics")
utils.plot_ts_panel(ax3, [classnmat, uahnmat, hadsst], "-", "temperature", loc="")
ax3.text(0.02, 0.88, "(c) Tropics", transform=ax3.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
# Southern Extra-Tropics
classnmat = read_csv_1(DATALOC + filename, "CLASSnmat v2", "Southern Extra-Tropics")
uahnmat = read_csv_1(DATALOC + filename, "UAHNMAT v1", "Southern Extra-Tropics")
hadsst = read_csv_1(DATALOC + filename, "HadSST4", "Southern Extra-Tropics")
utils.plot_ts_panel(ax4, [classnmat, uahnmat, hadsst], "-", "temperature", loc="")
ax4.text(0.02, 0.88, "(d) Southern Extra-Tropics", transform=ax4.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
plt.xlim([1897, int(settings.YEAR)+3])
for tick in ax4.xaxis.get_major_ticks():
tick.label.set_fontsize(settings.FONTSIZE)
for ax in [ax1, ax2, ax3, ax4]:
for tick in ax.yaxis.get_major_ticks():
tick.label.set_fontsize(settings.FONTSIZE)
ax.xaxis.set_minor_locator(MultipleLocator(5))
ax.set_ylim([-1.3, 1.3])
fig.text(0.01, 0.55, "Anomaly ("+r'$^\circ$'+"C)", fontsize=settings.FONTSIZE, rotation="vertical")
fig.subplots_adjust(right=0.98, top=0.98, bottom=0.04, hspace=0.001)
plt.savefig(settings.IMAGELOC+"NMT_ts{}".format(settings.OUTFMT))
plt.close()
#************************************************************************
# Timeseries 1 figure
if True:
fig, (ax1, ax2, ax3, ax4) = plt.subplots(4, figsize=(8, 10), sharex=True)
# Globe
sst = read_csv_2(DATALOC + "fig_2_data.csv", "SST", "Global")
at = read_csv_2(DATALOC + "fig_2_data.csv", "Air Temperature", "Global")
utils.plot_ts_panel(ax1, [sst, at], "-", "temperature", loc=LEGEND_LOC)
ax1.text(0.02, 0.88, "(a) Global", transform=ax1.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
# Northern Extra-Tropics
sst = read_csv_2(DATALOC + "fig_2_data.csv", "SST", "Northern Extra-Tropics")
at = read_csv_2(DATALOC + "fig_2_data.csv", "Air Temperature", "Northern Extra-Tropics")
utils.plot_ts_panel(ax2, [sst, at], "-", "temperature", loc="")
ax2.text(0.02, 0.88, "(b) Northern Extra-Tropics", transform=ax2.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
# Tropics
sst = read_csv_2(DATALOC + "fig_2_data.csv", "SST", "Tropics")
at = read_csv_2(DATALOC + "fig_2_data.csv", "Air Temperature", "Tropics")
utils.plot_ts_panel(ax3, [sst, at], "-", "temperature", loc="")
ax3.text(0.02, 0.88, "(c) Tropics", transform=ax3.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
# Southern Extra-Tropics
sst = read_csv_2(DATALOC + "fig_2_data.csv", "SST", "Southern Extra-Tropics")
at = read_csv_2(DATALOC + "fig_2_data.csv", "Air Temperature", "Southern Extra-Tropics")
utils.plot_ts_panel(ax4, [sst, at], "-", "temperature", loc="")
ax4.text(0.02, 0.88, "(d) Southern Extra-Tropics", transform=ax4.transAxes, \
fontsize=settings.LABEL_FONTSIZE)
plt.xlim([1948, int(settings.YEAR)+2])
for tick in ax4.xaxis.get_major_ticks():
tick.label.set_fontsize(settings.FONTSIZE)
for ax in [ax1, ax2, ax3, ax4]:
for tick in ax.yaxis.get_major_ticks():
tick.label.set_fontsize(settings.FONTSIZE)
ax.xaxis.set_minor_locator(MultipleLocator(5))
ax.set_ylim([-0.7, 0.9])
fig.text(0.01, 0.55, "Anomaly ("+r'$^\circ$'+"C)", fontsize=settings.FONTSIZE, rotation="vertical")
fig.subplots_adjust(right=0.98, top=0.98, bottom=0.04, hspace=0.001)
plt.savefig(settings.IMAGELOC+"NMT_ts_sst{}".format(settings.OUTFMT))
plt.close()
#************************************************************************
# Anomaly figure
if True:
# Read in ERA anomalies
cube_list = iris.load(os.path.join(DATALOC, "CLASSnmat_2.0.0.0_anomaly_1981_2010_ANNMEAN_{}.nc".format(settings.YEAR)))
for cube in cube_list:
if cube.var_name == "t10m_anomaly":
break
cube.coord('latitude').guess_bounds()
cube.coord('longitude').guess_bounds()
bounds = [-100, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 100]
utils.plot_smooth_map_iris(settings.IMAGELOC + "NMT_CLASS_anomalies_{}".format(settings.YEAR), cube[0], settings.COLOURMAP_DICT["temperature"], bounds, "Anomalies from 1981-2010 ("+r'$^{\circ}$'+"C)", title="CLASSNMAT")
utils.plot_smooth_map_iris(settings.IMAGELOC + "p2.1_NMT_CLASS_anomalies_{}".format(settings.YEAR), cube[0], settings.COLOURMAP_DICT["temperature"], bounds, "Anomalies from 1981-2010 ("+r'$^{\circ}$'+"C)", figtext="(f) Night Marine Air Temperature")
#************************************************************************
# Anomaly figure
if True:
# Read in ERA anomalies
cube_list = iris.load(os.path.join(DATALOC, "fig_3_data.nc".format(settings.YEAR)))
cube = cube_list[0]
cube.coord('latitude').guess_bounds()
cube.coord('longitude').guess_bounds()
bounds = [-100, -0.5, -0.3, -0.2, -0.1, 0, 0.1, 0.2, 0.3, 0.5, 100]
utils.plot_smooth_map_iris(settings.IMAGELOC + "NMT_trend_diff_{}".format(settings.YEAR), cube, settings.COLOURMAP_DICT["temperature"], bounds, "Trend ("+r'$^{\circ}$'+"C decade "+r'$^{-1}$'+")", title="MAT-SST trend")
return # run_all_plots
#************************************************************************
if __name__ == "__main__":
run_all_plots()
#************************************************************************
# END
#************************************************************************
| 43.25
| 257
| 0.55192
|
357334ed6468e7fc640e411ea0100b94c80aeeda
| 44
|
py
|
Python
|
python/cendalytics/tfidf/core/bp/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/cendalytics/tfidf/core/bp/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
python/cendalytics/tfidf/core/bp/__init__.py
|
jiportilla/ontology
|
8a66bb7f76f805c64fc76cfc40ab7dfbc1146f40
|
[
"MIT"
] | null | null | null |
from .vectorspace_api import VectorSpaceAPI
| 22
| 43
| 0.886364
|
8b0ad63bf9a718768b183ff428443ee080443c18
| 1,408
|
py
|
Python
|
.cmake-format.py
|
devjgm/google-cloud-cpp-common
|
f7b259caa09a8662847ca8463c3c28e8d285a403
|
[
"Apache-2.0"
] | 29
|
2019-05-03T15:03:48.000Z
|
2021-06-04T06:15:55.000Z
|
.cmake-format.py
|
devjgm/google-cloud-cpp-common
|
f7b259caa09a8662847ca8463c3c28e8d285a403
|
[
"Apache-2.0"
] | 1,199
|
2019-05-03T13:05:54.000Z
|
2020-06-01T18:58:26.000Z
|
.cmake-format.py
|
devjgm/google-cloud-cpp-common
|
f7b259caa09a8662847ca8463c3c28e8d285a403
|
[
"Apache-2.0"
] | 18
|
2019-05-02T20:53:06.000Z
|
2021-10-07T21:29:36.000Z
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
tab_size = 4
separate_ctrl_name_with_space = True
enable_sort = True
autosort = True
additional_commands = {
"externalproject_add": {
"flags": [
],
"kwargs": {
"BUILD_COMMAND": "+",
"BUILD_BYPRODUCTS": "+",
"CMAKE_ARGS": "+",
"COMMAND": "+",
"CONFIGURE_COMMAND": "+",
"DEPENDS": "+",
"DOWNLOAD_COMMAND": "+",
"EXCLUDE_FROM_ALL": 1,
"INSTALL_COMMAND": "+",
"INSTALL_DIR": 1,
"LIST_SEPARATOR": 1,
"TEST_COMMAND": "+",
"LOG_BUILD": 1,
"LOG_CONFIGURE": 1,
"LOG_DOWNLOAD": 1,
"LOG_INSTALL": 1,
"PREFIX": 1,
"URL": 1,
"URL_HASH": 1,
"BUILD_ALWAYS": 1,
}
}
}
| 29.333333
| 74
| 0.564631
|
d78dfd269d93137ff4b88b94a7bb7eca2cdb32d6
| 891
|
py
|
Python
|
onnx_test/mcrec_onnx_model_test.py
|
chencsgit/luoxi_models
|
ea9e69dfb81b29f41ed92c75faacf81114c69a2f
|
[
"Apache-2.0"
] | 58
|
2022-03-28T06:16:51.000Z
|
2022-03-31T07:36:35.000Z
|
onnx_test/mcrec_onnx_model_test.py
|
chencsgit/luoxi_models
|
ea9e69dfb81b29f41ed92c75faacf81114c69a2f
|
[
"Apache-2.0"
] | null | null | null |
onnx_test/mcrec_onnx_model_test.py
|
chencsgit/luoxi_models
|
ea9e69dfb81b29f41ed92c75faacf81114c69a2f
|
[
"Apache-2.0"
] | 4
|
2022-03-28T06:23:25.000Z
|
2022-03-30T13:45:07.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2022 The Luoxi Team.
# All rights reserved.
# This source code is licensed under the Apache 2.0 license
# found in the LICENSE file in the root directory.
import torch
def mock_orec_data():
num_samples = 2
mock_data = {}
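    # Each field below is built from a single example and tiled num_samples
    # times, so the mock batch holds two identical rows.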
hist_seq = [[int(e) for e in "44,172,602,602,163,258,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0".strip().split(",")]]
mock_data['hist_seq'] = torch.LongTensor(hist_seq).repeat((num_samples, 1))
cand = [[int("672")]]
mock_data['cand'] = torch.LongTensor(cand).repeat((num_samples, 1)).squeeze(1)
prior_score = [[float("0.1192")]]
mock_data['prior_score'] = torch.Tensor(prior_score).repeat((num_samples, 1)).squeeze(1)
label = [[int("0")]]
mock_data['label'] = torch.LongTensor(label).repeat((num_samples, 1)).squeeze(1)
return mock_data
| 33
| 166
| 0.645342
|
3081776d96a269eaa1f1ba604645c78aa9cc936e
| 32,698
|
py
|
Python
|
tests/i18n/test_extraction.py
|
neon-jungle/django
|
d8f00e1918ce4df76920f3d79bc8d805fa69e29e
|
[
"BSD-3-Clause"
] | 1
|
2021-04-11T08:16:02.000Z
|
2021-04-11T08:16:02.000Z
|
tests/i18n/test_extraction.py
|
neon-jungle/django
|
d8f00e1918ce4df76920f3d79bc8d805fa69e29e
|
[
"BSD-3-Clause"
] | null | null | null |
tests/i18n/test_extraction.py
|
neon-jungle/django
|
d8f00e1918ce4df76920f3d79bc8d805fa69e29e
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import io
import os
import re
import shutil
import sys
import time
from unittest import SkipTest, skipUnless
import warnings
from django.conf import settings
from django.core import management
from django.core.management import execute_from_command_line
from django.core.management.utils import find_command
from django.test import SimpleTestCase
from django.test import override_settings
from django.utils.encoding import force_text
from django.utils._os import upath
from django.utils import six
from django.utils.six import StringIO
from django.utils.translation import TranslatorCommentWarning
LOCALE = 'de'
has_xgettext = find_command('xgettext')
this_directory = os.path.dirname(upath(__file__))
@skipUnless(has_xgettext, 'xgettext is mandatory for extraction tests')
class ExtractorTests(SimpleTestCase):
test_dir = os.path.abspath(os.path.join(this_directory, 'commands'))
PO_FILE = 'locale/%s/LC_MESSAGES/django.po' % LOCALE
def setUp(self):
self._cwd = os.getcwd()
def _rmrf(self, dname):
if os.path.commonprefix([self.test_dir, os.path.abspath(dname)]) != self.test_dir:
return
shutil.rmtree(dname)
def rmfile(self, filepath):
if os.path.exists(filepath):
os.remove(filepath)
def tearDown(self):
os.chdir(self.test_dir)
try:
self._rmrf('locale/%s' % LOCALE)
except OSError:
pass
os.chdir(self._cwd)
def _run_makemessages(self, **options):
os.chdir(self.test_dir)
out = StringIO()
management.call_command('makemessages', locale=[LOCALE], verbosity=2,
stdout=out, **options)
output = out.getvalue()
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = fp.read()
return output, po_contents
def assertMsgId(self, msgid, s, use_quotes=True):
q = '"'
if use_quotes:
msgid = '"%s"' % msgid
q = "'"
needle = 'msgid %s' % msgid
msgid = re.escape(msgid)
return self.assertTrue(re.search('^msgid %s' % msgid, s, re.MULTILINE), 'Could not find %(q)s%(n)s%(q)s in generated PO file' % {'n': needle, 'q': q})
def assertNotMsgId(self, msgid, s, use_quotes=True):
if use_quotes:
msgid = '"%s"' % msgid
msgid = re.escape(msgid)
return self.assertTrue(not re.search('^msgid %s' % msgid, s, re.MULTILINE))
def _assertPoLocComment(self, assert_presence, po_filename, line_number, *comment_parts):
with open(po_filename, 'r') as fp:
po_contents = force_text(fp.read())
if os.name == 'nt':
# #: .\path\to\file.html:123
cwd_prefix = '%s%s' % (os.curdir, os.sep)
else:
# #: path/to/file.html:123
cwd_prefix = ''
parts = ['#: ']
parts.append(os.path.join(cwd_prefix, *comment_parts))
if line_number is not None:
parts.append(':%d' % line_number)
needle = ''.join(parts)
if assert_presence:
return self.assertIn(needle, po_contents, '"%s" not found in final .po file.' % needle)
else:
return self.assertNotIn(needle, po_contents, '"%s" shouldn\'t be in final .po file.' % needle)
def assertLocationCommentPresent(self, po_filename, line_number, *comment_parts):
"""
self.assertLocationCommentPresent('django.po', 42, 'dirA', 'dirB', 'foo.py')
verifies that the django.po file has a gettext-style location comment of the form
`#: dirA/dirB/foo.py:42`
(or `#: .\dirA\dirB\foo.py:42` on Windows)
None can be passed for the line_number argument to skip checking of the :42 suffix part.
"""
return self._assertPoLocComment(True, po_filename, line_number, *comment_parts)
def assertLocationCommentNotPresent(self, po_filename, line_number, *comment_parts):
"""Check the opposite of assertLocationComment()"""
return self._assertPoLocComment(False, po_filename, line_number, *comment_parts)
def assertRecentlyModified(self, path):
"""
Assert that file was recently modified (modification time was less than 10 seconds ago).
"""
delta = time.time() - os.stat(path).st_mtime
self.assertLess(delta, 10, "%s was recently modified" % path)
def assertNotRecentlyModified(self, path):
"""
Assert that file was not recently modified (modification time was more than 10 seconds ago).
"""
delta = time.time() - os.stat(path).st_mtime
self.assertGreater(delta, 10, "%s wasn't recently modified" % path)
class BasicExtractorTests(ExtractorTests):
def test_comments_extractor(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with io.open(self.PO_FILE, 'r', encoding='utf-8') as fp:
po_contents = fp.read()
self.assertIn('#. Translators: This comment should be extracted', po_contents)
self.assertNotIn('This comment should not be extracted', po_contents)
# Comments in templates
self.assertIn('#. Translators: Django template comment for translators', po_contents)
self.assertIn("#. Translators: Django comment block for translators\n#. string's meaning unveiled", po_contents)
self.assertIn('#. Translators: One-line translator comment #1', po_contents)
self.assertIn('#. Translators: Two-line translator comment #1\n#. continued here.', po_contents)
self.assertIn('#. Translators: One-line translator comment #2', po_contents)
self.assertIn('#. Translators: Two-line translator comment #2\n#. continued here.', po_contents)
self.assertIn('#. Translators: One-line translator comment #3', po_contents)
self.assertIn('#. Translators: Two-line translator comment #3\n#. continued here.', po_contents)
self.assertIn('#. Translators: One-line translator comment #4', po_contents)
self.assertIn('#. Translators: Two-line translator comment #4\n#. continued here.', po_contents)
self.assertIn('#. Translators: One-line translator comment #5 -- with non ASCII characters: áéíóúö', po_contents)
self.assertIn('#. Translators: Two-line translator comment #5 -- with non ASCII characters: áéíóúö\n#. continued here.', po_contents)
def test_templatize_trans_tag(self):
# ticket #11240
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('Literal with a percent symbol at the end %%', po_contents)
self.assertMsgId('Literal with a percent %% symbol in the middle', po_contents)
self.assertMsgId('Completed 50%% of all the tasks', po_contents)
self.assertMsgId('Completed 99%% of all the tasks', po_contents)
self.assertMsgId("Shouldn't double escape this sequence: %% (two percent signs)", po_contents)
self.assertMsgId("Shouldn't double escape this sequence %% either", po_contents)
self.assertMsgId("Looks like a str fmt spec %%s but shouldn't be interpreted as such", po_contents)
self.assertMsgId("Looks like a str fmt spec %% o but shouldn't be interpreted as such", po_contents)
def test_templatize_blocktrans_tag(self):
# ticket #11966
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('I think that 100%% is more that 50%% of anything.', po_contents)
self.assertMsgId('I think that 100%% is more that 50%% of %(obj)s.', po_contents)
self.assertMsgId("Blocktrans extraction shouldn't double escape this: %%, a=%(a)s", po_contents)
def test_blocktrans_trimmed(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
# should not be trimmed
self.assertNotMsgId('Text with a few line breaks.', po_contents)
# should be trimmed
self.assertMsgId("Again some text with a few line breaks, this time should be trimmed.", po_contents)
# #21406 -- Should adjust for eaten line numbers
self.assertMsgId("I'm on line 97", po_contents)
self.assertLocationCommentPresent(self.PO_FILE, 97, 'templates', 'test.html')
def test_force_en_us_locale(self):
"""Value of locale-munging option used by the command is the right one"""
from django.core.management.commands.makemessages import Command
self.assertTrue(Command.leave_locale_alone)
def test_extraction_error(self):
os.chdir(self.test_dir)
self.assertRaises(SyntaxError, management.call_command, 'makemessages', locale=[LOCALE], extensions=['tpl'], verbosity=0)
with self.assertRaises(SyntaxError) as context_manager:
management.call_command('makemessages', locale=[LOCALE], extensions=['tpl'], verbosity=0)
six.assertRegex(
self, str(context_manager.exception),
r'Translation blocks must not include other block tags: blocktrans \(file templates[/\\]template_with_error\.tpl, line 3\)'
)
# Check that the temporary file was cleaned up
self.assertFalse(os.path.exists('./templates/template_with_error.tpl.py'))
def test_unicode_decode_error(self):
os.chdir(self.test_dir)
shutil.copyfile('./not_utf8.sample', './not_utf8.txt')
self.addCleanup(self.rmfile, os.path.join(self.test_dir, 'not_utf8.txt'))
out = StringIO()
management.call_command('makemessages', locale=[LOCALE], stdout=out)
self.assertIn("UnicodeDecodeError: skipped file not_utf8.txt in .",
force_text(out.getvalue()))
def test_extraction_warning(self):
"""test xgettext warning about multiple bare interpolation placeholders"""
os.chdir(self.test_dir)
shutil.copyfile('./code.sample', './code_sample.py')
self.addCleanup(self.rmfile, os.path.join(self.test_dir, 'code_sample.py'))
out = StringIO()
management.call_command('makemessages', locale=[LOCALE], stdout=out)
self.assertIn("code_sample.py:4", force_text(out.getvalue()))
def test_template_message_context_extractor(self):
"""
Ensure that message contexts are correctly extracted for the
{% trans %} and {% blocktrans %} template tags.
Refs #14806.
"""
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
# {% trans %}
self.assertIn('msgctxt "Special trans context #1"', po_contents)
self.assertMsgId("Translatable literal #7a", po_contents)
self.assertIn('msgctxt "Special trans context #2"', po_contents)
self.assertMsgId("Translatable literal #7b", po_contents)
self.assertIn('msgctxt "Special trans context #3"', po_contents)
self.assertMsgId("Translatable literal #7c", po_contents)
# {% blocktrans %}
self.assertIn('msgctxt "Special blocktrans context #1"', po_contents)
self.assertMsgId("Translatable literal #8a", po_contents)
self.assertIn('msgctxt "Special blocktrans context #2"', po_contents)
self.assertMsgId("Translatable literal #8b-singular", po_contents)
self.assertIn("Translatable literal #8b-plural", po_contents)
self.assertIn('msgctxt "Special blocktrans context #3"', po_contents)
self.assertMsgId("Translatable literal #8c-singular", po_contents)
self.assertIn("Translatable literal #8c-plural", po_contents)
self.assertIn('msgctxt "Special blocktrans context #4"', po_contents)
self.assertMsgId("Translatable literal #8d %(a)s", po_contents)
def test_context_in_single_quotes(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
# {% trans %}
self.assertIn('msgctxt "Context wrapped in double quotes"', po_contents)
self.assertIn('msgctxt "Context wrapped in single quotes"', po_contents)
# {% blocktrans %}
self.assertIn('msgctxt "Special blocktrans context wrapped in double quotes"', po_contents)
self.assertIn('msgctxt "Special blocktrans context wrapped in single quotes"', po_contents)
def test_template_comments(self):
"""Template comment tags on the same line of other constructs (#19552)"""
os.chdir(self.test_dir)
# Test detection/end user reporting of old, incorrect templates
# translator comments syntax
with warnings.catch_warnings(record=True) as ws:
warnings.simplefilter('always')
management.call_command('makemessages', locale=[LOCALE], extensions=['thtml'], verbosity=0)
self.assertEqual(len(ws), 3)
for w in ws:
self.assertTrue(issubclass(w.category, TranslatorCommentWarning))
six.assertRegex(
self, str(ws[0].message),
r"The translator-targeted comment 'Translators: ignored i18n comment #1' \(file templates[/\\]comments.thtml, line 4\) was ignored, because it wasn't the last item on the line\."
)
six.assertRegex(
self, str(ws[1].message),
r"The translator-targeted comment 'Translators: ignored i18n comment #3' \(file templates[/\\]comments.thtml, line 6\) was ignored, because it wasn't the last item on the line\."
)
six.assertRegex(
self, str(ws[2].message),
r"The translator-targeted comment 'Translators: ignored i18n comment #4' \(file templates[/\\]comments.thtml, line 8\) was ignored, because it wasn't the last item on the line\."
)
# Now test .po file contents
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('Translatable literal #9a', po_contents)
self.assertNotIn('ignored comment #1', po_contents)
self.assertNotIn('Translators: ignored i18n comment #1', po_contents)
self.assertMsgId("Translatable literal #9b", po_contents)
self.assertNotIn('ignored i18n comment #2', po_contents)
self.assertNotIn('ignored comment #2', po_contents)
self.assertMsgId('Translatable literal #9c', po_contents)
self.assertNotIn('ignored comment #3', po_contents)
self.assertNotIn('ignored i18n comment #3', po_contents)
self.assertMsgId('Translatable literal #9d', po_contents)
self.assertNotIn('ignored comment #4', po_contents)
self.assertMsgId('Translatable literal #9e', po_contents)
self.assertNotIn('ignored comment #5', po_contents)
self.assertNotIn('ignored i18n comment #4', po_contents)
self.assertMsgId('Translatable literal #9f', po_contents)
self.assertIn('#. Translators: valid i18n comment #5', po_contents)
self.assertMsgId('Translatable literal #9g', po_contents)
self.assertIn('#. Translators: valid i18n comment #6', po_contents)
self.assertMsgId('Translatable literal #9h', po_contents)
self.assertIn('#. Translators: valid i18n comment #7', po_contents)
self.assertMsgId('Translatable literal #9i', po_contents)
six.assertRegex(self, po_contents, r'#\..+Translators: valid i18n comment #8')
six.assertRegex(self, po_contents, r'#\..+Translators: valid i18n comment #9')
self.assertMsgId("Translatable literal #9j", po_contents)
def test_makemessages_find_files(self):
"""
Test that find_files only discover files having the proper extensions.
"""
from django.core.management.commands.makemessages import Command
cmd = Command()
cmd.ignore_patterns = ['CVS', '.*', '*~', '*.pyc']
cmd.symlinks = False
cmd.domain = 'django'
cmd.extensions = ['html', 'txt', 'py']
cmd.verbosity = 0
cmd.locale_paths = []
cmd.default_locale_path = os.path.join(self.test_dir, 'locale')
found_files = cmd.find_files(self.test_dir)
found_exts = set([os.path.splitext(tfile.file)[1] for tfile in found_files])
self.assertEqual(found_exts.difference({'.py', '.html', '.txt'}), set())
cmd.extensions = ['js']
cmd.domain = 'djangojs'
found_files = cmd.find_files(self.test_dir)
found_exts = set([os.path.splitext(tfile.file)[1] for tfile in found_files])
self.assertEqual(found_exts.difference({'.js'}), set())
class JavascriptExtractorTests(ExtractorTests):
PO_FILE = 'locale/%s/LC_MESSAGES/djangojs.po' % LOCALE
def test_javascript_literals(self):
os.chdir(self.test_dir)
_, po_contents = self._run_makemessages(domain='djangojs')
self.assertMsgId('This literal should be included.', po_contents)
self.assertMsgId('gettext_noop should, too.', po_contents)
self.assertMsgId('This one as well.', po_contents)
self.assertMsgId(r'He said, \"hello\".', po_contents)
self.assertMsgId("okkkk", po_contents)
self.assertMsgId("TEXT", po_contents)
self.assertMsgId("It's at http://example.com", po_contents)
self.assertMsgId("String", po_contents)
self.assertMsgId("/* but this one will be too */ 'cause there is no way of telling...", po_contents)
self.assertMsgId("foo", po_contents)
self.assertMsgId("bar", po_contents)
self.assertMsgId("baz", po_contents)
self.assertMsgId("quz", po_contents)
self.assertMsgId("foobar", po_contents)
@override_settings(
STATIC_ROOT=os.path.join(this_directory, 'commands', 'static/'),
MEDIA_ROOT=os.path.join(this_directory, 'commands', 'media_root/'))
def test_media_static_dirs_ignored(self):
"""
Regression test for #23583.
"""
_, po_contents = self._run_makemessages(domain='djangojs')
self.assertMsgId("Static content inside app should be included.", po_contents)
self.assertNotMsgId("Content from STATIC_ROOT should not be included", po_contents)
@override_settings(STATIC_ROOT=None, MEDIA_ROOT='')
def test_default_root_settings(self):
"""
Regression test for #23717.
"""
_, po_contents = self._run_makemessages(domain='djangojs')
self.assertMsgId("Static content inside app should be included.", po_contents)
class IgnoredExtractorTests(ExtractorTests):
def test_ignore_directory(self):
out, po_contents = self._run_makemessages(ignore_patterns=[
os.path.join('ignore_dir', '*'),
])
self.assertIn("ignoring directory ignore_dir", out)
self.assertMsgId('This literal should be included.', po_contents)
self.assertNotMsgId('This should be ignored.', po_contents)
def test_ignore_subdirectory(self):
out, po_contents = self._run_makemessages(ignore_patterns=[
'templates/*/ignore.html',
'templates/subdir/*',
])
self.assertIn("ignoring directory subdir", out)
self.assertNotMsgId('This subdir should be ignored too.', po_contents)
def test_ignore_file_patterns(self):
out, po_contents = self._run_makemessages(ignore_patterns=[
'xxx_*',
])
self.assertIn("ignoring file xxx_ignored.html", out)
self.assertNotMsgId('This should be ignored too.', po_contents)
@override_settings(
STATIC_ROOT=os.path.join(this_directory, 'commands', 'static/'),
MEDIA_ROOT=os.path.join(this_directory, 'commands', 'media_root/'))
def test_media_static_dirs_ignored(self):
out, _ = self._run_makemessages()
self.assertIn("ignoring directory static", out)
self.assertIn("ignoring directory media_root", out)
class SymlinkExtractorTests(ExtractorTests):
def setUp(self):
super(SymlinkExtractorTests, self).setUp()
self.symlinked_dir = os.path.join(self.test_dir, 'templates_symlinked')
def tearDown(self):
super(SymlinkExtractorTests, self).tearDown()
os.chdir(self.test_dir)
try:
os.remove(self.symlinked_dir)
except OSError:
pass
os.chdir(self._cwd)
def test_symlink(self):
# On Python < 3.2 os.symlink() exists only on Unix
if hasattr(os, 'symlink'):
if os.path.exists(self.symlinked_dir):
self.assertTrue(os.path.islink(self.symlinked_dir))
else:
                # On Python >= 3.2 os.symlink() always exists, but it can
                # fail at runtime when the user lacks the needed permissions on
                # Windows versions that support symbolic links (>= 6/Vista).
# See Python issue 9333 (http://bugs.python.org/issue9333).
# Skip the test in that case
try:
os.symlink(os.path.join(self.test_dir, 'templates'), self.symlinked_dir)
except (OSError, NotImplementedError):
raise SkipTest("os.symlink() is available on this OS but can't be used by this user.")
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0, symlinks=True)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('This literal should be included.', po_contents)
self.assertIn('templates_symlinked/test.html', po_contents)
class CopyPluralFormsExtractorTests(ExtractorTests):
PO_FILE_ES = 'locale/es/LC_MESSAGES/django.po'
def tearDown(self):
super(CopyPluralFormsExtractorTests, self).tearDown()
os.chdir(self.test_dir)
try:
self._rmrf('locale/es')
except OSError:
pass
os.chdir(self._cwd)
def test_copy_plural_forms(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
self.assertIn('Plural-Forms: nplurals=2; plural=(n != 1)', po_contents)
def test_override_plural_forms(self):
"""Ticket #20311."""
os.chdir(self.test_dir)
management.call_command('makemessages', locale=['es'], extensions=['djtpl'], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE_ES))
with io.open(self.PO_FILE_ES, 'r', encoding='utf-8') as fp:
po_contents = fp.read()
found = re.findall(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', po_contents, re.MULTILINE | re.DOTALL)
self.assertEqual(1, len(found))
class NoWrapExtractorTests(ExtractorTests):
def test_no_wrap_enabled(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=True)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('This literal should also be included wrapped or not wrapped depending on the use of the --no-wrap option.', po_contents)
def test_no_wrap_disabled(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_wrap=False)
self.assertTrue(os.path.exists(self.PO_FILE))
with open(self.PO_FILE, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('""\n"This literal should also be included wrapped or not wrapped depending on the "\n"use of the --no-wrap option."', po_contents, use_quotes=False)
class LocationCommentsTests(ExtractorTests):
def test_no_location_enabled(self):
"""Behavior is correct if --no-location switch is specified. See #16903."""
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=True)
self.assertTrue(os.path.exists(self.PO_FILE))
self.assertLocationCommentNotPresent(self.PO_FILE, 55, 'templates', 'test.html.py')
def test_no_location_disabled(self):
"""Behavior is correct if --no-location switch isn't specified."""
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0, no_location=False)
self.assertTrue(os.path.exists(self.PO_FILE))
# #16903 -- Standard comment with source file relative path should be present
self.assertLocationCommentPresent(self.PO_FILE, 55, 'templates', 'test.html')
# #21208 -- Leaky paths in comments on Windows e.g. #: path\to\file.html.py:123
self.assertLocationCommentNotPresent(self.PO_FILE, None, 'templates', 'test.html.py')
class KeepPotFileExtractorTests(ExtractorTests):
POT_FILE = 'locale/django.pot'
def setUp(self):
super(KeepPotFileExtractorTests, self).setUp()
def tearDown(self):
super(KeepPotFileExtractorTests, self).tearDown()
os.chdir(self.test_dir)
try:
os.unlink(self.POT_FILE)
except OSError:
pass
os.chdir(self._cwd)
def test_keep_pot_disabled_by_default(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
self.assertFalse(os.path.exists(self.POT_FILE))
def test_keep_pot_explicitly_disabled(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0,
keep_pot=False)
self.assertFalse(os.path.exists(self.POT_FILE))
def test_keep_pot_enabled(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=[LOCALE], verbosity=0,
keep_pot=True)
self.assertTrue(os.path.exists(self.POT_FILE))
class MultipleLocaleExtractionTests(ExtractorTests):
PO_FILE_PT = 'locale/pt/LC_MESSAGES/django.po'
PO_FILE_DE = 'locale/de/LC_MESSAGES/django.po'
LOCALES = ['pt', 'de', 'ch']
def tearDown(self):
os.chdir(self.test_dir)
for locale in self.LOCALES:
try:
self._rmrf('locale/%s' % locale)
except OSError:
pass
os.chdir(self._cwd)
def test_multiple_locales(self):
os.chdir(self.test_dir)
management.call_command('makemessages', locale=['pt', 'de'], verbosity=0)
self.assertTrue(os.path.exists(self.PO_FILE_PT))
self.assertTrue(os.path.exists(self.PO_FILE_DE))
class ExcludedLocaleExtractionTests(ExtractorTests):
LOCALES = ['en', 'fr', 'it']
PO_FILE = 'locale/%s/LC_MESSAGES/django.po'
test_dir = os.path.abspath(os.path.join(this_directory, 'exclude'))
def _set_times_for_all_po_files(self):
"""
Set access and modification times to the Unix epoch time for all the .po files.
"""
for locale in self.LOCALES:
os.utime(self.PO_FILE % locale, (0, 0))
def setUp(self):
super(ExcludedLocaleExtractionTests, self).setUp()
os.chdir(self.test_dir) # ExtractorTests.tearDown() takes care of restoring.
shutil.copytree('canned_locale', 'locale')
self._set_times_for_all_po_files()
self.addCleanup(self._rmrf, os.path.join(self.test_dir, 'locale'))
def test_command_help(self):
old_stdout, old_stderr = sys.stdout, sys.stderr
sys.stdout, sys.stderr = StringIO(), StringIO()
try:
# `call_command` bypasses the parser; by calling
# `execute_from_command_line` with the help subcommand we
# ensure that there are no issues with the parser itself.
execute_from_command_line(['django-admin', 'help', 'makemessages'])
finally:
sys.stdout, sys.stderr = old_stdout, old_stderr
def test_one_locale_excluded(self):
management.call_command('makemessages', exclude=['it'], stdout=StringIO())
self.assertRecentlyModified(self.PO_FILE % 'en')
self.assertRecentlyModified(self.PO_FILE % 'fr')
self.assertNotRecentlyModified(self.PO_FILE % 'it')
def test_multiple_locales_excluded(self):
management.call_command('makemessages', exclude=['it', 'fr'], stdout=StringIO())
self.assertRecentlyModified(self.PO_FILE % 'en')
self.assertNotRecentlyModified(self.PO_FILE % 'fr')
self.assertNotRecentlyModified(self.PO_FILE % 'it')
def test_one_locale_excluded_with_locale(self):
management.call_command('makemessages', locale=['en', 'fr'], exclude=['fr'], stdout=StringIO())
self.assertRecentlyModified(self.PO_FILE % 'en')
self.assertNotRecentlyModified(self.PO_FILE % 'fr')
self.assertNotRecentlyModified(self.PO_FILE % 'it')
def test_multiple_locales_excluded_with_locale(self):
management.call_command('makemessages', locale=['en', 'fr', 'it'], exclude=['fr', 'it'],
stdout=StringIO())
self.assertRecentlyModified(self.PO_FILE % 'en')
self.assertNotRecentlyModified(self.PO_FILE % 'fr')
self.assertNotRecentlyModified(self.PO_FILE % 'it')
class CustomLayoutExtractionTests(ExtractorTests):
def setUp(self):
self._cwd = os.getcwd()
self.test_dir = os.path.join(this_directory, 'project_dir')
def test_no_locale_raises(self):
os.chdir(self.test_dir)
with six.assertRaisesRegex(self, management.CommandError,
"Unable to find a locale path to store translations for file"):
management.call_command('makemessages', locale=LOCALE, verbosity=0)
@override_settings(
LOCALE_PATHS=(os.path.join(
this_directory, 'project_dir', 'project_locale'),)
)
def test_project_locale_paths(self):
"""
Test that:
* translations for an app containing a locale folder are stored in that folder
* translations outside of that app are in LOCALE_PATHS[0]
"""
os.chdir(self.test_dir)
self.addCleanup(shutil.rmtree,
os.path.join(settings.LOCALE_PATHS[0], LOCALE), True)
self.addCleanup(shutil.rmtree,
os.path.join(self.test_dir, 'app_with_locale', 'locale', LOCALE), True)
management.call_command('makemessages', locale=[LOCALE], verbosity=0)
project_de_locale = os.path.join(
self.test_dir, 'project_locale', 'de', 'LC_MESSAGES', 'django.po')
app_de_locale = os.path.join(
self.test_dir, 'app_with_locale', 'locale', 'de', 'LC_MESSAGES', 'django.po')
self.assertTrue(os.path.exists(project_de_locale))
self.assertTrue(os.path.exists(app_de_locale))
with open(project_de_locale, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('This app has no locale directory', po_contents)
self.assertMsgId('This is a project-level string', po_contents)
with open(app_de_locale, 'r') as fp:
po_contents = force_text(fp.read())
self.assertMsgId('This app has a locale directory', po_contents)
| 45.795518
| 194
| 0.650865
|
d9a5d0eb2ea303f0395d02c234272502da218b47
| 6,391
|
py
|
Python
|
excessive.py
|
stephenkorankye/Basic-Hotel-System
|
4c3156c2a77f0aefdc347fc2e8255b2c5fd5a8ed
|
[
"MIT"
] | null | null | null |
excessive.py
|
stephenkorankye/Basic-Hotel-System
|
4c3156c2a77f0aefdc347fc2e8255b2c5fd5a8ed
|
[
"MIT"
] | null | null | null |
excessive.py
|
stephenkorankye/Basic-Hotel-System
|
4c3156c2a77f0aefdc347fc2e8255b2c5fd5a8ed
|
[
"MIT"
] | null | null | null |
# STEPHEN KORANKYE
import os
from subprocess import call
import sys
try:
from Tkinter import *
except ImportError:
from tkinter import *
try:
import ttk
py3 = False
except ImportError:
import tkinter.ttk as ttk
py3 = True
def click_checkinn():
call(["python", "checkin_gui_and_program.py"])
def click_list():
call(["python", "listgui.py"])
def click_checkout():
call(["python", "checkoutgui.py"])
def click_getinfo():
call(["python","getinfoui.py"])
class HOTEL_MANAGEMENT:
    def __init__(self):
        '''Configure and populate the application's top-level window.'''
        root = Tk()
_bgcolor = '#d9d9d9' # Color: 'gray85'
_fgcolor = '#000000'
_compcolor = '#ffffff'
_ana1color = '#ffffff'
_ana2color = '#ffffff'
font14 = "-family {Segoe UI} -size 15 -weight bold -slant " \
"roman -underline 0 -overstrike 0"
font16 = "-family {Swis721 BlkCn BT} -size 40 -weight bold " \
"-slant roman -underline 0 -overstrike 0"
font9 = "-family {Segoe UI} -size 9 -weight normal -slant " \
"roman -underline 0 -overstrike 0"
root.geometry("963x749+540+110")
root.title("HOTEL MANAGEMENT")
root.configure(background="#d9d9d9")
root.configure(highlightbackground="#d9d9d9")
root.configure(highlightcolor="black")
self.menubar = Menu(root,font=font9,bg=_bgcolor,fg=_fgcolor)
root.configure(menu = self.menubar)
self.Frame1 = Frame(root)
self.Frame1.place(relx=0.02, rely=0.03, relheight=0.94, relwidth=0.96)
self.Frame1.configure(relief=GROOVE)
self.Frame1.configure(borderwidth="2")
self.Frame1.configure(relief=GROOVE)
self.Frame1.configure(background="#d9d9d9")
self.Frame1.configure(highlightbackground="#d9d9d9")
self.Frame1.configure(highlightcolor="black")
self.Frame1.configure(width=925)
self.Message6 = Message(self.Frame1)
self.Message6.place(relx=0.09, rely=0.01, relheight=0.15, relwidth=0.86)
self.Message6.configure(background="#d9d9d9")
self.Message6.configure(font=font16)
self.Message6.configure(foreground="#000000")
self.Message6.configure(highlightbackground="#d9d9d9")
self.Message6.configure(highlightcolor="black")
self.Message6.configure(text='''HELLO , WELCOME''')
self.Message6.configure(width=791)
self.Button2 = Button(self.Frame1)
self.Button2.place(relx=0.18, rely=0.17, height=103, width=566)
self.Button2.configure(activebackground="#d9d9d9")
self.Button2.configure(activeforeground="#000000")
self.Button2.configure(background="#d9d9d9")
self.Button2.configure(disabledforeground="#bfbfbf")
self.Button2.configure(font=font14)
self.Button2.configure(foreground="#000000")
self.Button2.configure(highlightbackground="#d9d9d9")
self.Button2.configure(highlightcolor="black")
self.Button2.configure(pady="0")
self.Button2.configure(text='''1.CHECK INN''')
self.Button2.configure(width=566)
self.Button2.configure(command=click_checkinn)
self.Button3 = Button(self.Frame1)
self.Button3.place(relx=0.18, rely=0.33, height=93, width=566)
self.Button3.configure(activebackground="#d9d9d9")
self.Button3.configure(activeforeground="#000000")
self.Button3.configure(background="#d9d9d9")
self.Button3.configure(disabledforeground="#bfbfbf")
self.Button3.configure(font=font14)
self.Button3.configure(foreground="#000000")
self.Button3.configure(highlightbackground="#d9d9d9")
self.Button3.configure(highlightcolor="black")
self.Button3.configure(pady="0")
self.Button3.configure(text='''2.SHOW GUEST LIST''')
self.Button3.configure(width=566)
self.Button3.configure(command=click_list)
self.Button4 = Button(self.Frame1)
self.Button4.place(relx=0.18, rely=0.47, height=93, width=566)
self.Button4.configure(activebackground="#d9d9d9")
self.Button4.configure(activeforeground="#000000")
self.Button4.configure(background="#d9d9d9")
self.Button4.configure(disabledforeground="#bfbfbf")
self.Button4.configure(font=font14)
self.Button4.configure(foreground="#000000")
self.Button4.configure(highlightbackground="#d9d9d9")
self.Button4.configure(highlightcolor="black")
self.Button4.configure(pady="0")
self.Button4.configure(text='''3.CHECK OUT''')
self.Button4.configure(width=566)
self.Button4.configure(command=click_checkout)
self.Button5 = Button(self.Frame1)
self.Button5.place(relx=0.18, rely=0.61, height=103, width=566)
self.Button5.configure(activebackground="#d9d9d9")
self.Button5.configure(activeforeground="#000000")
self.Button5.configure(background="#d9d9d9")
self.Button5.configure(disabledforeground="#bfbfbf")
self.Button5.configure(font=font14)
self.Button5.configure(foreground="#000000")
self.Button5.configure(highlightbackground="#d9d9d9")
self.Button5.configure(highlightcolor="black")
self.Button5.configure(pady="0")
self.Button5.configure(text='''4.GET INFO OF ANY GUEST''')
self.Button5.configure(width=566)
self.Button5.configure(command=click_getinfo)
self.Button6 = Button(self.Frame1)
self.Button6.place(relx=0.18, rely=0.77, height=103, width=566)
self.Button6.configure(activebackground="#d9d9d9")
self.Button6.configure(activeforeground="#000000")
self.Button6.configure(background="#d9d9d9")
self.Button6.configure(disabledforeground="#bfbfbf")
self.Button6.configure(font=font14)
self.Button6.configure(foreground="#000000")
self.Button6.configure(highlightbackground="#d9d9d9")
self.Button6.configure(highlightcolor="black")
self.Button6.configure(pady="0")
self.Button6.configure(text='''5.EXIT''')
self.Button6.configure(width=566)
self.Button6.configure(command=quit)
root.mainloop()
if __name__ == '__main__':
    GUEST = HOTEL_MANAGEMENT()
| 39.695652
| 80
| 0.664372
|
22354b20f7b7f1dd7d25420b2c6de338a528d3f9
| 759
|
py
|
Python
|
virtualapp/migrations/0005_attendance.py
|
Halar1312/Virtual-School
|
3dced5f196f4283c0840fa81f35abe85e1f55efd
|
[
"MIT"
] | null | null | null |
virtualapp/migrations/0005_attendance.py
|
Halar1312/Virtual-School
|
3dced5f196f4283c0840fa81f35abe85e1f55efd
|
[
"MIT"
] | null | null | null |
virtualapp/migrations/0005_attendance.py
|
Halar1312/Virtual-School
|
3dced5f196f4283c0840fa81f35abe85e1f55efd
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.5 on 2021-07-25 20:06
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('virtualapp', '0004_registeredstudents'),
]
operations = [
migrations.CreateModel(
name='Attendance',
fields=[
('attendanceID', models.AutoField(primary_key=True, serialize=False)),
('studentId', models.IntegerField(default=0)),
('percentage', models.IntegerField(default=0)),
('date', models.CharField(max_length=50)),
('status', models.CharField(max_length=50)),
],
options={
'db_table': 'attendance',
},
),
]
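# Hedged illustration (editor addition, not generated by Django): the model
# class this migration corresponds to would look roughly like the sketch
# below in the app's models.py.  Field names and options are taken from the
# CreateModel operation above; the module placement and anything beyond the
# listed fields and db_table are assumptions.
#
#     from django.db import models
#
#     class Attendance(models.Model):
#         attendanceID = models.AutoField(primary_key=True)
#         studentId = models.IntegerField(default=0)
#         percentage = models.IntegerField(default=0)
#         date = models.CharField(max_length=50)
#         status = models.CharField(max_length=50)
#
#         class Meta:
#             db_table = 'attendance'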
| 28.111111
| 86
| 0.544137
|
056cc74b1c088bd4b92caaa4e7e0f0ba390f10ef
| 4,153
|
py
|
Python
|
channels/auth.py
|
milos-u/channels
|
a393253b0544bc2a6fd480dd9c53ed433a2fa103
|
[
"BSD-3-Clause"
] | null | null | null |
channels/auth.py
|
milos-u/channels
|
a393253b0544bc2a6fd480dd9c53ed433a2fa103
|
[
"BSD-3-Clause"
] | null | null | null |
channels/auth.py
|
milos-u/channels
|
a393253b0544bc2a6fd480dd9c53ed433a2fa103
|
[
"BSD-3-Clause"
] | null | null | null |
import functools
from django.contrib import auth
from .sessions import channel_and_http_session, channel_session, http_session
def transfer_user(from_session, to_session):
"""
Transfers user from HTTP session to channel session.
"""
if auth.BACKEND_SESSION_KEY in from_session and \
auth.SESSION_KEY in from_session:
to_session[auth.BACKEND_SESSION_KEY] = from_session[auth.BACKEND_SESSION_KEY]
to_session[auth.SESSION_KEY] = from_session[auth.SESSION_KEY]
def channel_session_user(func):
"""
Presents a message.user attribute obtained from a user ID in the channel
session, rather than in the http_session. Turns on channel session implicitly.
"""
@channel_session
@functools.wraps(func)
def inner(message, *args, **kwargs):
# If we didn't get a session, then we don't get a user
if not hasattr(message, "channel_session"):
raise ValueError("Did not see a channel session to get auth from")
if message.channel_session is None:
# Inner import to avoid reaching into models before load complete
from django.contrib.auth.models import AnonymousUser
message.user = AnonymousUser()
# Otherwise, be a bit naughty and make a fake Request with just
# a "session" attribute (later on, perhaps refactor contrib.auth to
# pass around session rather than request)
else:
fake_request = type("FakeRequest", (object, ), {"session": message.channel_session})
message.user = auth.get_user(fake_request)
# Run the consumer
return func(message, *args, **kwargs)
return inner
def http_session_user(func):
"""
Wraps a HTTP or WebSocket consumer (or any consumer of messages
that provides a "COOKIES" attribute) to provide both a "session"
    attribute and a "user" attribute, like AuthMiddleware does.
This runs http_session() to get a session to hook auth off of.
If the user does not have a session cookie set, both "session"
and "user" will be None.
"""
@http_session
@functools.wraps(func)
def inner(message, *args, **kwargs):
# If we didn't get a session, then we don't get a user
if not hasattr(message, "http_session"):
raise ValueError("Did not see a http session to get auth from")
if message.http_session is None:
# Inner import to avoid reaching into models before load complete
from django.contrib.auth.models import AnonymousUser
message.user = AnonymousUser()
# Otherwise, be a bit naughty and make a fake Request with just
# a "session" attribute (later on, perhaps refactor contrib.auth to
# pass around session rather than request)
else:
fake_request = type("FakeRequest", (object, ), {"session": message.http_session})
message.user = auth.get_user(fake_request)
# Run the consumer
return func(message, *args, **kwargs)
return inner
def channel_session_user_from_http(func):
"""
Decorator that automatically transfers the user from HTTP sessions to
channel-based sessions, and returns the user as message.user as well.
Useful for things that consume e.g. websocket.connect
"""
@http_session_user
@channel_session
@functools.wraps(func)
def inner(message, *args, **kwargs):
if message.http_session is not None:
transfer_user(message.http_session, message.channel_session)
return func(message, *args, **kwargs)
return inner
def channel_and_http_session_user_from_http(func):
"""
Decorator that automatically transfers the user from HTTP sessions to
channel-based sessions, rehydrates the HTTP session, and returns the
user as message.user as well.
"""
@http_session_user
@channel_and_http_session
@functools.wraps(func)
def inner(message, *args, **kwargs):
if message.http_session is not None:
transfer_user(message.http_session, message.channel_session)
return func(message, *args, **kwargs)
return inner
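# Hedged usage sketch (editor addition, not part of the original module): the
# consumer below illustrates how these decorators are typically stacked on a
# channels 1.x ``websocket.connect`` consumer so that ``message.user`` and
# ``message.channel_session`` are available.  The consumer name, the stored
# session key and the reply payload are illustrative assumptions.
@channel_session_user_from_http
def _example_ws_connect(message):
    """Illustrative websocket.connect consumer using the decorators above."""
    # The decorator stack transferred the user from the HTTP session, so
    # ``message.user`` is either a real user or an AnonymousUser instance.
    message.channel_session['username'] = message.user.get_username()
    # Accept the connection (standard channels 1.x handshake reply).
    message.reply_channel.send({'accept': True})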
| 39.552381
| 96
| 0.684084
|
460c7a9883fe11394528aa6a59a1f67ae9375f44
| 59,795
|
py
|
Python
|
qtgui/widgets/image.py
|
CogSciUOS/DeepLearningToolbox
|
bf07578b9486d8c48e25df357bc4b9963b513b46
|
[
"MIT"
] | 2
|
2019-09-01T01:38:59.000Z
|
2020-02-13T19:25:51.000Z
|
qtgui/widgets/image.py
|
CogSciUOS/DeepLearningToolbox
|
bf07578b9486d8c48e25df357bc4b9963b513b46
|
[
"MIT"
] | null | null | null |
qtgui/widgets/image.py
|
CogSciUOS/DeepLearningToolbox
|
bf07578b9486d8c48e25df357bc4b9963b513b46
|
[
"MIT"
] | null | null | null |
"""QWidgets for displaying images.
There are two Qt classes associated with images: `QPixmap` and
`QImage`. The `QPixmap` is an off-screen representation that
can be used as a paint device. A `QPixmap` cannot be manipulated
outside a GUI thread, while a `QImage` has no such restrictions.
If you plan to draw the same image more than once, use a `QPixmap`.
The `QImage` provides hardware-independent image representation that
allows direct access to pixel data, and can also be used as a paint
device. If you plan to modify an image, use `QImage`.
"""
# standard imports
from typing import Union, List, Iterable, Tuple
import logging
# Generic imports
import numpy as np
# Qt imports
from PyQt5.QtCore import Qt, QPoint, QPointF, QSize, QRect
from PyQt5.QtCore import pyqtSignal, pyqtSlot
from PyQt5.QtGui import QImage, QPainter, QPen, QTransform
from PyQt5.QtGui import QKeyEvent, QMouseEvent, QPaintEvent, QResizeEvent
from PyQt5.QtWidgets import QWidget, QMenu, QAction, QSizePolicy, QVBoxLayout
from PyQt5.QtWidgets import QToolTip
# toolbox imports
from dltb.base.data import Data
from dltb.base.meta import Metadata
from dltb.base.image import Image, Imagelike, ImageObservable
from dltb.base.image import BoundingBox, PointsBasedLocation, Region, Landmarks
from dltb.tool.image import ImageTool
from dltb.util.image import imresize, imwrite, grayscaleNormalized
from dltb.datasource import Imagesource
from toolbox import Toolbox
# GUI imports
from ..utils import QObserver, protect
from .navigation import QIndexControls
# logging
LOG = logging.getLogger(__name__)
# FIXME[todo]: add docstrings!
# FIXME[todo]: rename: QImageView is an old name ...
def imageToQImage(image: Imagelike) -> QImage:
if image is None:
return None
image = Image.as_array(image)
# To construct an 8-bit monochrome QImage, we need a
# 2-dimensional, uint8 numpy array
if image.ndim == 4:
image = image[0]
img_format = QImage.Format_Grayscale8
bytes_per_line = image.shape[1]
if image.ndim == 3:
# three channels -> probably rgb
if image.shape[2] == 3:
img_format = QImage.Format_RGB888
bytes_per_line *= 3
else:
image = image[:, :, 0]
if image.dtype != np.uint8:
if image.max() < 2:
image = (image * 255).astype(np.uint8)
else:
image = image.astype(np.uint8)
image = image.copy()
return QImage(image, image.shape[1], image.shape[0],
bytes_per_line, img_format)
def qimageFromFilename(filename: str) -> QImage:
return QImage(filename)
def qimageFromArray(array: np.ndarray, copy: bool = True) -> QImage:
array = array.astype(np.uint8) # FIXME[todo]: maybe scale to 0-255?
array = np.transpose(array, (1, 0, 2)).copy()
return QImage(array.data, array.shape[1], array.shape[0],
3 * array.shape[1], QImage.Format_RGB888)
def qimageToArray(qimage: QImage, copy: bool = True) -> np.ndarray:
# qimage = qimage.convertToFormat(QImage.Format_RGBA8888)
# channels = 4
# qimage = qimage.convertToFormat(QImage.Format_RGB32)
# channels = 4
qimage = qimage.convertToFormat(QImage.Format_RGB888)
channels = 3
width = qimage.width()
height = qimage.height()
ptr = qimage.constBits()
array = np.array(ptr).reshape(height, width, channels) # Copies the data
return array
LOG.info("Adapting dltb.base.image.Image: "
"adding static method 'imageToQImage'")
Image.as_qimage = staticmethod(imageToQImage)
Imagesource.add_loader('qt', qimageFromFilename)
Image.add_converter(QImage,
lambda image, copy: (qimageToArray(image, copy), copy),
target='array')
Image.add_converter(np.ndarray,
lambda image, copy: (qimageFromArray(image, copy), copy),
target='qimage')
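# Hedged example (editor addition): a minimal round trip through the helpers
# above -- QImage -> numpy array via qimageToArray(), and back to a QImage
# via imageToQImage().  The 48x32 solid-red test image is an arbitrary
# assumption; the function is never called at import time.
def _demo_qimage_roundtrip() -> bool:
    """Return True if a small RGB QImage keeps its size and shape when
    converted to a numpy array and back."""
    qimage = QImage(48, 32, QImage.Format_RGB888)
    qimage.fill(Qt.red)
    array = qimageToArray(qimage)   # expected shape: (height, width, 3)
    back = imageToQImage(array)     # expected size: 48x32 again
    return array.shape == (32, 48, 3) and back.size() == qimage.size()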
class QImageView(QWidget, QObserver, Toolbox.Observer, qobservables={
Toolbox: {'input_changed'},
ImageObservable: {'image_changed'}}):
"""An experimental class to display images using the ``QImage``
class. This may be more efficient than using matplotlib for
displaying images.
Attributes
----------
_data: Data
The data object from which the image is taken (may be `None`)
_image: QImage
The image displayed
_overlay: QImage
Overlay for displaying on top of the image
_show_raw: bool
A flag indicating whether this QImageView will show
the raw input data, or the data actually fed to the network.
_showMetadata: bool
A flag indicating if metadata should be shown if available.
_toolTipActive : bool
        A flag indicating whether tooltips should be shown.
Attributes
----------
_toolbox: Toolbox
The toolbox we want to observe.
_processed: bool
A flag indicating if the raw or the preprocessed input
data should be shown.
_activation: ActivationEngine
The activation Engine observed by this QImageView.
_image: QImage = None
_raw: np.ndarray = None
_marks: list = None
_overlay: QImage = None
_show_raw: bool = False
_keepAspect: bool = True
_showMetadata: bool = True
_metadata: Metadata = None
_metadata2: Metadata = None
_regions: list
_contextMenu: QMenu
A context menu allowing to save the current image.
Signals
-------
modeChanged = pyqtSignal(bool)
Toolbox interface
-----------------
A :py:class:`QImageView` to display the input image of the
Toolbox.
**Toolbox.Observer:**
The :py:class:`QImageView` can observe a
:py:class:`Toolbox` to always display the
current input image. If a 'input_changed' is reported, the
QImageView is updated to display the new input image.
ActivationTool interface
------------------------
**ActivationEngine.Observer:**
FIXME[todo]: Currently not implemented!
ImageGenerator Observer
-----------------------
The QImageView may be assigned to an :py:class:`ImageGenerator`.
If this is the case, the :py:class:`ImageGenerator` can provide
information on how the image is used by that tool and that
information can be visualized by the :py:class:`QImageView`.
"""
modeChanged = pyqtSignal(bool)
currentRegionChanged = pyqtSignal(int)
def __init__(self, toolbox: Toolbox = None, **kwargs) -> None:
"""Construct a new :py:class:`QImageView`.
Arguments
---------
        toolbox: Toolbox
            An optional :py:class:`Toolbox` whose input image is displayed.
        parent: QWidget
            An optional parent widget (passed on to the QWidget constructor).
"""
super().__init__(**kwargs)
self._raw: np.ndarray = None
self._show_raw = False
self._data = None
self._image = None
self._activation = None
self._marks = None
self._receptiveField = None
self._overlay = None
self._metadata = None
self._metadata2 = None
self._regions = []
self._currentRegion = -1 # -1 means no region selected
self._overlay = None
self._keepAspect = True
self._showMetadata = True
self._offset = QPointF(0, 0)
self._zoom = 1.0
self._mouseStartPosition = None
self._processed = False
self._toolTipActive = False
self.setToolTip(False)
self._toolbox: Toolbox = None
self.setToolbox(toolbox)
self._imageTool = None
self._imageToolRect = None
self._imageToolSize = None
self._imageToolTransform = None
#
# Prepare the context Menu
#
self._contextMenu = QMenu(self)
self._contextMenu.addAction(QAction('Info', self))
aspectAction = QAction('Keep Aspect Ratio', self)
aspectAction.setCheckable(True)
aspectAction.setChecked(self._keepAspect)
aspectAction.setStatusTip('Keep original aspect ratio of the image')
aspectAction.toggled.connect(self.onAspectClicked)
self._contextMenu.addAction(aspectAction)
self._contextMenu.addSeparator()
saveAction = QAction('Save image', self)
saveAction.setStatusTip('save the current image')
saveAction.triggered.connect(self.onSaveClicked)
self._contextMenu.addAction(saveAction)
self._contextMenu.addAction(QAction('Save image as ...', self))
# set button context menu policy
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.onContextMenu)
# set the size policy
#
# QSizePolicy.MinimumExpanding: The sizeHint() is minimal, and
# sufficient. The widget can make use of extra space, so it
# should get as much space as possible.
sizePolicy = QSizePolicy(QSizePolicy.MinimumExpanding,
QSizePolicy.MinimumExpanding)
# sizePolicy.setHeightForWidth(True)
self.setSizePolicy(sizePolicy)
# By default, a QWidget does not accept the keyboard focus, so
# we need to enable it explicitly: Qt.StrongFocus means to
# get focus by 'Tab' key as well as by mouse click.
self.setFocusPolicy(Qt.StrongFocus)
@staticmethod
def minimumSizeHint():
return QSize(100, 100) # default is QSize(-1, -1)
@staticmethod
def sizeHint():
return QSize(200, 200)
@staticmethod
def heightForWidth(width: int) -> int:
return width
@staticmethod
def hasHeightForWidth() -> bool:
return True
@protect
def onContextMenu(self, point: QPoint):
"""React to the display of a context menu.
Arguments
---------
point:
The location where the context menu is to be displayed, in
widgets coordinates (relative to the left upper corner
of this :py:class:`QImageView`).
"""
# show context menu
self._contextMenu.exec_(self.mapToGlobal(point))
@protect
def onAspectClicked(self, checked):
self.keepAspectRatio = checked
@protect
def onSaveClicked(self, checked):
if (self._raw is not None and
self._metadata is not None and
self._metadata.has_attribute('basename')):
# write the file
imwrite(self._metadata.get_attribute('basename'), self._raw)
@property
def keepAspectRatio(self) -> bool:
return self._keepAspect
@keepAspectRatio.setter
def keepAspectRatio(self, flag: bool) -> None:
if self._keepAspect != flag:
self._keepAspect = flag
self.update()
@property
def showMetadata(self) -> bool:
return self._showMetadata
@showMetadata.setter
def showMetadata(self, flag: bool) -> None:
if self._showMetadata != flag:
self._showMetadata = flag
self.update()
def setToolTip(self, active: bool = True) -> None:
"""Turn on/off the tooltips for this Widget.
The tooltip will display the index and value for the matrix
at the current mouse position.
Parameters
----------
active: bool
Will turn the tooltips on or off.
"""
self._toolTipActive = active
self.setMouseTracking(self._toolTipActive)
if not self._toolTipActive:
QToolTip.hideText()
def imageTool(self) -> ImageTool:
"""The :py:class:`ImageTool` used by this :py:class:`QImageView`.
"""
return self._imageTool
def setImageTool(self, imageTool: ImageTool) -> None:
"""Set an :py:class:`ImageTool` for this :py:class:`QImageView`.
Arguments
---------
imageTool:
The :py:class:`ImageTool` to be used by this
:py:class:`QImageView`. `None` means that no
:py:class:`ImageTool` is used.
"""
self._imageTool = imageTool
self._updateImageTool()
def _updateImageTool(self) -> None:
"""Update the :py:class:`ImageTool`
"""
imageTool, image = self._imageTool, self._raw
if imageTool is None or image is None:
self._imageToolRect = None
self._imageToolSize = None
self._imageToolTransform = None
else:
rectangle = imageTool.region_of_image(image)
self._imageToolRect = \
QRect(rectangle[0], rectangle[1],
rectangle[2]-rectangle[0], rectangle[3]-rectangle[1])
size = QSize(*imageTool.size_of_image(image))
imageSize = QSize(image.shape[1], image.shape[0])
self._imageToolSize = size
# imageToolTransform: map tool coordinates to image coordinates
self._imageToolTransform = QTransform()
self._imageToolTransform.translate(self._imageToolRect.x(),
self._imageToolRect.y())
self._imageToolTransform.scale(self._imageToolRect.width() /
size.width(),
self._imageToolRect.height() /
size.height())
self.update()
def setData(self, data: Data, attribute='array', index: int = 0) -> None:
"""Set the data to be displayed by this :py:class:`QImageView`.
"""
LOG.debug("QImageView.setData(%s, attribute='%s', index=%d): "
"new=%s, none=%s", data, attribute, index,
data is not self._data, data is None)
data = data[index] if data is not None and data.is_batch else data
if data is self._data:
return # nothing changed
self._data = data
self.setImage(None if data is None else getattr(data, attribute))
def setImagelike(self, image: Imagelike) -> None:
array = Image.as_array(image, dtype=np.uint8)
self.setImage(array)
def getImage(self) -> np.ndarray:
return self._raw
def setImage(self, image: np.ndarray) -> None:
"""Set the image to display.
"""
LOG.debug("QImageView.setImage(%s)", image is not None and image.shape)
self._raw = image
self._marks = []
self._regions = []
self._currentRegion = -1
self._image = imageToQImage(image)
if self._image is not None:
pass
# self.resize(self._image.size())
# self.updateGeometry()
self._updateImageTool()
def setMask(self, mask):
"""Set a mask to be displayed on top of the actual image.
Parameters
----------
mask: numpy.ndarray
If the mask has a different size than the actual image,
it will be resized to fit the image.
The mask is assumed to be in the (height,width) format.
"""
if mask is None:
self._overlay = None
else:
# We will use the mask values as alpha channel of a some
# solid (red) QImage. This QImage can be displayed on top
# of the actual image (which will be the background).
#
# Some care has to be taken here:
# (1) The QImage should have the same size as the background
# image.
# (2) To use the mask as the alpha channel of a QImage, it
# should be a contiguous uint8 numpy array of appropriate
# size.
#
print(f"A: mask: {mask.flags['C_CONTIGUOUS']}, dtype={mask.dtype}, shape={mask.shape}, min={mask.min()}, max={mask.max()}")
if not mask.flags['C_CONTIGUOUS'] or mask.dtype != np.uint8:
mask = np.ascontiguousarray(mask, np.uint8)
print(f"B: mask: {mask.flags['C_CONTIGUOUS']}, dtype={mask.dtype}, shape={mask.shape}, min={mask.min()}, max={mask.max()}")
# print(f"B: mask: resize({mask.shape} -> {(self._image.height(), self._image.width())})")
mask = imresize(mask, (self._image.width()-5, self._image.height()-5))
print(f"C: mask: {mask.flags['C_CONTIGUOUS']}, dtype={mask.dtype}, shape={mask.shape}, min={mask.min()}, max={mask.max()}")
mask = mask.astype(np.uint8)
print(f"D: mask: {mask.flags['C_CONTIGUOUS']}, dtype={mask.dtype}, shape={mask.shape}, min={mask.min()}, max={mask.max()}")
self._overlay = QImage(mask.shape[1], mask.shape[0],
QImage.Format_ARGB32)
self._overlay.fill(Qt.red)
alpha = QImage(mask, mask.shape[1], mask.shape[0],
mask.shape[1], QImage.Format_Alpha8)
painter = QPainter(self._overlay)
painter.setCompositionMode(QPainter.CompositionMode_DestinationIn)
painter.drawImage(QPoint(), alpha)
painter.end()
self.update()
def setReceptiveField(self, field: QRect, size: QSize = None) -> None:
"""Set the receptive field of a unit in an activation map.
@param field
        A rectangle marking the boundaries of the receptive field
@param size
The size of the reference system, relative to which the
receptive field is described (e.g., the shape of the input
layer of a :py:class:`Network`). If no reference size is
is provided, the coordinates are assumed to refer to pixel
positions in the image.
"""
if self._image is None:
self._receptiveField = None
elif size is None or size == self._image.size():
self._receptiveField = field
else:
# scale the receptive field to fit image coordinates
ratioX = self._image.width() / size.width()
ratioY = self._image.height() / size.height()
            point1 = QPoint(round(field.left() * ratioX),
                            round(field.top() * ratioY))
            point2 = QPoint(round(field.right() * ratioX),
                            round(field.bottom() * ratioY))
self._receptiveField = QRect(point1, point2)
self.update()
def addMark(self, rect: QRect):
"""Mark a rectangle in the image.
The rectangle provides the image coordinates to mark.
"""
self._marks.append(rect)
def setMetadata(self, metadata: Metadata, metadata2: Metadata = None):
"""Set metadata to be displayed in this View.
"""
self._metadata = metadata
self._metadata2 = metadata2
self.update()
    def addRegion(self, region: Region) -> None:
        """Add a region to be displayed on top of the current image.
        """
self._regions.append(region)
self.update()
def addLandmarks(self, landmarks: Landmarks) -> None:
"""Add landmarks to be displayed on the current image.
"""
self._regions.append(Region(landmarks))
self.update()
    def invalidateRegion(self, index: int = None) -> None:
        """Invalidate a region in this :py:class:`QImageView`.
        Invalid regions will be displayed differently from valid regions.
        Arguments
        ---------
        index:
            An index identifying the region to be invalidated. If no index is
            provided, the currently selected region is used. If there is
            no such index, the method will do nothing.
"""
if index is None:
index = self._currentRegion
if not self._regions or not 0 <= index < len(self._regions):
return
region = self._regions[index]
region.invalid = not getattr(region, 'invalid', False)
self.update()
def transform(self) -> QTransform:
if self._image is None:
return None
w = self._image.width()
h = self._image.height()
# scale maximally while maintaining aspect ratio
w_ratio = self.width() / w
h_ratio = self.height() / h
if self._keepAspect:
w_ratio = min(w_ratio, h_ratio)
h_ratio = w_ratio
w_ratio *= self._zoom
h_ratio *= self._zoom
# the rect is created such that it is centered on the
# current widget pane both horizontally and vertically
x = (self.width() - w * w_ratio) // 2
y = (self.height() - h * h_ratio) // 2
transform = QTransform()
transform.translate(x + self._offset.x(), y + self._offset.y())
transform.scale(w_ratio, h_ratio)
return transform
def setZoom(self, zoom: float) -> None:
self._zoom = zoom
self.update()
@protect
def paintEvent(self, event: QPaintEvent) -> None:
"""Process the paint event by repainting this Widget.
Parameters
----------
event : QPaintEvent
"""
# FIXME[bug?]: this methods seems to be invoked quite often
# - check if this is so and why!
painter = QPainter()
painter.begin(self)
# Compute the transformation
if self._image is not None:
painter.setTransform(self.transform())
self._drawImage(painter)
self._drawImageTool(painter)
self._drawReceptiveField(painter)
self._drawMask(painter)
self._drawMarks(painter)
self._drawMetadata(painter)
painter.end()
def _drawImage(self, painter: QPainter):
"""Draw current image into this ``QImageView``.
Parameters
----------
painter : QPainter
"""
if self._image is not None:
painter.drawImage(QPoint(0, 0), self._image)
def _drawImageTool(self, painter: QPainter):
"""Draw current image into this ``QImageView``.
Parameters
----------
painter : QPainter
"""
if self._imageToolRect is not None:
pen_width = 4
pen_color = Qt.blue
pen = QPen(pen_color)
pen.setWidth(pen_width)
painter.setPen(pen)
painter.drawRect(self._imageToolRect)
def _drawMask(self, painter: QPainter):
"""Display the given image.
Parameters
----------
painter : QPainter
"""
if self._image is not None and self._overlay is not None:
painter.drawImage(QPoint(0, 0), self._overlay)
def _drawReceptiveField(self, painter: QPainter):
if self._receptiveField is None:
return
pen_width = 4
pen_color = Qt.green
pen = QPen(pen_color)
pen.setWidth(pen_width)
painter.setPen(pen)
painter.drawRect(self._receptiveField)
def _drawMarks(self, painter: QPainter):
"""Draw marks on current image into this ``QImageView``.
Parameters
----------
painter : QPainter
"""
if not self._marks:
return
for mark in self._marks:
if isinstance(mark, QRect):
pen_width = 4
pen_color = Qt.green
pen = QPen(pen_color)
pen.setWidth(pen_width)
painter.setPen(pen)
painter.drawRect(mark)
def _drawMetadata(self, painter: QPainter) -> None:
if self._image is not None and self.showMetadata:
line_width = 4
line_width *= (1+max(self._image.width(),
self._image.height())//400)
greenPen = QPen(Qt.green)
greenPen.setWidth(line_width)
redPen = QPen(Qt.red)
redPen.setWidth(line_width)
bluePen = QPen(Qt.blue)
bluePen.setWidth(line_width)
for index, region in enumerate(self._regions):
painter.setPen(bluePen if index == self._currentRegion else
(redPen if getattr(region, 'invalid', False) else
greenPen))
self._drawRegion(painter, region)
self._drawMetadataHelper(painter, self._metadata, color=Qt.green)
self._drawMetadataHelper(painter, self._metadata2, color=Qt.red)
def _drawMetadataHelper(self, painter: QPainter, metadata: Metadata,
pen: QPen = None, line_width=4, color=Qt.green):
if metadata is None or not metadata.has_regions():
return
if pen is None:
pen = QPen(color)
pen.setWidth(line_width * (1+max(self._image.width(),
self._image.height())//400))
painter.setPen(pen)
for region in metadata.regions:
self._drawRegion(painter, region)
def _drawRegion(self, painter: QPainter, region: Region) -> None:
"""Draw a :py:class:`Region` on top of image. Currently supported
regions are :py:class:`BoundingBox` (displayed as rectangle) and
:py:class:`PointsBasedLocation` (displayed as points).
"""
location = region.location
if isinstance(location, BoundingBox):
painter.drawRect(location.x, location.y,
location.width, location.height)
if getattr(region, 'invalid', False):
redPen = QPen(Qt.red)
redPen.setWidth(5)
painter.setPen(redPen)
painter.drawLine(location.x1, location.y1,
location.x2, location.y2)
painter.drawLine(location.x2, location.y1,
location.x1, location.y2)
elif isinstance(location, PointsBasedLocation):
for p in location.points:
painter.drawPoint(p[0], p[1])
#
# Events
#
@protect
def keyPressEvent(self, event: QKeyEvent) -> None:
"""Process key events. The :py:class:`QImageView` supports
the following keys:
space: toggle tool tips
r: toggle the keepAspectRatio flag
"""
key = event.key()
# Space will toggle display of tooltips
if key == Qt.Key_Space:
self.setToolTip(not self._toolTipActive)
elif key == Qt.Key_R:
self.keepAspectRatio = not self.keepAspectRatio
elif key == Qt.Key_M:
self.showMetadata = not self.showMetadata
elif key == Qt.Key_Space:
self.setMode(not self._processed)
elif key == Qt.Key_Delete:
self.invalidateRegion()
elif key == Qt.Key_I:
LOG.info("image: %s, regions(%s): %s, "
"metadata: %s, metadata2: %s, "
"mode=%s, keep aspect ratio=%s, show meta data=%s, "
"tooltip=%s",
self._image is not None,
type(self._regions), self._regions and len(self._regions),
self._metadata is not None, self._metadata2 is not None,
self.mode(), self.keepAspectRatio, self.showMetadata,
self._toolTipActive)
else:
super().keyPressEvent(event)
@protect
def mousePressEvent(self, event: QMouseEvent) -> None:
"""A mouse press toggles between raw and processed mode.
"""
self.setMode(not self.mode())
# remember start position (for dragging the image around)
self._mouseStartPosition = event.pos()
# get the mouse position im image coordinates
transform = self.transform()
if transform is None or not transform.isInvertible():
return # we can not determine the image coordinates
inverted, _invertible = transform.inverted()
imagePosition = inverted.map(event.pos())
# check if mouse clicked into some region of interest
for index, region in enumerate(self._regions):
point = (imagePosition.x(), imagePosition.y())
if point in region:
                self.setCurrentRegion(-1 if index == self._currentRegion
                                      else index)
break
else:
            self.setCurrentRegion(-1)
@protect
def mouseReleaseEvent(self, event: QMouseEvent) -> None:
"""A mouse press toggles between raw and processed mode.
"""
if self._mouseStartPosition is None:
return # something strange happened - we will ignore this
self._offset += event.pos() - self._mouseStartPosition
self._mouseStartPosition = None
self.update()
@protect
def mouseDoubleClickEvent(self, event: QMouseEvent) -> None:
if self._currentRegion >= 0:
self.invalidateRegion()
else:
# Reset the image
self._zoom = 1.0
self._offset = QPointF(0, 0)
self._mouseStartPosition = None
self.update()
def wheelEvent(self, event):
"""Process the wheel event. The mouse wheel can be used for
zooming.
Parameters
----------
event : QWheelEvent
The event providing the angle delta.
"""
delta = event.angleDelta().y() / 120 # will be +/- 1
# position = (self._offset + event.pos()) / self._zoom
zoom = (1 + delta * 0.01) * self._zoom
self.setZoom(zoom)
# offset = (position * self._zoom) - event.pos()
# self.setOffset(offset)
# We will accept the event, to prevent interference
# with the QScrollArea.
event.accept()
# Attention: The mouseMoveEvent() is only called for regular mouse
# movements, if mouse tracking is explicitly enabled for this
# widget by calling self.setMouseTracking(True). Otherwise it may
# be called on dragging.
@protect
def mouseMoveEvent(self, event: QMouseEvent) -> None:
"""Process mouse movements. If tooltips are active, information on
mouse pointer position are displayed.
Parameters
----------
event: QMouseEvent
The mouse event, providing local and global coordinates.
"""
if not self._toolTipActive:
print("QImageView: mouseMoveEvent:", event.pos(),
bool(event.buttons() & Qt.LeftButton),
bool(event.buttons() & Qt.RightButton))
if self._mouseStartPosition is not None:
self._offset += event.pos() - self._mouseStartPosition
self._mouseStartPosition = event.pos()
self.update()
return
position = event.pos()
size = self.size()
text = f"Screen position: {(position.x(), position.y())}"
text += f" in {size.width()}x{size.height()}"
if self._image is None:
text += "\nNo image"
else:
image_size = self._image.size()
transform = self.transform()
imagePosition = None
if transform is None:
text += "\nNo transformation"
elif not transform.isInvertible():
text += "\nTransformation not invertible"
else:
inverted, _invertible = transform.inverted()
image_rect = QRect(0, 0, image_size.width(), image_size.height())
projected_image_rect = transform.mapRect(image_rect)
projected_image_size = projected_image_rect.size()
text += (f"\nScreen image size: {projected_image_size.width()}x"
f"{projected_image_size.height()}")
imagePosition = inverted.map(position)
text += (f"\nScreen image position: "
f"({imagePosition.x()}, {imagePosition.y()})"
f" in {image_size.width()}x{image_size.height()}")
if self._imageTool is None:
text += "\nNo ImageTool"
else:
# toolTransform = self._imageToolTransformation
text += "\nImageTool: "
rect = self._imageToolRect
if rect is None:
text += "None"
else:
text += f"{rect}"
size = self._imageToolSize
if size is None:
text += "None"
else:
text += f"{size}"
tTransform = self._imageToolTransform
if tTransform is not None and imagePosition is not None:
iTransform, ok = self._imageToolTransform.inverted()
if ok:
toolPosition = iTransform.map(imagePosition)
text += f" at {toolPosition}"
if self._overlay is None:
text += "\nNo activation"
else:
text += f"\nActivation shape: {self._overlay.size()}"
QToolTip.showText(event.globalPos(), text)
#
# ImageTool.Observer
#
@protect
def image_changed(self, observable: ImageObservable,
change: ImageObservable.Change) -> None:
"""A new image is available. Display that image in this
        :py:class:`QImageView`.
"""
self.setImage(observable.image)
#
# Toolbox.Observer
#
@protect
def toolbox_changed(self, toolbox: Toolbox, info: Toolbox.Change) -> None:
if info.input_changed:
self.setData(toolbox.input_data)
#
# ActivationEngine.Observer
#
# FIXME[old]
def activation_changed(self, engine, #: ActivationEngine,
info # : ActivationEngine.Change
) -> None:
"""The :py:class:`QImageView` is interested in the
        input images, activations and units.
"""
if info.input_changed:
self._updateImage()
return # FIXME[old]
if info.activation_changed: # or info.unit_changed: FIXME[old]
try:
activation = engine.get_activation()
unit = engine.unit
except:
activation = None
unit = None
# For convolutional layers add a activation mask on top of the
# image, if a unit is selected
if (activation is not None and unit is not None and
activation.ndim > 1):
# exclude dense layers
activation_mask = grayscaleNormalized(activation[..., unit])
self.setMask(activation_mask)
field = engine.receptive_field
if field is not None:
self.addMark(QRect(field[0][1], field[0][0],
field[1][1]-field[0][1],
field[1][0]-field[0][0]))
else:
self.setMask(None)
#
# Update
#
def _updateImage(self) -> None:
"""Set the image to be displayed either based on the current
state of the Toolbox or the ActivationEngine. This will also
take the current display mode (raw or processed) into account.
"""
if self._activation is None:
data = self._toolbox.input_data if self._toolbox else None
elif self._processed:
data = self._activation.input_data
else:
data = self._activation.raw_input_data
self.setData(data)
# FIXME[old]
# self.setMetadata(self._toolbox and self._toolbox.input_metadata)
def mode(self) -> bool:
return self._processed
def setMode(self, processed: bool) -> None:
"""The display mode was changed. There are two possible modes:
(1) processed=False (raw): the input is shown as provided by the source
(2) processed=True: the input is shown as presented to the network
Arguments
---------
precessed: bool
The new display mode (False=raw, True=processed).
"""
if not self._toolbox:
processed = False
if processed != self._processed:
self._processed = processed
self.modeChanged.emit(processed)
self._updateImage()
@pyqtSlot(int)
def setCurrentRegion(self, index: int) -> None:
"""Set the index of the currently selected region.
Arguments
---------
index:
The new index. A value of `-1` means that no region is
selected.
"""
if index != self._currentRegion:
self._currentRegion = index
self.currentRegionChanged.emit(index)
self.update()
@pyqtSlot(int)
    def updateRegion(self, index: int) -> None:
        """Trigger a repaint after the region with the given index
        was modified.
        Arguments
        ---------
        index:
            The index of the region that changed.
        """
self.update()
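# Hedged usage sketch (editor addition, not part of the original widget API):
# a minimal stand-alone viewer built around QImageView.setImage().  The random
# test image is an assumption; the function is only meant to be called
# manually, never at import time.
def _demo_qimageview() -> None:
    """Show a random RGB image in a stand-alone QImageView (blocking)."""
    import sys
    from PyQt5.QtWidgets import QApplication
    app = QApplication(sys.argv)
    view = QImageView()
    view.setImage((np.random.rand(256, 256, 3) * 255).astype(np.uint8))
    view.show()
    app.exec_()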
class QImageBatchView(QWidget):
"""A :py:class:`QWidget` to display a batch of images.
"""
def __init__(self, images: Union[Data, np.ndarray] = None,
**kwargs) -> None:
super().__init__(**kwargs)
self._initUI()
self._layoutUI()
self._images = None
self.setImages(images)
def _initUI(self) -> None:
"""Initialize the user interface. The essential components
are a :py:class:`QImageView` to display an image and
some navigation tool to select an image from the batch.
"""
self._imageView = QImageView()
self._indexControls = QIndexControls()
self._indexControls.indexChanged.connect(self.onIndexChanged)
def _layoutUI(self) -> None:
"""Layout the widget.
"""
layout = QVBoxLayout()
layout.addWidget(self._imageView)
layout.addWidget(self._indexControls)
self.setLayout(layout)
def index(self) -> int:
"""The index of the currently selected image.
"""
return self._indexControls.index()
def setIndex(self, index: int) -> None:
"""Set the index of the image to display.
"""
# Update the indexControls if necessary. This will
# in turn emit the 'indexChanged' signal, which triggers
# this function again.
if index != self.index():
self._indexControls.setIndex(index)
elif (self._images is not None and
index >= 0 and index < len(self._images)):
self._imageView.setImage(self._images[index])
else:
self._imageView.setImage(None)
def setImages(self, images: Union[Data, np.ndarray],
index: int = 0) -> None:
"""Set the images to be displayed in this
:py:class:`QImageBatchView`.
Arguments
---------
images:
The images belonging to the batch.
index:
The index of the image to display.
"""
self._images = images
if images is not None:
self._indexControls.setEnabled(True)
self._indexControls.setElements(len(images))
else:
self._indexControls.setEnabled(False)
index = -1
self.setIndex(index)
self._indexControls.update()
@protect
def onIndexChanged(self, index: int) -> None:
"""A slot to react to changes of the batch index.
"""
self.setIndex(index)
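# Hedged usage sketch (editor addition): displaying a small random batch with
# QImageBatchView and its index controls.  Batch size and image shape are
# arbitrary assumptions made for the demo; only setImages()/setIndex() from
# the class above are relied upon.
def _demo_qimagebatchview() -> None:
    """Show a random batch of five images in a QImageBatchView (blocking)."""
    import sys
    from PyQt5.QtWidgets import QApplication
    app = QApplication(sys.argv)
    batch = (np.random.rand(5, 128, 128, 3) * 255).astype(np.uint8)
    view = QImageBatchView(batch)
    view.show()
    app.exec_()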
class QMultiImageView(QWidget):
"""Display multiple images. Generally, images are displayed in a grid,
in simple cases images are displayed either horizontally (in a
    row) or vertically (in a column). The grid is defined by a
    pair of integers (rows, columns), where one of these values can
    be `None`, meaning the grid can dynamically grow in that direction.
The size at which the individual images are displayed can be
controlled by the property :py:prop:`imageSize`.
The :py:class:`QMultiImageView` allows to select one image as
current image. When the current image is changed, the
`currentImageChanged` signal is emitted with the index of the
current image. A value of `-1` means that no image is selected.
Properties
----------
grid: Tuple[int, int]
The number of rows and columns displayed in this
:py:class:`QMultiImageView`. One of the entries can be `None`,
meaning that the grid can grow along that axis as required.
orientation: Qt.Orientation
The orientation of this :py:class:`QMultiImageView`. If
`Qt.Horizontal`, the grid will grow horizontally, meaning
images will be displayed in one (or multiple) rows, if
`Qt.Vertical`, images will be displayed in columns.
Readonly properties
-------------------
_images: List[Image]
A list of `Image`, each having a `qimage` attribute holding the
image to be displayed as a `QImage`.
    _regions:
A list of :py:class:`Region`s describing regions in a larger
image from which the images displayed in this
:py:class:`QMultiImageView` are extracted. The regions
may contain annotations which may influence how the corresponding
image is displayed.
"""
NavigationContinue: int = 1
NavigationCircular: int = 2
NavigationBlock: int = 3
currentImageChanged = pyqtSignal(int)
annotationsChanged = pyqtSignal(int)
_images: List[Image] = None
def __init__(self, orientation: Qt.Orientation = Qt.Horizontal,
grid: Tuple[int, int] = None,
**kwargs) -> None:
super().__init__(**kwargs)
self._regions = None
self._currentIndex = -1 # -1 means no image selected
if grid is not None:
self._grid = grid
else:
self._grid = \
(1, None) if orientation == Qt.Horizontal else (None, 1)
self.setImageSize(QSize(100, 100), spacing=10)
self._initLayout()
self._navigation = self.NavigationContinue
# By default, a QWidget does not accept the keyboard focus, so
# we need to enable it explicitly: Qt.StrongFocus means to
# get focus by 'Tab' key as well as by mouse click.
self.setFocusPolicy(Qt.StrongFocus)
def _initLayout(self) -> None:
# set the size policy
#
# QSizePolicy.MinimumExpanding: The sizeHint() is minimal, and
# sufficient. The widget can make use of extra space, so it
# should get as much space as possible.
if self.orientation() == Qt.Horizontal:
self.setSizePolicy(QSizePolicy.MinimumExpanding,
QSizePolicy.Fixed)
elif self.orientation() == Qt.Vertical:
self.setSizePolicy(QSizePolicy.Fixed,
QSizePolicy.MinimumExpanding)
else:
self.setSizePolicy(QSizePolicy.MinimumExpanding,
QSizePolicy.MinimumExpanding)
def count(self) -> int:
"""The number of images currently displayed in this
:py:class:`QMultiImageView`.
"""
return len(self._images)
def navigation(self) -> int:
return self._navigation
def setNavigation(self, navigation: int) -> None:
self._navigation = navigation
def rows(self) -> int:
"""The number of rows used for display in this
:py:class:`QMultiImageView`.
"""
if not self.count():
return 0
if self._grid[0] is not None:
return self._grid[0]
return -(-self.count() // self._grid[1])
def columns(self) -> int:
"""The number of columns used for display in this
:py:class:`QMultiImageView`.
"""
if not self.count():
return 0
if self._grid[1] is not None:
return self._grid[1]
return -(-self.count() // self._grid[0])
def grid(self) -> Tuple[int, int]:
"""The grid in terms of rows and columns used to display
images in this :py:class:`QMultiImageView`.
"""
return self._grid
def setGrid(self, grid: Tuple[int, int]) -> None:
"""Set the grid in terms of rows and columns used to
display images in this :py:class:`QMultiImageView`.
Either rows or columns may be `None`, indicating that the
display may grow as needed along this axis.
"""
if grid != self._grid:
self._grid = grid
self._gridChanged()
def orientation(self) -> Qt.Orientation:
"""The orientation of this :py:class:`QMultiImageView`. If
        `Qt.Horizontal`, images will be displayed in a row; if
        `Qt.Vertical`, images will be displayed in a column.
"""
return Qt.Horizontal if self._grid[1] is None else Qt.Vertical
def _gridPosition(self, index: int = None) -> Tuple[int, int]:
"""Get the grid position as (row, column) for the image with
the given index.
"""
if index is None:
index = self._currentIndex
if index is None:
return -1, -1
if self._grid[0] is not None:
row, column = index % self._grid[0], index // self._grid[0]
else:
row, column = index // self._grid[1], index % self._grid[1]
return row, column
def _indexForPosition(self, row: int, column: int) -> int:
if row == -1 or column == -1:
index = -1
elif self._grid[0] is not None:
index = column * self._grid[0] + row
else:
index = row * self._grid[1] + column
return index
def _setGridPosition(self, row: int, column: int) -> None:
"""Set the index of the currently selected image.
"""
index = self._indexForPosition(row, column)
if index >= self.count():
index = -1
self.setCurrentImage(-1 if index == self._currentIndex else index)
def _gridChanged(self) -> None:
"""The grid has changed. This implies that the size of this
:py:class:`QMultiImageView` may have changed.
"""
if self._grid[0] is not None: # Qt.Horizontal
width = ((-(-self.count() // self._grid[0])) *
(self._imageSize.width() + self._spacing))
height = self._grid[0] * (self._imageSize.height() + self._spacing)
else: # Qt.Vertical
width = self._grid[1] * (self._imageSize.width() + self._spacing)
height = ((-(-self.count() // self._grid[1])) *
(self._imageSize.height() + self._spacing))
self.setMinimumSize(width, height)
self.resize(width, height)
self.update()
def imageSize(self) -> QSize:
"""The size at which individual images are to be displayed
in this :py:class:`QMultiImageView`.
"""
return self._imageSize
def setImageSize(self, imageSize: QSize, spacing: int = None) -> None:
"""The size at which individual images are to be displayed
in this :py:class:`QMultiImageView`.
"""
self._imageSize = imageSize
if spacing is not None:
self._spacing = spacing
self.setMinimumSize(imageSize.width() + self._spacing,
imageSize.height() + self._spacing)
def spacing(self) -> int:
"""The spacing to be put between the images displayed
in this :py:class:`QMultiImageView`. At the outer boundary,
half of the spacing will be used. That is, each grid cell
occupies imageSize + spacing pixel, with the image centered
in that cell.
"""
return self._spacing
def setImages(self, images: Iterable[Imagelike]) -> None:
"""Set the images to be displayed by this
:py:class:`QMultiImageView`.
Arguments
---------
images:
The images to be added. These may be given in any image
format accepted by the toolbox, but will be converted
to :py:class:`QImage` internally.
"""
self._currentIndex = -1
self._regions = None
self._images = []
for image in images:
image = Image(image)
image.add_attribute('qimage', imageToQImage(image))
self._images.append(image)
self._gridChanged()
def setImagesFromRegions(self, image: Imagelike,
regions: Iterable[Region]) -> None:
"""Set the images to be displayed from regions of a larger image.
Arguments
---------
image:
The large image from which regions are cut out.
regions:
The regions to extract from that image. The regions
may contain further annotations which may be used to
influence the display.
"""
if image is None:
self._image = None
self._regions = []
self.setImages([])
elif True:
self._image = Image.as_array(image)
self._regions = list(regions)
self.setImages(region.extract_from_image(self._image)
for region in self._regions
if isinstance(region.location, BoundingBox))
else: # FIXME[todo]
self._qimage = imageToQImage(image)
self._regions = list(regions)
self._rects = [self._rects_for_image(index)
for index in range(len(self._regions))]
    def _rects_for_image(self, index: int) -> Tuple[QRect, QRect]:
        """Return the source rectangle (in the underlying large image) and
        the target rectangle (in widget coordinates) for the given index.
        """
        bbox = self._regions[index].location  # bounding box in the large image
source = QRect(bbox.x, bbox.y, bbox.width, bbox.height)
position = QPoint(self._spacing // 2, self._spacing // 2)
if self.orientation() == Qt.Horizontal:
position.setX(position.x() +
index * (self._imageSize.width() + self._spacing))
else:
position.setY(position.y() +
index * (self._imageSize.height() + self._spacing))
target = QRect(position, self._imageSize)
return source, target
def currentImage(self) -> int:
"""The index of the currently selected image.
"""
return self._currentIndex
@pyqtSlot(int)
def setCurrentImage(self, index: int) -> None:
"""Set the index of the currently selected image.
"""
        if index is None or index < 0:
            index = -1
        elif not self.count() or index >= self.count():
            index = -1
if index != self._currentIndex:
self._currentIndex = index
self.currentImageChanged.emit(index)
self.update()
def imagePosition(self, index: int = None) -> QPoint:
"""Return the center point of the image with the given index.
Arguments
---------
index:
An index identifying the desired image. If no index is
provided, the currently selected image is used.
Result
------
center:
The center point of the given image or `None` if no valid
image was specified.
"""
if index is None:
index = self.currentImage()
if not 0 <= index < len(self._images):
return None
horizontal_skip = self._spacing + self._imageSize.width()
vertical_skip = self._spacing + self._imageSize.height()
row, column = self._gridPosition()
return QPoint(column * horizontal_skip + (horizontal_skip // 2),
                      row * vertical_skip + (vertical_skip // 2))
@protect
def mousePressEvent(self, event: QMouseEvent) -> None:
"""A mouse press selects the current image.
"""
        row = event.y() // (self._imageSize.height() + self._spacing)
column = event.x() // (self._imageSize.width() + self._spacing)
self._setGridPosition(row, column)
@protect
def keyPressEvent(self, event: QKeyEvent) -> None:
"""Process `QKeyEvent`s. Use cursor keys to change the currently
selected image.
Arguments
---------
event:
The key event to process.
"""
key = event.key()
if key == Qt.Key_Space:
self.setCurrentImage(None)
elif key == Qt.Key_Escape:
self._setGridPosition(-1, -1)
elif key in (Qt.Key_Left, Qt.Key_Up, Qt.Key_Right, Qt.Key_Down):
# FIXME[todo]: different modes of navigation
# wrap <->
# incremental
# hard boundaries
row, column = self._gridPosition()
mode = 1 # wrap
if row < 0:
row, column = (0, 0) if self.count() else (-1, -1)
elif self._navigation == self.NavigationCircular:
if key == Qt.Key_Left:
column -= 1
if column < 0:
column = self.columns() - 1
if self._indexForPosition(row, column) >= self.count():
column -= 1
elif key == Qt.Key_Right:
column = (column+1) % self.columns()
if self._indexForPosition(row, column) >= self.count():
column = 0
elif key == Qt.Key_Up:
row -= 1
if row < 0:
row = self.rows() - 1
if self._indexForPosition(row, column) >= self.count():
row -= 1
elif key == Qt.Key_Down:
row = (row+1) % self.rows()
if self._indexForPosition(row, column) >= self.count():
row = 0
elif self._navigation == self.NavigationContinue:
if key == Qt.Key_Left:
column -= 1
if column < 0:
column = self.columns() - 1
if row > 0:
row -= 1
else:
row = self.rows() - 1
if self._indexForPosition(row, column) >= self.count():
if row > 1:
row -= 1
else:
column = self.count() - 1
elif key == Qt.Key_Right:
column = (column+1) % self.columns()
if column == 0:
row += 1
if self._indexForPosition(row, column) >= self.count():
row, column = 0, 0
elif key == Qt.Key_Up:
row -= 1
if row < 0:
row = self.rows() - 1
if column > 0:
column -= 1
else:
column = self.columns() - 1
if self._indexForPosition(row, column) >= self.count():
row -= 1
elif key == Qt.Key_Down:
row += 1
if self._indexForPosition(row, column) >= self.count():
row = 0
column = (column+1) % self.columns()
            elif self._navigation == self.NavigationBlock:
if key == Qt.Key_Left:
if column > 0:
column -= 1
elif key == Qt.Key_Right:
if self._indexForPosition(row, column+1) < self.count():
column += 1
elif key == Qt.Key_Up:
if row > 0:
row -= 1
elif key == Qt.Key_Down:
if self._indexForPosition(row+1, column) < self.count():
row += 1
self._setGridPosition(row, column)
elif key in (Qt.Key_Plus, Qt.Key_Minus):
grid = list(self._grid)
if grid[0] is None:
if key == Qt.Key_Plus:
grid[1] += 1
elif grid[1] > 1:
grid[1] -= 1
elif grid[1] is None:
if key == Qt.Key_Plus:
grid[0] += 1
elif grid[0] > 1:
grid[0] -= 1
self.setGrid(tuple(grid))
elif key == Qt.Key_D:
self._debug()
else:
super().keyPressEvent(event)
@protect
def resizeEvent(self, event: QResizeEvent) -> None:
"""Process the resize event. Resizing the
:py:class:`QMultiImageView` may adapt the image sizes.
        This is an ad hoc implementation; the details and policies still
        need to be spelled out!
"""
        # FIXME[todo]: define the exact behaviour and add methods to
# control the behaviour.
# - what is the exact meaning of the imageSize property:
# current size, minimum size?
# - how to deal with images of different sizes?
# -> all images are resized to imageSize
# -> keep aspect ratio?
# - should the new size respect some aspect ratio?
# at least for a fixed grid size (or when not embedded in some
# scroll area) this seems reasonable.
super().resizeEvent(event)
newSize = event.size()
if self._grid[0] is None: # Qt.Vertical
pass
elif self._grid[1] is None: # Qt.Horizontal
pass
else:
width = newSize.width() - self._grid[1] * self._spacing
height = newSize.height() - self._grid[0] * self._spacing
width //= self._grid[1]
height //= self._grid[0]
self._imageSize = QSize(width, height)
# FIXME[hack]: this will change the minimumSize which
# in turn prevents shrinking the widget again.
@protect
def paintEvent(self, event: QPaintEvent) -> None:
"""Process the paint event by repainting this Widget.
Parameters
----------
event:
            The :py:class:`QPaintEvent` describing the paint event.
"""
super().paintEvent(event)
painter = QPainter()
painter.begin(self)
line_width = 4
color = Qt.blue
self._bluePen = QPen(color)
self._bluePen.setWidth(line_width)
self._redPen = QPen(Qt.red)
self._redPen.setWidth(2 * line_width)
horizontal_skip = self._spacing + self._imageSize.width()
vertical_skip = self._spacing + self._imageSize.height()
half_spacing = self._spacing // 2
position = QPoint(half_spacing, half_spacing)
for index, image in enumerate(self._images):
# update position
row, column = self._gridPosition(index)
position = QPoint(column * horizontal_skip + half_spacing,
row * vertical_skip + half_spacing)
rect = QRect(position, self._imageSize)
self._paintImage(painter, index, image, rect)
painter.end()
def _paintImage(self, painter: QPainter, index: int,
image: Image, rect: QRect) -> None:
painter.drawImage(rect, image.qimage)
# draw decoration around current image
if index == self._currentIndex:
painter.setPen(self._bluePen)
painter.drawRect(rect.x()-2, rect.y()-2,
rect.width()+4, rect.height()+4)
def _debug(self) -> None:
"""Output some debug information to standard output.
"""
print(f"QMultiImageView: {self.count()} images, grid={self.grid()},"
f"size={self.size()}, sizeHint={self.sizeHint()}")
| 35.634684 | 135 | 0.570365 |
359ee0738ef4b8d1d83535433cfcf294a7a50732 | 16,453 | py | Python | src/primaires/scripting/test.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | ["BSD-3-Clause"] | null | null | null | src/primaires/scripting/test.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | ["BSD-3-Clause"] | null | null | null | src/primaires/scripting/test.py | stormi/tsunami | bdc853229834b52b2ee8ed54a3161a1a3133d926 | ["BSD-3-Clause"] | null | null | null |
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la classe Test détaillée plus bas."""
import re
import traceback
from fractions import Fraction
from time import time
from abstraits.obase import BaseObj
from primaires.format.fonctions import *
from primaires.scripting.parser import expressions
from primaires.scripting.instruction import Instruction, ErreurExecution
from primaires.scripting.jeton import Jeton
from primaires.scripting.exceptions import InterrompreCommande
from primaires.scripting.constantes.connecteurs import CONNECTEURS
from primaires.scripting.utile.fonctions import *
from .alerte import Alerte
class Test(BaseObj):
"""Classe contenant un ensemble de tests.
"""
def __init__(self, evenement, chaine_test=""):
"""Constructeur d'une suite de tests.
Il prend en paramètre :
evenement -- l'évènement qui possède le test
chaine_test -- la suite de tests sous la forme d'une chaîne
"""
BaseObj.__init__(self)
self.__evenement = evenement
self.__tests = None
self.__instructions = []
self.__cache = None
self.dernier_niveau = 0
self.etape = None
self._construire()
if chaine_test:
self.construire(chaine_test)
def __getnewargs__(self):
return (None, )
def __str__(self):
return str(self.__tests)
@property
def evenement(self):
return self.__evenement
@property
def instructions(self):
"""Retourne une liste déréférencée des instructions."""
return list(self.__instructions)
@property
def appelant(self):
"""Retourne l'appelant, c'est-à-dire le parent du script."""
return self.evenement.script.parent
@property
def acteur(self):
"""Retourne l'acteur de la quête.
Dans la plupart des cas, c'est la variable 'personnage'. Mais
l'évènement a la possibilité de redéfinir cette variable.
"""
acteur = self.evenement.nom_acteur
return self.evenement.espaces.variables[acteur]
@property
def tests(self):
"""Retourne le test."""
return self.__tests
@property
def sc_tests(self):
"""Retourne le test sans couleurs sous la forme d'une chaîne.
Il s'agit de la chaîne de test scripting tel qu'elle
est affichée dans l'éditeur, mais sans couleurs.
"""
return supprimer_couleurs(str(self.__tests))
def get_cache(self):
"""Calcul le cache Python si nécessaire.
Le cache est soit écrit dans l'attribut d'instance __cache,
soit à calculer depuis la liste des instructions. Quand le
calcul complet a besoin d'être fait, l'attribut d'instance
'__cache' est mis à jour. Ce système permet de garder en mémoire
le code Python propre à un test au lieu de le redemander à
chaque fois, ce qui peut ralentir l'exécution.
"""
if self.__cache is None:
# Calcul le cache
self.calculer_cache()
return self.__cache
def calculer_cache(self):
"""Calcul le cache et l'écrit dans self.__cache."""
code = "def script():\n"
lignes = []
for instruction in self.__instructions:
lignes.append((" " * 4 * (instruction.niveau + 1)) + \
instruction.code_python)
code += "\n".join(lignes)
code += "\n yield None"
self.__cache = code
def construire(self, chaine_test):
"""Construit la suite de chaînes en fonction de la chaîne.
"""
# On essaye d'interpréter la suite de tests
self.__tests = expressions["tests"].parser(chaine_test)[0]
def ajouter_instruction(self, message):
"""Construit et ajoute l'instruction."""
type_instruction = Instruction.test_interpreter(message)
instruction = type_instruction.construire(message)
instruction.deduire_niveau(self.dernier_niveau)
self.dernier_niveau = instruction.get_niveau_suivant()
self.__instructions.append(instruction)
self.calculer_cache()
def ajouter_instructions(self, instructions):
"""Ajoute plusieurs instructions.
Les instructions doivent être précisées sous la forme d'une
chaîne. Chaque instruction est sur une ligne différente.
L'indentation et les lignes vides ne sont pas prises en
compte.
"""
for instruction in instructions.split("\n"):
instruction = instruction.strip()
if instruction:
self.ajouter_instruction(instruction)
def remplacer_instruction(self, ligne, message):
"""Remplace une instruction."""
if ligne not in range(len(self.__instructions)):
raise IndexError("La ligne {} n'existe pas.".format(ligne))
ancienne_instruction = self.__instructions[ligne]
type_instruction = Instruction.test_interpreter(message)
instruction = type_instruction.construire(message)
instruction.niveau = ancienne_instruction.niveau
self.__instructions[ligne] = instruction
self.reordonner()
self.calculer_cache()
def corriger_instruction(self, ligne, texte, remplacement):
"""Corrige l'instruction spécifiée.
On attend en paramètre :
ligne -- le numéro de la ligne de l'instruction à corriger
texte -- le texte à rechercher
remplacement -- le texte à remplacer
La fonction de recherche ne tient pas compte des accents ni des
majuscules.
"""
if ligne not in range(len(self.__instructions)):
raise IndexError("La ligne {} n'existe pas.".format(ligne))
instruction = self.__instructions[ligne]
niveau = instruction.niveau
instruction = supprimer_couleurs(str(instruction))
chaine_recherche = supprimer_accents(instruction).lower()
texte = supprimer_accents(texte).lower()
no_car = chaine_recherche.find(texte)
while no_car >= 0:
instruction = instruction[:no_car] + remplacement + \
instruction[no_car + len(texte):]
chaine_recherche = supprimer_accents(instruction).lower()
no_car = chaine_recherche.find(texte, no_car + len(remplacement))
message = instruction
type_instruction = Instruction.test_interpreter(message)
instruction = type_instruction.construire(message)
instruction.niveau = niveau
self.__instructions[ligne] = instruction
self.reordonner()
self.calculer_cache()
def inserer_instruction(self, ligne, message):
"""Insère une instruction à la ligne précisée."""
if ligne not in range(len(self.__instructions)):
raise IndexError("La ligne {} n'existe pas.".format(ligne))
type_instruction = Instruction.test_interpreter(message)
instruction = type_instruction.construire(message)
self.__instructions.insert(ligne, instruction)
self.reordonner()
self.calculer_cache()
def supprimer_instruction(self, ligne):
"""Supprime une instruction."""
if ligne not in range(len(self.__instructions)):
raise IndexError("La ligne {} n'existe pas.".format(ligne))
del self.__instructions[ligne]
self.reordonner()
self.calculer_cache()
def reordonner(self):
"""Vérifie et corrige les tabulations de toutes les instructions."""
self.dernier_niveau = 0
for instruction in self.__instructions:
instruction.deduire_niveau(self.dernier_niveau)
self.dernier_niveau = instruction.get_niveau_suivant()
def tester(self, evenement, forcer=False):
"""Teste le test."""
# Si le test est relié à une quête, on teste le niveau dans la quête
etape = self.etape
if etape:
if not forcer and not self.acteur or not self.acteur.quetes[
etape.quete.cle].peut_faire(etape.quete, etape.niveau):
return False
if not self.__tests:
return True
py_code = self.__tests.code_python
globales = self.get_globales(evenement)
res = False
try:
res = bool(eval(py_code, globales))
except Exception as err:
self.erreur_execution(str(err))
return res
def get_globales(self, evenement):
"""Retourne le dictionnaire des globales d'exécution."""
# Constitution des globales
return {
"actions": type(self).importeur.scripting.actions,
"fonctions": type(self).importeur.scripting.fonctions,
"variables": evenement.espaces.variables,
"evt": evenement,
"Fraction": Fraction,
"formatter": formatter,
"get_variables": get_variables,
}
def erreur_execution(self, message):
"""Méthode remontant l'erreur aux immortels concernés.
Une alerte est créée pour remonter l'information.
"""
appelant = self.appelant
evenement = str(self.evenement.nom_complet)
tests = self.__tests and "si " + str(self) or "sinon"
pile = traceback.format_exc()
        # Extract the error line number
reg = re.search("File \"\<string\>\", line ([0-9]+)", pile)
no_ligne = -1
if reg:
no_ligne = int(reg.groups()[-1]) - 1
if no_ligne > 0:
ligne = echapper_accolades(str(self.__instructions[no_ligne - 1]))
else:
no_ligne = "|err|inconnue|ff|"
ligne = "Ligne inconnue."
        # Create the alert
alerte = Alerte(appelant, evenement, tests, no_ligne, ligne,
message, pile)
type(self).importeur.scripting.alertes[alerte.no] = alerte
        # Notify the connected immortals
for joueur in type(self).importeur.connex.joueurs_connectes:
if joueur.est_immortel():
joueur << "|err|Une erreur s'est produite lors de " \
"l'exécution d'un script.\nL'alerte {} a été " \
"créée pour en rendre compte.|ff|".format(alerte.no)
def executer_code(self, evenement, code, personnage=None,
alarme=None, exc_interruption=True, bloquant=None, jeton=None):
"""Exécute le code passé en paramètre.
Le code est sous la forme d'un générateur. On appelle donc
la fonction next et récupère le retour (la valeur suivant
le yield).
Si ce retour est 0, on continue l'exécution (appel récursif).
Si le retour est un tuple, on crée une alarme
Si le retour est un autre nombre, on diffère l'exécutçion
Si le retour est None, on s'arrête.
"""
if personnage and alarme:
if not importeur.scripting.alarme_existe(personnage, alarme):
return
t1 = time()
        # Execution
if bloquant and not bloquant.complet:
nom = "script_dif<" + str(id(code)) + ">"
importeur.diffact.ajouter_action(nom, 1,
self.executer_code, evenement, code, personnage,
alarme, False, bloquant, jeton)
return
importeur.scripting.execute_test.append(self)
try:
ret = next(code)
except ErreurExecution as err:
self.erreur_execution(str(err))
except InterrompreCommande as err:
if exc_interruption:
raise err
except Exception as err:
self.erreur_execution(str(err))
else:
if ret is None:
if jeton:
jeton.completer()
return
tps = 0
personnage = alarme = None
if isinstance(ret, tuple):
                # Force writing independent variables
code.gi_frame.f_globals["variables"] = dict(
code.gi_frame.f_globals["variables"])
personnage = ret[1]
alarme = ret[2]
try:
tps = int(ret[0])
assert tps >= 0
except (ValueError, AssertionError):
pass
elif isinstance(ret, Jeton):
tps = 1
bloquant = ret
else:
try:
tps = int(ret)
assert tps >= 0
except (ValueError, AssertionError):
pass
if tps == 0:
self.executer_code(evenement, code, personnage, alarme, False)
else:
                # Defer the script execution
nom = "script_dif<" + str(id(code)) + ">"
importeur.diffact.ajouter_action(nom, tps,
self.executer_code, evenement, code, personnage,
alarme, False, bloquant, jeton)
finally:
importeur.scripting.execute_test.remove(self)
t2 = time()
diff = t2 - t1
if diff > importeur.scripting.tps_script:
appelant = self.appelant
appelant = type(appelant).nom_scripting + " " + \
repr(appelant)
evenement = str(self.evenement.nom_complet)
tests = self.__tests and "si " + str(self) or "sinon"
ligne = "{}, évènement {}, test {}".format(appelant,
evenement, tests)
importeur.scripting.scripts_gourmands[ligne] = diff
def executer_instructions(self, evenement, jeton=None):
"""Convertit et exécute la suite d'instructions.
Pour plus de facilité, on convertit le script en Python pour l'heure
avant l'exécution.
"""
etape = self.etape
if etape and self.acteur:
self.acteur.quetes[etape.quete.cle].deverouiller()
code = self.get_cache()
# Constitution des globales
globales = self.get_globales(evenement)
try:
exec(code, globales)
except Exception as err:
self.erreur_execution(str(err))
else:
code = globales['script']()
self.executer_code(evenement, code, jeton=jeton)
        # If the test is tied to a quest
if etape and self.acteur:
            # If no lock has been set
if not self.acteur.quetes[etape.quete.cle].verrouille:
self.acteur.quetes.valider(etape.quete, etape.niveau)
@staticmethod
def retirer_builtins(nom):
"""Retire (si nécessaire) le nom des __builtins__."""
if nom in __builtins__:
del __builtins__[nom]
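# --- Illustrative sketch (not part of the original file) ---------------------
# Test.calculer_cache above compiles the instruction list into the body of a
# generator function named script, indenting each instruction by its level
# ("niveau") and appending a final "yield None" so that executer_code can
# always step the generator at least once. The standalone sketch below shows
# that generate/exec/step pattern with plain (level, code) pairs standing in
# for Instruction objects; the helper names used here are assumptions.

def _build_cache(instructions):
    # Build the source of a generator function from (level, python_code) pairs.
    code = "def script():\n"
    lines = [" " * 4 * (level + 1) + python_code
             for level, python_code in instructions]
    code += "\n".join(lines)
    code += "\n    yield None"
    return code

def _run(instructions, globals_dict):
    # Compile the cached source, then step the generator until it yields None
    # (a real scheduler would use the yielded values to defer execution).
    exec(_build_cache(instructions), globals_dict)
    for ret in globals_dict["script"]():
        if ret is None:
            break

_run([(0, "total = 1 + 2"), (0, "print('total =', total)")], {})
# -----------------------------------------------------------------------------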
| 36.807606 | 79 | 0.617334 |
58d6c5a49ebf4dff7c49afac63ca0f8e5135b01b | 39,974 | py | Python | conans/client/conan_api.py | JosiahOne/conan | 4e64c29e7d878230829560b9b51e9e192fc0b8b6 | ["MIT"] | null | null | null | conans/client/conan_api.py | JosiahOne/conan | 4e64c29e7d878230829560b9b51e9e192fc0b8b6 | ["MIT"] | null | null | null | conans/client/conan_api.py | JosiahOne/conan | 4e64c29e7d878230829560b9b51e9e192fc0b8b6 | ["MIT"] | null | null | null |
import os
import sys
import requests
import conans
from conans import __version__ as client_version, tools
from conans.client.cmd.create import create
from conans.client.recorder.action_recorder import ActionRecorder
from conans.client.client_cache import ClientCache
from conans.client.conf import MIN_SERVER_COMPATIBLE_VERSION, ConanClientConfigParser
from conans.client.manager import ConanManager, existing_info_files
from conans.client.migrations import ClientMigrator
from conans.client.output import ConanOutput
from conans.client.profile_loader import read_profile, profile_from_args, \
read_conaninfo_profile
from conans.client.recorder.search_recorder import SearchRecorder
from conans.client.recorder.upload_recoder import UploadRecorder
from conans.client.remote_manager import RemoteManager
from conans.client.remote_registry import RemoteRegistry
from conans.client.rest.auth_manager import ConanApiAuthManager
from conans.client.rest.rest_client import RestApiClient
from conans.client.rest.conan_requester import ConanRequester
from conans.client.rest.version_checker import VersionCheckerRequester
from conans.client.runner import ConanRunner
from conans.client.store.localdb import LocalDB
from conans.client.cmd.test import PackageTester
from conans.client.userio import UserIO
from conans.errors import ConanException
from conans.model.ref import ConanFileReference
from conans.model.version import Version
from conans.paths import get_conan_user_home, CONANINFO, BUILD_INFO
from conans.util.env_reader import get_env
from conans.util.files import save_files, exception_message_safe, mkdir
from conans.util.log import configure_logger
from conans.util.tracer import log_command, log_exception
from conans.tools import set_global_instances
from conans.client.cmd.uploader import CmdUpload
from conans.client.cmd.profile import cmd_profile_update, cmd_profile_get,\
cmd_profile_delete_key, cmd_profile_create, cmd_profile_list
from conans.client.cmd.search import Search
from conans.client.cmd.user import users_clean, users_list, user_set
from conans.client.importer import undo_imports
from conans.client.cmd.export import cmd_export, export_alias
from conans.unicode import get_cwd
from conans.client.remover import ConanRemover
from conans.client.cmd.download import download
from conans.model.workspace import Workspace
from conans.client.graph.graph_manager import GraphManager
from conans.client.loader import ConanFileLoader
from conans.client.graph.proxy import ConanProxy
from conans.client.graph.python_requires import ConanPythonRequire
from conans.client.graph.range_resolver import RangeResolver
default_manifest_folder = '.conan_manifests'
def get_request_timeout():
timeout = os.getenv("CONAN_REQUEST_TIMEOUT")
try:
return float(timeout) if timeout is not None else None
except ValueError:
raise ConanException("Specify a numeric parameter for 'request_timeout'")
def get_basic_requester(client_cache):
requester = requests.Session()
# Manage the verify and the client certificates and setup proxies
return ConanRequester(requester, client_cache, get_request_timeout())
def api_method(f):
def wrapper(*args, **kwargs):
the_self = args[0]
try:
curdir = get_cwd()
log_command(f.__name__, kwargs)
with tools.environment_append(the_self._client_cache.conan_config.env_vars):
# Patch the globals in tools
return f(*args, **kwargs)
except Exception as exc:
msg = exception_message_safe(exc)
try:
log_exception(exc, msg)
except:
pass
raise
finally:
os.chdir(curdir)
return wrapper
def _make_abs_path(path, cwd=None, default=None):
"""convert 'path' to absolute if necessary (could be already absolute)
if not defined (empty, or None), will return 'default' one or 'cwd'
"""
cwd = cwd or get_cwd()
if not path:
abs_path = default or cwd
elif os.path.isabs(path):
abs_path = path
else:
abs_path = os.path.normpath(os.path.join(cwd, path))
return abs_path
def _get_conanfile_path(path, cwd, py):
"""
    :param py: True: must be .py, False: must be .txt, None: try .py, then .txt
"""
path = _make_abs_path(path, cwd)
if os.path.isdir(path): # Can be a folder
if py:
path = os.path.join(path, "conanfile.py")
elif py is False:
path = os.path.join(path, "conanfile.txt")
else:
path_py = os.path.join(path, "conanfile.py")
if os.path.exists(path_py):
path = path_py
else:
path = os.path.join(path, "conanfile.txt")
if not os.path.isfile(path): # Must exist
raise ConanException("Conanfile not found: %s" % path)
if py and not path.endswith(".py"):
raise ConanException("A conanfile.py is needed (not valid conanfile.txt)")
return path
class ConanAPIV1(object):
@staticmethod
def instance_remote_manager(requester, client_cache, user_io, _client_version,
min_server_compatible_version):
# Verify client version against remotes
version_checker_req = VersionCheckerRequester(requester, _client_version,
min_server_compatible_version,
user_io.out)
# To handle remote connections
put_headers = client_cache.read_put_headers()
rest_api_client = RestApiClient(user_io.out, requester=version_checker_req,
put_headers=put_headers)
# To store user and token
localdb = LocalDB(client_cache.localdb)
# Wraps RestApiClient to add authentication support (same interface)
auth_manager = ConanApiAuthManager(rest_api_client, user_io, localdb)
# Handle remote connections
remote_manager = RemoteManager(client_cache, auth_manager, user_io.out)
return localdb, rest_api_client, remote_manager
@staticmethod
def factory(interactive=None):
"""Factory"""
# Respect color env setting or check tty if unset
color_set = "CONAN_COLOR_DISPLAY" in os.environ
if ((color_set and get_env("CONAN_COLOR_DISPLAY", 1))
or (not color_set
and hasattr(sys.stdout, "isatty")
and sys.stdout.isatty())):
import colorama
if get_env("PYCHARM_HOSTED"): # in PyCharm disable convert/strip
colorama.init(convert=False, strip=False)
else:
colorama.init()
color = True
else:
color = False
out = ConanOutput(sys.stdout, color)
user_io = UserIO(out=out)
try:
user_home = get_conan_user_home()
client_cache = migrate_and_get_client_cache(user_home, out)
sys.path.append(os.path.join(user_home, "python"))
except Exception as e:
out.error(str(e))
raise
with tools.environment_append(client_cache.conan_config.env_vars):
            # Adjust CONAN_LOGGING_LEVEL with the env value read
conans.util.log.logger = configure_logger()
# Get the new command instance after migrations have been done
requester = get_basic_requester(client_cache)
_, _, remote_manager = ConanAPIV1.instance_remote_manager(
requester,
client_cache, user_io,
Version(client_version),
Version(MIN_SERVER_COMPATIBLE_VERSION))
# Adjust global tool variables
set_global_instances(out, requester)
# Settings preprocessor
if interactive is None:
interactive = not get_env("CONAN_NON_INTERACTIVE", False)
conan = ConanAPIV1(client_cache, user_io, get_conan_runner(), remote_manager,
interactive=interactive)
return conan, client_cache, user_io
def __init__(self, client_cache, user_io, runner, remote_manager, interactive=True):
assert isinstance(user_io, UserIO)
assert isinstance(client_cache, ClientCache)
self._client_cache = client_cache
self._user_io = user_io
self._runner = runner
self._remote_manager = remote_manager
self._registry = RemoteRegistry(self._client_cache.registry, self._user_io.out)
if not interactive:
self._user_io.disable_input()
self._proxy = ConanProxy(client_cache, self._user_io.out, remote_manager, registry=self._registry)
resolver = RangeResolver(self._user_io.out, client_cache, self._proxy)
python_requires = ConanPythonRequire(self._proxy, resolver)
self._loader = ConanFileLoader(self._runner, self._user_io.out, python_requires)
self._graph_manager = GraphManager(self._user_io.out, self._client_cache, self._registry,
self._remote_manager, self._loader, self._proxy, resolver)
def _init_manager(self, action_recorder):
"""Every api call gets a new recorder and new manager"""
return ConanManager(self._client_cache, self._user_io, self._runner,
self._remote_manager, action_recorder, self._registry,
self._graph_manager)
@api_method
def new(self, name, header=False, pure_c=False, test=False, exports_sources=False, bare=False,
cwd=None, visual_versions=None, linux_gcc_versions=None, linux_clang_versions=None,
osx_clang_versions=None, shared=None, upload_url=None, gitignore=None,
gitlab_gcc_versions=None, gitlab_clang_versions=None,
circleci_gcc_versions=None, circleci_clang_versions=None, circleci_osx_versions=None):
from conans.client.cmd.new import cmd_new
cwd = os.path.abspath(cwd or get_cwd())
files = cmd_new(name, header=header, pure_c=pure_c, test=test,
exports_sources=exports_sources, bare=bare,
visual_versions=visual_versions,
linux_gcc_versions=linux_gcc_versions,
linux_clang_versions=linux_clang_versions,
osx_clang_versions=osx_clang_versions, shared=shared,
upload_url=upload_url, gitignore=gitignore,
gitlab_gcc_versions=gitlab_gcc_versions,
gitlab_clang_versions=gitlab_clang_versions,
circleci_gcc_versions=circleci_gcc_versions,
circleci_clang_versions=circleci_clang_versions,
circleci_osx_versions=circleci_osx_versions)
save_files(cwd, files)
for f in sorted(files):
self._user_io.out.success("File saved: %s" % f)
@api_method
def test(self, path, reference, profile_name=None, settings=None, options=None, env=None,
remote_name=None, update=False, build_modes=None, cwd=None, test_build_folder=None):
settings = settings or []
options = options or []
env = env or []
conanfile_path = _get_conanfile_path(path, cwd, py=True)
cwd = cwd or get_cwd()
profile = profile_from_args(profile_name, settings, options, env, cwd,
self._client_cache)
reference = ConanFileReference.loads(reference)
recorder = ActionRecorder()
manager = self._init_manager(recorder)
pt = PackageTester(manager, self._user_io)
pt.install_build_and_test(conanfile_path, reference, profile, remote_name,
update, build_modes=build_modes,
test_build_folder=test_build_folder)
@api_method
def create(self, conanfile_path, name=None, version=None, user=None, channel=None,
profile_name=None, settings=None,
options=None, env=None, test_folder=None, not_export=False,
build_modes=None,
keep_source=False, keep_build=False, verify=None,
manifests=None, manifests_interactive=None,
remote_name=None, update=False, cwd=None, test_build_folder=None):
"""
API method to create a conan package
        :param test_folder: default None - looks for a default 'test' or 'test_package' folder,
                            string - test_folder path,
                            False - disables tests
"""
settings = settings or []
options = options or []
env = env or []
try:
cwd = cwd or os.getcwd()
recorder = ActionRecorder()
conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
reference, conanfile = self._loader.load_export(conanfile_path, name, version, user, channel)
# Make sure keep_source is set for keep_build
keep_source = keep_source or keep_build
# Forcing an export!
if not not_export:
cmd_export(conanfile_path, conanfile, reference, keep_source, self._user_io.out,
self._client_cache)
if build_modes is None: # Not specified, force build the tested library
build_modes = [conanfile.name]
manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
manifest_folder, manifest_interactive, manifest_verify = manifests
profile = profile_from_args(profile_name, settings, options, env,
cwd, self._client_cache)
manager = self._init_manager(recorder)
recorder.add_recipe_being_developed(reference)
create(reference, manager, self._user_io, profile, remote_name, update, build_modes,
manifest_folder, manifest_verify, manifest_interactive, keep_build,
test_build_folder, test_folder, conanfile_path)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def export_pkg(self, conanfile_path, name, channel, source_folder=None, build_folder=None,
package_folder=None, install_folder=None, profile_name=None, settings=None,
options=None, env=None, force=False, user=None, version=None, cwd=None):
settings = settings or []
options = options or []
env = env or []
cwd = cwd or get_cwd()
        # Check that info files exist if the install folder is specified
if install_folder and not existing_info_files(_make_abs_path(install_folder, cwd)):
raise ConanException("The specified install folder doesn't contain '%s' and '%s' "
"files" % (CONANINFO, BUILD_INFO))
conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
if package_folder:
if build_folder or source_folder:
raise ConanException("package folder definition incompatible with build and source folders")
package_folder = _make_abs_path(package_folder, cwd)
build_folder = _make_abs_path(build_folder, cwd)
install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
source_folder = _make_abs_path(source_folder, cwd, default=os.path.dirname(conanfile_path))
        # Check that settings and info files are not both specified
if install_folder and existing_info_files(install_folder) and \
(profile_name or settings or options or env):
raise ConanException("%s and %s are found, at '%s' folder, so specifying profile, "
"settings, options or env is not allowed" % (CONANINFO, BUILD_INFO,
install_folder))
infos_present = existing_info_files(install_folder)
if not infos_present:
profile = profile_from_args(profile_name, settings, options, env=env,
cwd=cwd, client_cache=self._client_cache)
else:
profile = read_conaninfo_profile(install_folder)
reference, conanfile = self._loader.load_export(conanfile_path, name, version, user, channel)
cmd_export(conanfile_path, conanfile, reference, False, self._user_io.out, self._client_cache)
recorder = ActionRecorder()
manager = self._init_manager(recorder)
manager.export_pkg(reference, source_folder=source_folder, build_folder=build_folder,
package_folder=package_folder, install_folder=install_folder,
profile=profile, force=force)
@api_method
def download(self, reference, remote_name=None, package=None, recipe=False):
if package and recipe:
raise ConanException("recipe parameter cannot be used together with package")
# Install packages without settings (fixed ids or all)
conan_ref = ConanFileReference.loads(reference)
recorder = ActionRecorder()
download(conan_ref, package, remote_name, recipe, self._registry, self._remote_manager,
self._client_cache, self._user_io.out, recorder, self._loader)
@api_method
def install_reference(self, reference, settings=None, options=None, env=None,
remote_name=None, verify=None, manifests=None,
manifests_interactive=None, build=None, profile_name=None,
update=False, generators=None, install_folder=None, cwd=None):
try:
recorder = ActionRecorder()
cwd = cwd or os.getcwd()
install_folder = _make_abs_path(install_folder, cwd)
manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
manifest_folder, manifest_interactive, manifest_verify = manifests
profile = profile_from_args(profile_name, settings, options, env, cwd,
self._client_cache)
if not generators: # We don't want the default txt
generators = False
mkdir(install_folder)
manager = self._init_manager(recorder)
manager.install(reference=reference, install_folder=install_folder,
remote_name=remote_name, profile=profile, build_modes=build,
update=update, manifest_folder=manifest_folder,
manifest_verify=manifest_verify,
manifest_interactive=manifest_interactive,
generators=generators)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def install(self, path="", settings=None, options=None, env=None,
remote_name=None, verify=None, manifests=None,
manifests_interactive=None, build=None, profile_name=None,
update=False, generators=None, no_imports=False, install_folder=None, cwd=None):
try:
recorder = ActionRecorder()
cwd = cwd or os.getcwd()
manifests = _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd)
manifest_folder, manifest_interactive, manifest_verify = manifests
profile = profile_from_args(profile_name, settings, options, env, cwd,
self._client_cache)
wspath = _make_abs_path(path, cwd)
if install_folder:
if os.path.isabs(install_folder):
wsinstall_folder = install_folder
else:
wsinstall_folder = os.path.join(cwd, install_folder)
else:
wsinstall_folder = None
workspace = Workspace.get_workspace(wspath, wsinstall_folder)
if workspace:
self._user_io.out.success("Using conanws.yml file from %s" % workspace._base_folder)
manager = self._init_manager(recorder)
manager.install_workspace(profile, workspace, remote_name, build, update)
return
install_folder = _make_abs_path(install_folder, cwd)
conanfile_path = _get_conanfile_path(path, cwd, py=None)
manager = self._init_manager(recorder)
manager.install(reference=conanfile_path,
install_folder=install_folder,
remote_name=remote_name,
profile=profile,
build_modes=build,
update=update,
manifest_folder=manifest_folder,
manifest_verify=manifest_verify,
manifest_interactive=manifest_interactive,
generators=generators,
no_imports=no_imports)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def config_get(self, item):
config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
self._user_io.out.info(config_parser.get_item(item))
return config_parser.get_item(item)
@api_method
def config_set(self, item, value):
config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
config_parser.set_item(item, value)
self._client_cache.invalidate()
@api_method
def config_rm(self, item):
config_parser = ConanClientConfigParser(self._client_cache.conan_conf_path)
config_parser.rm_item(item)
self._client_cache.invalidate()
@api_method
def config_install(self, item, verify_ssl, config_type=None, args=None):
        # _make_abs_path, but it might not be a path at all
if item is not None and os.path.exists(item) and not os.path.isabs(item):
item = os.path.abspath(item)
from conans.client.conf.config_installer import configuration_install
return configuration_install(item, self._client_cache, self._user_io.out, verify_ssl, config_type, args)
def _info_get_profile(self, reference, install_folder, profile_name, settings, options, env):
cwd = get_cwd()
try:
reference = ConanFileReference.loads(reference)
except ConanException:
reference = _get_conanfile_path(reference, cwd=None, py=None)
if install_folder or not (profile_name or settings or options or env):
            # When no install folder is specified and no setting either, we try
            # to read the info from cwd
install_folder = _make_abs_path(install_folder, cwd)
if existing_info_files(install_folder):
return reference, read_conaninfo_profile(install_folder)
return reference, profile_from_args(profile_name, settings, options, env=env,
cwd=cwd, client_cache=self._client_cache)
@api_method
def info_build_order(self, reference, settings=None, options=None, env=None,
profile_name=None, remote_name=None, build_order=None, check_updates=None,
install_folder=None):
reference, profile = self._info_get_profile(reference, install_folder, profile_name, settings,
options, env)
recorder = ActionRecorder()
deps_graph, _, _ = self._graph_manager.load_graph(reference, None, profile, ["missing"], check_updates,
False, remote_name, recorder, workspace=None)
return deps_graph.build_order(build_order)
@api_method
def info_nodes_to_build(self, reference, build_modes, settings=None, options=None, env=None,
profile_name=None, remote_name=None, check_updates=None, install_folder=None):
reference, profile = self._info_get_profile(reference, install_folder, profile_name, settings,
options, env)
recorder = ActionRecorder()
deps_graph, conanfile, _ = self._graph_manager.load_graph(reference, None, profile, build_modes, check_updates,
False, remote_name, recorder, workspace=None)
nodes_to_build = deps_graph.nodes_to_build()
return nodes_to_build, conanfile
@api_method
def info(self, reference, remote_name=None, settings=None, options=None, env=None,
profile_name=None, update=False, install_folder=None, build=None):
reference, profile = self._info_get_profile(reference, install_folder, profile_name, settings,
options, env)
recorder = ActionRecorder()
deps_graph, conanfile, _ = self._graph_manager.load_graph(reference, None, profile, build, update,
False, remote_name, recorder, workspace=None)
return deps_graph, conanfile
@api_method
def build(self, conanfile_path, source_folder=None, package_folder=None, build_folder=None,
install_folder=None, should_configure=True, should_build=True, should_install=True,
should_test=True, cwd=None):
cwd = cwd or get_cwd()
conanfile_path = _get_conanfile_path(conanfile_path, cwd, py=True)
build_folder = _make_abs_path(build_folder, cwd)
install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
source_folder = _make_abs_path(source_folder, cwd, default=os.path.dirname(conanfile_path))
default_pkg_folder = os.path.join(build_folder, "package")
package_folder = _make_abs_path(package_folder, cwd, default=default_pkg_folder)
recorder = ActionRecorder()
manager = self._init_manager(recorder)
manager.build(conanfile_path, source_folder, build_folder, package_folder,
install_folder, should_configure=should_configure, should_build=should_build,
should_install=should_install, should_test=should_test)
@api_method
def package(self, path, build_folder, package_folder, source_folder=None, install_folder=None, cwd=None):
cwd = cwd or get_cwd()
conanfile_path = _get_conanfile_path(path, cwd, py=True)
build_folder = _make_abs_path(build_folder, cwd)
install_folder = _make_abs_path(install_folder, cwd, default=build_folder)
source_folder = _make_abs_path(source_folder, cwd, default=os.path.dirname(conanfile_path))
default_pkg_folder = os.path.join(build_folder, "package")
package_folder = _make_abs_path(package_folder, cwd, default=default_pkg_folder)
recorder = ActionRecorder()
manager = self._init_manager(recorder)
manager.local_package(package_folder, conanfile_path, build_folder, source_folder,
install_folder)
@api_method
def source(self, path, source_folder=None, info_folder=None, cwd=None):
cwd = cwd or get_cwd()
conanfile_path = _get_conanfile_path(path, cwd, py=True)
source_folder = _make_abs_path(source_folder, cwd)
info_folder = _make_abs_path(info_folder, cwd)
mkdir(source_folder)
if not os.path.exists(info_folder):
raise ConanException("Specified info-folder doesn't exist")
recorder = ActionRecorder()
manager = self._init_manager(recorder)
manager.source(conanfile_path, source_folder, info_folder)
@api_method
def imports(self, path, dest=None, info_folder=None, cwd=None):
"""
:param path: Path to the conanfile
:param dest: Dir to put the imported files. (Abs path or relative to cwd)
:param info_folder: Dir where the conaninfo.txt and conanbuildinfo.txt files are
:param cwd: Current working directory
:return: None
"""
cwd = cwd or get_cwd()
info_folder = _make_abs_path(info_folder, cwd)
dest = _make_abs_path(dest, cwd)
mkdir(dest)
conanfile_abs_path = _get_conanfile_path(path, cwd, py=None)
recorder = ActionRecorder()
manager = self._init_manager(recorder)
manager.imports(conanfile_abs_path, dest, info_folder)
@api_method
def imports_undo(self, manifest_path):
cwd = get_cwd()
manifest_path = _make_abs_path(manifest_path, cwd)
undo_imports(manifest_path, self._user_io.out)
@api_method
def export(self, path, name, version, user, channel, keep_source=False, cwd=None):
conanfile_path = _get_conanfile_path(path, cwd, py=True)
reference, conanfile = self._loader.load_export(conanfile_path, name, version, user, channel)
cmd_export(conanfile_path, conanfile, reference, keep_source, self._user_io.out, self._client_cache)
@api_method
def remove(self, pattern, query=None, packages=None, builds=None, src=False, force=False,
remote_name=None, outdated=False):
remover = ConanRemover(self._client_cache, self._remote_manager, self._user_io, self._registry)
remover.remove(pattern, remote_name, src, builds, packages, force=force,
packages_query=query, outdated=outdated)
@api_method
def copy(self, reference, user_channel, force=False, packages=None):
"""
        :param packages: None = no binaries, True = all binaries, else a list of package IDs
"""
from conans.client.cmd.copy import cmd_copy
# FIXME: conan copy does not support short-paths in Windows
reference = ConanFileReference.loads(str(reference))
cmd_copy(reference, user_channel, packages, self._client_cache,
self._user_io, self._remote_manager, self._registry, self._loader, force=force)
@api_method
def authenticate(self, name, password, remote_name):
remote = self.get_remote_by_name(remote_name)
_, remote_name, prev_user, user = self._remote_manager.authenticate(remote, name, password)
return remote_name, prev_user, user
@api_method
def user_set(self, user, remote_name=None):
remote = self.get_default_remote() if not remote_name else self.get_remote_by_name(remote_name)
return user_set(self._client_cache.localdb, user, remote)
@api_method
def users_clean(self):
users_clean(self._client_cache.localdb)
@api_method
def users_list(self, remote_name=None):
info = {"error": False, "remotes": []}
remotes = [self.get_remote_by_name(remote_name)] if remote_name else self.remote_list()
try:
info["remotes"] = users_list(self._client_cache.localdb, remotes)
return info
except ConanException as exc:
info["error"] = True
exc.info = info
raise
@api_method
def search_recipes(self, pattern, remote_name=None, case_sensitive=False):
recorder = SearchRecorder()
search = Search(self._client_cache, self._remote_manager, self._registry)
try:
references = search.search_recipes(pattern, remote_name, case_sensitive)
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
for remote_name, refs in references.items():
for ref in refs:
recorder.add_recipe(remote_name, ref, with_packages=False)
return recorder.get_info()
@api_method
def search_packages(self, reference, query=None, remote_name=None, outdated=False):
recorder = SearchRecorder()
search = Search(self._client_cache, self._remote_manager, self._registry)
try:
reference = ConanFileReference.loads(str(reference))
references = search.search_packages(reference, remote_name,
query=query,
outdated=outdated)
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
for remote_name, remote_ref in references.items():
recorder.add_recipe(remote_name, reference)
if remote_ref.ordered_packages:
for package_id, properties in remote_ref.ordered_packages.items():
package_recipe_hash = properties.get("recipe_hash", None)
recorder.add_package(remote_name, reference, package_id,
properties.get("options", []),
properties.get("settings", []),
properties.get("full_requires", []),
remote_ref.recipe_hash != package_recipe_hash)
return recorder.get_info()
@api_method
def upload(self, pattern, package=None, remote_name=None,
all_packages=False, force=False, confirm=False, retry=2,
retry_wait=5, skip_upload=False, integrity_check=False,
no_overwrite=None, query=None):
""" Uploads a package recipe and the generated binary packages to a specified remote
"""
recorder = UploadRecorder()
if force and no_overwrite:
exc = ConanException("'no_overwrite' argument cannot be used together with 'force'")
recorder.error = True
exc.info = recorder.get_info()
raise exc
uploader = CmdUpload(self._client_cache, self._user_io, self._remote_manager,
self._registry, self._loader)
try:
uploader.upload(recorder, pattern, package, all_packages, force, confirm, retry,
retry_wait, skip_upload, integrity_check, no_overwrite, remote_name,
query=query)
return recorder.get_info()
except ConanException as exc:
recorder.error = True
exc.info = recorder.get_info()
raise
@api_method
def remote_list(self):
return self._registry.remotes
@api_method
def remote_add(self, remote_name, url, verify_ssl=True, insert=None, force=None):
return self._registry.add(remote_name, url, verify_ssl, insert, force)
@api_method
def remote_remove(self, remote_name):
return self._registry.remove(remote_name)
@api_method
def remote_update(self, remote_name, url, verify_ssl=True, insert=None):
return self._registry.update(remote_name, url, verify_ssl, insert)
@api_method
def remote_rename(self, remote_name, new_new_remote):
return self._registry.rename(remote_name, new_new_remote)
@api_method
def remote_list_ref(self):
return self._registry.refs
@api_method
def remote_add_ref(self, reference, remote_name):
reference = ConanFileReference.loads(str(reference))
return self._registry.set_ref(reference, remote_name, check_exists=True)
@api_method
def remote_remove_ref(self, reference):
reference = ConanFileReference.loads(str(reference))
return self._registry.remove_ref(reference)
@api_method
def remote_update_ref(self, reference, remote_name):
reference = ConanFileReference.loads(str(reference))
return self._registry.update_ref(reference, remote_name)
@api_method
def profile_list(self):
return cmd_profile_list(self._client_cache.profiles_path, self._user_io.out)
@api_method
def create_profile(self, profile_name, detect=False):
return cmd_profile_create(profile_name, self._client_cache.profiles_path,
self._user_io.out, detect)
@api_method
def update_profile(self, profile_name, key, value):
return cmd_profile_update(profile_name, key, value, self._client_cache.profiles_path)
@api_method
def get_profile_key(self, profile_name, key):
return cmd_profile_get(profile_name, key, self._client_cache.profiles_path)
@api_method
def delete_profile_key(self, profile_name, key):
return cmd_profile_delete_key(profile_name, key, self._client_cache.profiles_path)
@api_method
def read_profile(self, profile=None):
p, _ = read_profile(profile, get_cwd(), self._client_cache.profiles_path)
return p
@api_method
def get_path(self, reference, package_id=None, path=None, remote_name=None):
from conans.client.local_file_getter import get_path
reference = ConanFileReference.loads(str(reference))
if not path:
path = "conanfile.py" if not package_id else "conaninfo.txt"
if not remote_name:
return get_path(self._client_cache, reference, package_id, path), path
else:
remote = self.get_remote_by_name(remote_name)
return self._remote_manager.get_path(reference, package_id, path, remote), path
@api_method
def export_alias(self, reference, target_reference):
reference = ConanFileReference.loads(reference)
target_reference = ConanFileReference.loads(target_reference)
return export_alias(reference, target_reference, self._client_cache)
@api_method
def get_default_remote(self):
return self._registry.default_remote
@api_method
def get_remote_by_name(self, remote_name):
return self._registry.remote(remote_name)
Conan = ConanAPIV1
def _parse_manifests_arguments(verify, manifests, manifests_interactive, cwd):
if manifests and manifests_interactive:
raise ConanException("Do not specify both manifests and "
"manifests-interactive arguments")
if verify and (manifests or manifests_interactive):
raise ConanException("Do not specify both 'verify' and "
"'manifests' or 'manifests-interactive' arguments")
manifest_folder = verify or manifests or manifests_interactive
if manifest_folder:
if not os.path.isabs(manifest_folder):
if not cwd:
raise ConanException("'cwd' should be defined if the manifest folder is relative.")
manifest_folder = os.path.join(cwd, manifest_folder)
manifest_verify = verify is not None
manifest_interactive = manifests_interactive is not None
else:
manifest_verify = manifest_interactive = False
return manifest_folder, manifest_interactive, manifest_verify
def get_conan_runner():
print_commands_to_output = get_env("CONAN_PRINT_RUN_COMMANDS", False)
generate_run_log_file = get_env("CONAN_LOG_RUN_TO_FILE", False)
log_run_to_output = get_env("CONAN_LOG_RUN_TO_OUTPUT", True)
runner = ConanRunner(print_commands_to_output, generate_run_log_file, log_run_to_output)
return runner
def migrate_and_get_client_cache(base_folder, out, storage_folder=None):
# Init paths
client_cache = ClientCache(base_folder, storage_folder, out)
# Migration system
migrator = ClientMigrator(client_cache, Version(client_version), out)
migrator.migrate()
return client_cache
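# --- Illustrative sketch (not part of the original file) ---------------------
# The api_method decorator defined near the top of this file wraps every API
# entry point: it logs the command, applies the cached Conan environment and
# restores the working directory in a finally block. The stripped-down sketch
# below shows just that save/restore decorator pattern; it omits the Conan
# specifics (log_command, environment_append) and is only an illustration.

import functools
import os
import tempfile

def _api_method(func):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        curdir = os.getcwd()
        try:
            # A full implementation would also push config env vars here.
            return func(*args, **kwargs)
        finally:
            os.chdir(curdir)  # restore the cwd even if the call raised
    return wrapper

@_api_method
def _work_in(path):
    os.chdir(path)
    return os.getcwd()

_before = os.getcwd()
_work_in(tempfile.gettempdir())
assert os.getcwd() == _before  # the decorator restored the working directory
# -----------------------------------------------------------------------------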
| 45.219457 | 119 | 0.654425 |
a1b6ee05d946436eda86426425afb7968a9938df | 12,445 | py | Python | sdk/python/pulumi_azure_native/attestation/attestation_provider.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/attestation/attestation_provider.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null | sdk/python/pulumi_azure_native/attestation/attestation_provider.py | sebtelko/pulumi-azure-native | 711ec021b5c73da05611c56c8a35adb0ce3244e4 | ["Apache-2.0"] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['AttestationProviderArgs', 'AttestationProvider']
@pulumi.input_type
class AttestationProviderArgs:
def __init__(__self__, *,
properties: pulumi.Input['AttestationServiceCreationSpecificParamsArgs'],
resource_group_name: pulumi.Input[str],
location: Optional[pulumi.Input[str]] = None,
provider_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
"""
The set of arguments for constructing a AttestationProvider resource.
:param pulumi.Input['AttestationServiceCreationSpecificParamsArgs'] properties: Properties of the attestation provider
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[str] location: The supported Azure location where the attestation provider should be created.
:param pulumi.Input[str] provider_name: Name of the attestation provider.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The tags that will be assigned to the attestation provider.
"""
pulumi.set(__self__, "properties", properties)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if location is not None:
pulumi.set(__self__, "location", location)
if provider_name is not None:
pulumi.set(__self__, "provider_name", provider_name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter
def properties(self) -> pulumi.Input['AttestationServiceCreationSpecificParamsArgs']:
"""
Properties of the attestation provider
"""
return pulumi.get(self, "properties")
@properties.setter
def properties(self, value: pulumi.Input['AttestationServiceCreationSpecificParamsArgs']):
pulumi.set(self, "properties", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The supported Azure location where the attestation provider should be created.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter(name="providerName")
def provider_name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the attestation provider.
"""
return pulumi.get(self, "provider_name")
@provider_name.setter
def provider_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "provider_name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
The tags that will be assigned to the attestation provider.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
class AttestationProvider(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
location: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['AttestationServiceCreationSpecificParamsArgs']]] = None,
provider_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
"""
Attestation service response message.
API Version: 2020-10-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] location: The supported Azure location where the attestation provider should be created.
:param pulumi.Input[pulumi.InputType['AttestationServiceCreationSpecificParamsArgs']] properties: Properties of the attestation provider
:param pulumi.Input[str] provider_name: Name of the attestation provider.
:param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: The tags that will be assigned to the attestation provider.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AttestationProviderArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Attestation service response message.
API Version: 2020-10-01.
:param str resource_name: The name of the resource.
:param AttestationProviderArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AttestationProviderArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
location: Optional[pulumi.Input[str]] = None,
properties: Optional[pulumi.Input[pulumi.InputType['AttestationServiceCreationSpecificParamsArgs']]] = None,
provider_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AttestationProviderArgs.__new__(AttestationProviderArgs)
__props__.__dict__["location"] = location
if properties is None and not opts.urn:
raise TypeError("Missing required property 'properties'")
__props__.__dict__["properties"] = properties
__props__.__dict__["provider_name"] = provider_name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["tags"] = tags
__props__.__dict__["attest_uri"] = None
__props__.__dict__["name"] = None
__props__.__dict__["private_endpoint_connections"] = None
__props__.__dict__["status"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["trust_model"] = None
__props__.__dict__["type"] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:attestation:AttestationProvider"), pulumi.Alias(type_="azure-native:attestation/v20180901preview:AttestationProvider"), pulumi.Alias(type_="azure-nextgen:attestation/v20180901preview:AttestationProvider"), pulumi.Alias(type_="azure-native:attestation/v20201001:AttestationProvider"), pulumi.Alias(type_="azure-nextgen:attestation/v20201001:AttestationProvider")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(AttestationProvider, __self__).__init__(
'azure-native:attestation:AttestationProvider',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'AttestationProvider':
"""
Get an existing AttestationProvider resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = AttestationProviderArgs.__new__(AttestationProviderArgs)
__props__.__dict__["attest_uri"] = None
__props__.__dict__["location"] = None
__props__.__dict__["name"] = None
__props__.__dict__["private_endpoint_connections"] = None
__props__.__dict__["status"] = None
__props__.__dict__["system_data"] = None
__props__.__dict__["tags"] = None
__props__.__dict__["trust_model"] = None
__props__.__dict__["type"] = None
return AttestationProvider(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="attestUri")
def attest_uri(self) -> pulumi.Output[Optional[str]]:
"""
        Gets the URI of the attestation service.
"""
return pulumi.get(self, "attest_uri")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The geo-location where the resource lives
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the resource
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateEndpointConnections")
def private_endpoint_connections(self) -> pulumi.Output[Sequence['outputs.PrivateEndpointConnectionResponse']]:
"""
List of private endpoint connections associated with the attestation provider.
"""
return pulumi.get(self, "private_endpoint_connections")
@property
@pulumi.getter
def status(self) -> pulumi.Output[Optional[str]]:
"""
Status of attestation service.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
The system metadata relating to this resource
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="trustModel")
def trust_model(self) -> pulumi.Output[Optional[str]]:
"""
Trust model for the attestation provider.
"""
return pulumi.get(self, "trust_model")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
"""
return pulumi.get(self, "type")
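A minimal usage sketch of the resource class above, assuming a Pulumi program that already has a resource group; the resource names, location, and tag values are illustrative assumptions, not part of the generated SDK.
# Hypothetical usage sketch — resource names, location, and tags are assumptions.
import pulumi
import pulumi_azure_native as azure_native

example = azure_native.attestation.AttestationProvider(
    "exampleProvider",
    resource_group_name="example-rg",   # assumed pre-existing resource group
    provider_name="exampleattestation",
    location="westus",
    properties=azure_native.attestation.AttestationServiceCreationSpecificParamsArgs(),
    tags={"env": "dev"},
)
pulumi.export("attestUri", example.attest_uri)  # attest_uri is an output property of the resource above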
| 43.062284
| 450
| 0.656087
|
72b12bccea449e89acbba1b4e58c9da67c4421e3
| 10,444
|
py
|
Python
|
book_api/tests/test_views_books.py
|
musflood/zonar-book-api
|
97e8ef4b52b03ac70285acc942f0ca5b775f2061
|
[
"MIT"
] | null | null | null |
book_api/tests/test_views_books.py
|
musflood/zonar-book-api
|
97e8ef4b52b03ac70285acc942f0ca5b775f2061
|
[
"MIT"
] | null | null | null |
book_api/tests/test_views_books.py
|
musflood/zonar-book-api
|
97e8ef4b52b03ac70285acc942f0ca5b775f2061
|
[
"MIT"
] | null | null | null |
"""Unit tests for the Book view functions."""
import pytest
from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden
from book_api.models.book import Book
from book_api.tests.conftest import FAKE
from book_api.views.books import (
_create_book, _delete_book, _list_books, _update_book, validate_user)
def test_validate_user_raises_error_for_incomplete_data(dummy_request):
"""Test that validate_user raises HTTPBadRequest for missing password."""
data = {
'email': FAKE.email()
}
with pytest.raises(HTTPBadRequest):
validate_user(dummy_request.dbsession, data)
def test_validate_user_raises_error_for_email_not_in_database(dummy_request):
"""Test that validate_user raises HTTPForbidden for bad email."""
data = {
'email': FAKE.email(),
'password': 'password'
}
with pytest.raises(HTTPForbidden):
validate_user(dummy_request.dbsession, data)
def test_validate_user_raises_error_for_incorrect_password(dummy_request, db_session, one_user):
"""Test that validate_user raises HTTPForbidden for bad email."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'notthepassword'
}
with pytest.raises(HTTPForbidden):
validate_user(dummy_request.dbsession, data)
def test_validate_user_returns_user_matching_email(dummy_request, db_session, one_user):
"""Test that validate_user raises HTTPForbidden for bad email."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password'
}
auth_user = validate_user(dummy_request.dbsession, data)
assert auth_user is one_user
def test_list_empty_for_user_with_no_books(dummy_request, db_session, one_user):
"""Test that list returns empty list for user with no books."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
books = _list_books(dummy_request, one_user)
assert books == []
def test_create_raises_error_for_incomplete_post_data(dummy_request, db_session, one_user):
"""Test that create raises HTTPBadRequest for missing title."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_create_book(dummy_request, one_user)
def test_create_raises_error_for_bad_date_format(dummy_request, db_session, one_user):
"""Test that create raises HTTPBadRequest for incorrect date format."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%Y-%m-%d')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_create_book(dummy_request, one_user)
def test_create_adds_new_book_to_the_database(dummy_request, db_session, one_user):
"""Test that create adds a new Book to the database."""
db_session.add(one_user)
assert len(db_session.query(Book).all()) == 0
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
_create_book(dummy_request, one_user)
assert len(db_session.query(Book).all()) == 1
def test_create_returns_dict_with_new_book_data(dummy_request, db_session, one_user):
"""Test that create returns dict with the new Book's data."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
assert isinstance(res, dict)
assert all(prop in res for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_create_creates_new_book_using_post_data(dummy_request, db_session, one_user):
"""Test that create uses POST data to create the new Book."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
new_book = db_session.query(Book).get(res['id'])
for prop in ['title', 'author', 'isbn']:
assert getattr(new_book, prop) == data[prop]
assert new_book.pub_date.strftime('%m/%d/%Y') == data['pub_date']
def test_create_sets_email_user_as_owner_of_new_book(dummy_request, db_session, one_user):
"""Test that create uses email from POST data to set Book owner."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
new_book = db_session.query(Book).get(res['id'])
assert one_user is new_book.user
def test_create_creates_new_book_with_none_values(dummy_request, db_session, one_user):
"""Test that create sets values to None when not given."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
}
dummy_request.POST = data
res = _create_book(dummy_request, one_user)
assert res['author'] is None
assert res['isbn'] is None
assert res['pub_date'] is None
def test_list_has_all_books_for_user(dummy_request, db_session, one_user):
"""Test that list returns filled list for user with multiple books."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
books = _list_books(dummy_request, one_user)
assert len(books) == len(one_user.books)
def test_list_returns_list_of_dict_with_book_data(dummy_request, db_session, one_user):
"""Test that list returns list of dict with the user Book data."""
db_session.add(one_user)
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.GET = data
res = _list_books(dummy_request, one_user)
for book in res:
assert all(prop in book for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_update_raises_error_for_bad_date_format(dummy_request, db_session, one_user):
"""Test that update raises HTTPBadRequest for incorrect date format."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'pub_date': FAKE.date(pattern='%Y-%m-%d')
}
dummy_request.POST = data
with pytest.raises(HTTPBadRequest):
_update_book(dummy_request, book)
def test_update_changes_single_value_for_given_book_using_post_data(dummy_request, db_session, one_user):
"""Test that update changes the values on the given book from POST data."""
db_session.add(one_user)
book = db_session.query(Book).first()
new_author = FAKE.name()
assert new_author != book.author
data = {
'email': one_user.email,
'password': 'password',
'author': new_author
}
dummy_request.POST = data
_update_book(dummy_request, book)
assert book.author == new_author
def test_update_changes_all_values_for_given_book_using_post_data(dummy_request, db_session, one_user):
"""Test that update changes the values on the given book from POST data."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
for prop in ['title', 'author', 'isbn', 'pub_date']:
assert getattr(book, prop) != data[prop]
dummy_request.POST = data
_update_book(dummy_request, book)
for prop in ['title', 'author', 'isbn', 'pub_date']:
assert getattr(book, prop) == data[prop]
def test_update_returns_dict_with_updated_book_data(dummy_request, db_session, one_user):
"""Test that update returns dict with the new Book's data."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
'title': FAKE.sentence(nb_words=3),
'author': FAKE.name(),
'isbn': FAKE.isbn13(separator="-"),
'pub_date': FAKE.date(pattern='%m/%d/%Y')
}
dummy_request.POST = data
res = _update_book(dummy_request, book)
assert isinstance(res, dict)
assert all(prop in res for prop in
['id', 'title', 'author', 'isbn', 'pub_date'])
def test_delete_returns_nothing(dummy_request, db_session, one_user):
"""Test that delete returns None."""
db_session.add(one_user)
book = db_session.query(Book).first()
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.POST = data
res = _delete_book(dummy_request, book)
assert res is None
def test_delete_removes_book_from_database(dummy_request, db_session, one_user):
"""Test that delete removes the given book from the database."""
db_session.add(one_user)
book = db_session.query(Book).first()
book_id = book.id
data = {
'email': one_user.email,
'password': 'password',
}
dummy_request.POST = data
_delete_book(dummy_request, book)
db_session.commit()
assert db_session.query(Book).get(book_id) is None
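The tests above pin down the behaviour expected of validate_user; the following is a minimal sketch consistent with those tests, not the project's actual implementation — the User model and its verify_password helper are assumptions.
# Minimal sketch of the contract exercised by the tests above; not the project's
# actual code. `User` and `User.verify_password` are assumptions.
from pyramid.httpexceptions import HTTPBadRequest, HTTPForbidden

def validate_user_sketch(dbsession, data):
    """Return the User matching data['email'] when the password is correct."""
    if 'email' not in data or 'password' not in data:
        raise HTTPBadRequest    # incomplete credentials
    user = dbsession.query(User).filter_by(email=data['email']).first()  # User model is assumed
    if user is None or not user.verify_password(data['password']):
        raise HTTPForbidden     # unknown email or wrong password
    return user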
| 32.234568
| 105
| 0.662581
|
8c6c9dc735641b1bbbac95e1be04188dc9e45f66
| 407
|
py
|
Python
|
powerdns/migrations/0033_auto_20161114_1442.py
|
mike-johnson-jr/django-powerdns-dnssec
|
c96973ad1d619e03299fb9f26eb43064a584711b
|
[
"BSD-2-Clause"
] | 32
|
2015-06-12T12:26:40.000Z
|
2021-11-11T13:43:29.000Z
|
powerdns/migrations/0033_auto_20161114_1442.py
|
mike-johnson-jr/django-powerdns-dnssec
|
c96973ad1d619e03299fb9f26eb43064a584711b
|
[
"BSD-2-Clause"
] | 160
|
2015-07-09T11:53:51.000Z
|
2019-10-17T05:21:30.000Z
|
powerdns/migrations/0033_auto_20161114_1442.py
|
mike-johnson-jr/django-powerdns-dnssec
|
c96973ad1d619e03299fb9f26eb43064a584711b
|
[
"BSD-2-Clause"
] | 20
|
2015-06-29T12:32:47.000Z
|
2019-11-21T16:53:28.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('powerdns', '0032_auto_20161102_2006'),
]
operations = [
migrations.AlterUniqueTogether(
name='domainowner',
unique_together=set([('domain', 'owner', 'ownership_type')]),
),
]
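For context, the AlterUniqueTogether operation above corresponds to a model Meta roughly like the fragment below; only the unique_together tuple is taken from the migration, the field definitions and related models are assumptions.
# Hypothetical model fragment implied by the migration above; the field types and
# related models are assumptions — only unique_together comes from the migration.
class DomainOwner(models.Model):
    domain = models.ForeignKey('powerdns.Domain', on_delete=models.CASCADE)   # assumed
    owner = models.ForeignKey('auth.User', on_delete=models.CASCADE)          # assumed
    ownership_type = models.CharField(max_length=16)                          # assumed
    class Meta:
        unique_together = (('domain', 'owner', 'ownership_type'),)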
| 21.421053
| 73
| 0.621622
|
ed186ba769f906f86bd6ebfce4517354492b40e3
| 3,271
|
py
|
Python
|
ner/arguments.py
|
ROBINADC/BiGRU-CRF-with-Attention-for-NER
|
b9e037ebd6e1d56500ffb60c6030013982c17ded
|
[
"MIT"
] | 27
|
2020-10-04T07:59:25.000Z
|
2022-03-31T03:35:35.000Z
|
ner/arguments.py
|
janerjzou/BiGRU-CRF-with-Attention-for-NER
|
b9e037ebd6e1d56500ffb60c6030013982c17ded
|
[
"MIT"
] | 6
|
2020-12-28T02:47:58.000Z
|
2021-11-06T03:47:22.000Z
|
ner/arguments.py
|
janerjzou/BiGRU-CRF-with-Attention-for-NER
|
b9e037ebd6e1d56500ffb60c6030013982c17ded
|
[
"MIT"
] | 12
|
2020-06-28T15:48:17.000Z
|
2022-03-20T19:40:58.000Z
|
# -*- coding: utf-8 -*-
"""
Arguments
Created on 2020/5/13
"""
__author__ = "Yihang Wu"
class Arguments:
"""
General settings of arguments
"""
# Device
device = 'cuda'
# Path
raw_data_dir = '../data/raw'
raw_data_train = raw_data_dir + '/train.csv'
raw_data_val = raw_data_dir + '/val.csv'
raw_data_test = raw_data_dir + '/test.csv'
data_dir = '../data'
dataset_path = data_dir + '/dataset.pkl'
lookup_path = data_dir + '/lookup.pkl'
padded_dataset_path = data_dir + '/padded_dataset.pkl'
result_dir = '../result'
event_dir = result_dir + '/event'
ckpt_dir = result_dir + '/ckpt'
embed_dir = result_dir + '/embed'
embed_word_path = embed_dir + '/word.npy' # 48
embed_char_path = embed_dir + '/char.npy' # 48
embed_pos_path = embed_dir + '/pos.npy' # 32
embed_xavier_path = embed_dir + '/xavier.npy' # 128
embedding_paths = ( # these embeddings will be concatenated [CHECK IT]
embed_word_path, embed_char_path, embed_pos_path
)
# embedding_paths = (embed_xavier_path,) # for base comparing
    # Special tokens and corresponding indexes
word_pad = ('<pad>', 0)
word_oov = ('<oov>', 1)
entity_pad = ('<p>', 0)
entity_bos = ('<bos>', 1)
entity_eos = ('<eos>', 2)
# Train
use_pretrained_embeddings = True # [CHECK IT]
finished_epoch = 0
num_epochs = 100
batch_size = 64
weight_decay = 0.001
lr = 1e-3
min_lr = 5e-5
lr_decay_factor = 0.95
# Model Common Part
num_vocabs = None # set automatically
num_entities = None # set automatically
embed_dim = 128 # embedding size [CHECK IT]
model_dim = 256
# Early Stop
min_delta = 0.
patience = 6
# Test
test_ckpt = ckpt_dir + '/gru_attn_crf_1block/ckpt_epoch_21.pt' # [CHECK IT]
test_batch_size = 200
write_to_csv = True
csv_dir = result_dir + '/csv'
class AttnCRFArguments(Arguments):
"""
Arguments for Attention-CRF model
GammaAttnCRF in model.py
"""
model_name = 'attn_crf'
model_dim = 128
attention_type = 'scaled_dot'
num_blocks = 1
num_heads = 4
ff_hidden_dim = 512
dropout_rate = 0.2
class GRUCRFArguments(Arguments):
"""
Arguments for BiGRU-CRF model
GammaGRUCRF in model.py
"""
model_name = 'gru_crf'
gru_hidden_dim = 100
class GRUAttnCRFArguments(Arguments):
"""
Arguments for BiGRU-Attention-CRF model
GammaGRUAttnCRF in model.py
"""
model_name = 'gru_attn_crf'
attention_type = 'scaled_dot' # {dot, scaled_dot, cosine, general} tested
num_blocks = 1 # {1, 2, 3} tested
num_heads = 4
ff_hidden_dim = 512
dropout_rate = 0.2
gru_hidden_dim = Arguments.model_dim // 2
class GRUArguments(Arguments):
"""
Arguments for BiGRU model
GammaGRU in model.py
"""
model_name = 'gru'
gru_hidden_dim = 120
class GRUAttnArguments(Arguments):
"""
Arguments for BiGRU-Attention model
GammaGRUAttn in model.py
"""
model_name = 'gru_attn'
attention_type = 'scaled_dot'
num_blocks = 1 # {1, 2, 3} tested
num_heads = 4
ff_hidden_dim = 512
dropout_rate = 0.2
gru_hidden_dim = Arguments.model_dim // 2
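A short sketch of how these argument classes are typically consumed: pick one subclass and read its class attributes as a configuration object. The train() function below is a placeholder, not part of this module.
# Usage sketch; train() is a placeholder and not part of this module.
def train(args):
    print(f'training {args.model_name} on {args.device} for {args.num_epochs} epochs '
          f'(batch size {args.batch_size}, lr {args.lr})')

if __name__ == '__main__':
    args = GRUAttnCRFArguments   # the class itself is used directly as the config object
    train(args)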
| 22.558621
| 80
| 0.633445
|
83a23b6d8b6a05fa1e050f8453dfdc1ebc141c1b
| 14,585
|
py
|
Python
|
qcfractal/storage_sockets/db_queries.py
|
alongd/QCFractal
|
f40dd1a9efa0e0fa44746e8db6dba28967ac0202
|
[
"BSD-3-Clause"
] | null | null | null |
qcfractal/storage_sockets/db_queries.py
|
alongd/QCFractal
|
f40dd1a9efa0e0fa44746e8db6dba28967ac0202
|
[
"BSD-3-Clause"
] | null | null | null |
qcfractal/storage_sockets/db_queries.py
|
alongd/QCFractal
|
f40dd1a9efa0e0fa44746e8db6dba28967ac0202
|
[
"BSD-3-Clause"
] | null | null | null |
from typing import List, Optional, Set, Union
from sqlalchemy import Integer, inspect
from sqlalchemy.sql import bindparam, text
from qcfractal.interface.models import Molecule, ResultRecord
from qcfractal.storage_sockets.models import MoleculeORM, ResultORM
QUERY_CLASSES = set()
class QueryBase:
# The name/alias used by the REST APIs to access this class
_class_name = None
_available_groupby = set()
# Mapping of the requested feature and the internal query method
_query_method_map = {}
def __init__(self, database_name, max_limit=1000):
self.database_name = database_name
self.max_limit = max_limit
def __init_subclass__(cls, **kwargs):
if cls not in QUERY_CLASSES:
QUERY_CLASSES.add(cls)
super().__init_subclass__(**kwargs)
def query(self, session, query_key, limit=0, skip=0, include=None, exclude=None, **kwargs):
if query_key not in self._query_method_map:
raise TypeError(f"Query type {query_key} is unimplemented for class {self._class_name}")
self.session = session
return getattr(self, self._query_method_map[query_key])(**kwargs)
def execute_query(self, sql_statement, with_keys=True, **kwargs):
"""Execute sql statemet, apply limit, and return results as dict if needed"""
# TODO: check count first, way to iterate
# sql_statement += f' LIMIT {self.max_limit}'
result = self.session.execute(sql_statement, kwargs)
keys = result.keys() # get keys before fetching
result = result.fetchall()
self.session.commit()
# create a list of dict with the keys and values of the results (instead of tuples)
if with_keys:
result = [dict(zip(keys, res)) for res in result]
return result
def _base_count(self, table_name: str, available_groupbys: Set[str], groupby: Optional[List[str]] = None):
if groupby:
bad_groups = set(groupby) - available_groupbys
if bad_groups:
raise AttributeError(f"The following groups are not permissible: {missing}")
global_str = ", ".join(groupby)
select_str = global_str + ", "
extra_str = f"""GROUP BY {global_str}\nORDER BY {global_str}"""
else:
select_str = ""
extra_str = ""
sql_statement = f"""
select {select_str}count(*) from {table_name}
{extra_str}
"""
ret = self.execute_query(sql_statement, with_keys=True)
if groupby:
return ret
else:
return ret[0]["count"]
    @classmethod
def _raise_missing_attribute(cls, query_key, missing_attribute, amend_msg=""):
"""Raises error for missing attribute in a message suitable for the REST user"""
raise AttributeError(f"To query {cls._class_name} for {query_key} " f"you must provide {missing_attribute}.")
# ----------------------------------------------------------------------------
class TaskQueries(QueryBase):
_class_name = "task"
_query_method_map = {"counts": "_task_counts"}
def _task_counts(self):
sql_statement = f"""
SELECT tag, priority, status, count(*)
FROM task_queue
WHERE True
group by tag, priority, status
order by tag, priority, status
"""
return self.execute_query(sql_statement, with_keys=True)
# ----------------------------------------------------------------------------
class DatabaseStatQueries(QueryBase):
_class_name = "database_stats"
_query_method_map = {
"table_count": "_table_count",
"database_size": "_database_size",
"table_information": "_table_information",
}
def _table_count(self, table_name=None):
if table_name is None:
self._raise_missing_attribute("table_name", "table name")
sql_statement = f"SELECT count(*) from {table_name}"
return self.execute_query(sql_statement, with_keys=False)[0]
def _database_size(self):
sql_statement = f"SELECT pg_database_size('{self.database_name}')"
return self.execute_query(sql_statement, with_keys=True)[0]["pg_database_size"]
def _table_information(self):
sql_statement = f"""
SELECT relname AS table_name
, c.reltuples::BIGINT AS row_estimate
, pg_total_relation_size(c.oid) AS total_bytes
, pg_indexes_size(c.oid) AS index_bytes
, pg_total_relation_size(reltoastrelid) AS toast_bytes
FROM pg_class c
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
WHERE relkind = 'r';
"""
result = self.execute_query(sql_statement, with_keys=False)
ret = []
for row in result:
if ("pg_" in row[0]) or ("sql_" in row[0]):
continue
ret.append(list(row))
ret = {"columns": ["table_name", "row_estimate", "total_bytes", "index_bytes", "toast_bytes"], "rows": ret}
return ret
class ResultQueries(QueryBase):
_class_name = "result"
_query_method_map = {"count": "_count"}
def _count(self, groupby: Optional[List[str]] = None):
available_groupbys = {"result_type", "status"}
return self._base_count("base_result", available_groupbys, groupby=groupby)
class MoleculeQueries(QueryBase):
_class_name = "molecule"
_query_method_map = {"count": "_count"}
def _count(self, groupby: Optional[List[str]] = None):
available_groupbys = set()
return self._base_count("molecule", available_groupbys, groupby=groupby)
# ----------------------------------------------------------------------------
class TorsionDriveQueries(QueryBase):
_class_name = "torsiondrive"
_query_method_map = {
"initial_molecules": "_get_initial_molecules",
"initial_molecules_ids": "_get_initial_molecules_ids",
"final_molecules": "_get_final_molecules",
"final_molecules_ids": "_get_final_molecules_ids",
"return_results": "_get_return_results",
}
def _get_initial_molecules_ids(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("initial_molecules_ids", "torsion drive id")
sql_statement = f"""
select initial_molecule from optimization_procedure as opt where opt.id in
(
select opt_id from optimization_history where torsion_id = {torsion_id}
)
order by opt.id
"""
return self.execute_query(sql_statement, with_keys=False)
def _get_initial_molecules(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("initial_molecules", "torsion drive id")
sql_statement = f"""
select molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.initial_molecule
where opt.id in
(select opt_id from optimization_history where torsion_id = {torsion_id})
"""
return self.execute_query(sql_statement, with_keys=True)
def _get_final_molecules_ids(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("final_molecules_ids", "torsion drive id")
sql_statement = f"""
select final_molecule from optimization_procedure as opt where opt.id in
(
select opt_id from optimization_history where torsion_id = {torsion_id}
)
order by opt.id
"""
return self.execute_query(sql_statement, with_keys=False)
def _get_final_molecules(self, torsion_id=None):
if torsion_id is None:
self._raise_missing_attribute("final_molecules", "torsion drive id")
sql_statement = f"""
select molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.final_molecule
where opt.id in
(select opt_id from optimization_history where torsion_id = {torsion_id})
"""
return self.execute_query(sql_statement, with_keys=True)
def _get_return_results(self, torsion_id=None):
"""All return results ids of a torsion drive"""
if torsion_id is None:
self._raise_missing_attribute("return_results", "torsion drive id")
sql_statement = f"""
select opt_res.opt_id, result.id as result_id, result.return_result from result
join opt_result_association as opt_res
on result.id = opt_res.result_id
where opt_res.opt_id in
(
select opt_id from optimization_history where torsion_id = {torsion_id}
)
"""
return self.execute_query(sql_statement, with_keys=False)
class OptimizationQueries(QueryBase):
_class_name = "optimization"
_exclude = ["molecule_hash", "molecular_formula", "result_type"]
_query_method_map = {
"all_results": "_get_all_results",
"final_result": "_get_final_results",
"initial_molecule": "_get_initial_molecules",
"final_molecule": "_get_final_molecules",
}
def _remove_excluded_keys(self, data):
for key in self._exclude:
data.pop(key, None)
def _get_all_results(self, optimization_ids: List[Union[int, str]] = None):
"""Returns all the results objects (trajectory) of each optmization
Returns list(list) """
if optimization_ids is None:
self._raise_missing_attribute("all_results", "List of optimizations ids")
# row_to_json(result.*)
sql_statement = text(
"""
select * from base_result
join (
select opt_id, result.* from result
join opt_result_association as traj
on result.id = traj.result_id
where traj.opt_id in :optimization_ids
) result
on base_result.id = result.id
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(ResultORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
if key not in ret:
ret[key] = []
ret[key].append(ResultRecord(**rec))
return ret
def _get_final_results(self, optimization_ids: List[Union[int, str]] = None):
"""Return the actual results objects of the best result in each optimization"""
if optimization_ids is None:
self._raise_missing_attribute("final_result", "List of optimizations ids")
sql_statement = text(
"""
select * from base_result
join (
select opt_id, result.* from result
join (
select opt.opt_id, opt.result_id, max_pos from opt_result_association as opt
inner join (
select opt_id, max(position) as max_pos from opt_result_association
where opt_id in :optimization_ids
group by opt_id
) opt2
on opt.opt_id = opt2.opt_id and opt.position = opt2.max_pos
) traj
on result.id = traj.result_id
) result
on base_result.id = result.id
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(ResultORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
ret[key] = ResultRecord(**rec)
return ret
def _get_initial_molecules(self, optimization_ids=None):
if optimization_ids is None:
self._raise_missing_attribute("initial_molecule", "List of optimizations ids")
sql_statement = text(
"""
select opt.id as opt_id, molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.initial_molecule
where opt.id in :optimization_ids
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(MoleculeORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
ret[key] = Molecule(**rec)
return ret
def _get_final_molecules(self, optimization_ids=None):
if optimization_ids is None:
self._raise_missing_attribute("final_molecule", "List of optimizations ids")
sql_statement = text(
"""
select opt.id as opt_id, molecule.* from molecule
join optimization_procedure as opt
on molecule.id = opt.final_molecule
where opt.id in :optimization_ids
"""
)
# bind and expand ids list
sql_statement = sql_statement.bindparams(bindparam("optimization_ids", expanding=True))
# column types:
columns = inspect(MoleculeORM).columns
sql_statement = sql_statement.columns(opt_id=Integer, *columns)
query_result = self.execute_query(sql_statement, optimization_ids=list(optimization_ids))
ret = {}
for rec in query_result:
self._remove_excluded_keys(rec)
key = rec.pop("opt_id")
ret[key] = Molecule(**rec)
return ret
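A sketch of how the query classes above are dispatched through QueryBase.query(); the SQLAlchemy session and the torsion-drive id are assumptions.
# Hedged usage sketch — the session object and the id value are assumptions.
def example_torsiondrive_lookup(session):
    td_queries = TorsionDriveQueries(database_name='qcfractal_db', max_limit=100)
    # 'initial_molecules_ids' is routed to _get_initial_molecules_ids via _query_method_map
    return td_queries.query(session, 'initial_molecules_ids', torsion_id=42)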
| 33.299087
| 117
| 0.610559
|
84d32e4cd18888f42a82bbdc3ccd4915db53aea0
| 12,969
|
py
|
Python
|
sbadmin/settings/__init__.py
|
Nels885/csd_dashboard
|
aa5a3b970c50a2a93af722f962bd87c3728f233c
|
[
"MIT"
] | null | null | null |
sbadmin/settings/__init__.py
|
Nels885/csd_dashboard
|
aa5a3b970c50a2a93af722f962bd87c3728f233c
|
[
"MIT"
] | null | null | null |
sbadmin/settings/__init__.py
|
Nels885/csd_dashboard
|
aa5a3b970c50a2a93af722f962bd87c3728f233c
|
[
"MIT"
] | null | null | null |
"""
Django settings for sbadmin project.
Generated by 'django-admin startproject' using Django 2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import shutil
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY = 'q2sb*vqltbpi59f#l2c&mak*%h&xzv1)i^#e0_as^cmx^-9)8x'
# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = True
#
# ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
# django apps
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'ckeditor',
'tempus_dominus',
'bootstrap_modal_forms',
'widget_tweaks',
'constance',
'django_inlinecss',
'celery_progress',
'django_celery_beat',
'django_celery_results',
'encrypted_fields',
# My apps
'dashboard.apps.DashboardConfig',
'raspeedi.apps.RaspeediConfig',
'squalaetp.apps.SqualaetpConfig',
'reman.apps.RemanConfig',
'tools.apps.ToolsConfig',
'api.apps.ApiConfig',
'import_export.apps.ImportExportConfig',
'psa.apps.PsaConfig',
'ford.apps.FordConfig',
'renault.apps.RenaultConfig',
'vag.apps.VagConfig',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'crum.CurrentRequestUserMiddleware',
]
ROOT_URLCONF = 'sbadmin.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(os.path.dirname(BASE_DIR), 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.i18n',
'sbadmin.context_processors.get_release',
'sbadmin.context_processors.get_ip',
'sbadmin.context_processors.get_admin_emails'
],
},
},
]
WSGI_APPLICATION = 'sbadmin.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
# }
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# API REST Framework
# https://www.django-rest-framework.org
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'api.utils.TokenAuthSupportQueryString',
),
# 'DEFAULT_PERMISSION_CLASSES': (
# 'rest_framework.permissions.IsAuthenticated',
# ),
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 10,
'TEST_REQUEST_DEFAULT_FORMAT': 'json',
'DATETIME_FORMAT': '%d-%m-%Y %H:%M:%S',
}
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'fr'
TIME_ZONE = 'Europe/Paris'
USE_I18N = True
USE_L10N = True
USE_TZ = True
gettext = lambda x: x
LANGUAGES = (
('fr', gettext('French')),
('en', gettext('English')),
)
LOCALE_PATHS = [
os.path.join(BASE_DIR, 'locale'),
]
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(os.path.dirname(BASE_DIR), "static"),
)
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
MEDIA_ROOT = os.path.join(os.path.dirname(BASE_DIR), 'media')
LOGIN_REDIRECT_URL = '/dashboard/charts/'
LOGOUT_REDIRECT_URL = '/'
# Configuration sessions
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
SESSION_COOKIE_AGE = 1209600
# Configuration CkEditor
CKEDITOR_CONFIGS = {
'default': {
'toolbar': 'Custom',
'width': 765,
'toolbar_Custom': [
['Bold', 'Italic', 'Underline', 'Strike', '-', 'CopyFormatting', 'RemoveFormat'],
['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'JustifyLeft', 'JustifyCenter',
'JustifyRight', 'JustifyBlock'],
['TextColor', 'BGColor', '-', 'Link', 'Unlink', 'Anchor'],
['Scayt', '-', 'Source']
],
},
'comment': {
'toolbar': 'Custom',
'height': 200,
'width': '100%',
'toolbar_Custom': [
['Bold', 'Italic', 'Underline', 'Strike', '-', 'CopyFormatting', 'RemoveFormat'],
['NumberedList', 'BulletedList', '-', 'Outdent', 'Indent', '-', 'JustifyLeft', 'JustifyCenter',
'JustifyRight', 'JustifyBlock'],
['TextColor', 'BGColor', 'FontSize', '-', 'Link', 'Unlink', 'Anchor'],
['Scayt', '-', 'Source']
],
}
}
# Configuration Tempus Dominus
TEMPUS_DOMINUS_LOCALIZE = True
TEMPUS_DOMINUS_INCLUDE_ASSETS = False
# Configuration DJANGO-CONSTANCE
CONSTANCE_BACKEND = 'constance.backends.redisd.RedisBackend'
CONSTANCE_REDIS_CONNECTION = 'redis://localhost:6379/1'
CONSTANCE_CONFIG = {
# General Options
'SITE_NAME': ('CSD Dashboard', 'Website title'),
'SITE_DESCRIPTION': ('', 'Website description'),
    'WEBSITE_DOMAIN': ('127.0.0.1:8000', 'Website domain name'),
# Network Options
'BASE_DIR': ('~/Documents/CSD_DATABASE', 'Network drive path'),
'XLS_RASPEEDI_FILE': ('PROG/RASPEEDI/table_boitier_PSA.xlsx', 'xls raspeedi file'),
'XLS_SQUALAETP_FILE': ('EXTS/squalaetp.xls', 'xls squalaetp file'),
'XLS_ATTRIBUTS_FILE': ('EXTS/Attributs CORVET.xlsx', 'xls attributs file'),
'CSV_EXTRACTION_FILE': ('EXTS/extraction.csv', 'csv extraction file'),
'XLS_ECU_REF_BASE': ('REMAN/PSA/Base ref REMAN.xlsx', 'xls ECU ref base'),
'XLS_DELAY_PATH': ('RH/AnalyseRetards', 'Path of xls delay files'),
'XLS_DELAY_FILES': (
'PSA.xlsx, ILOTAUTRE.xlsx, LaboQual.xlsx, DEFAUT.xlsx, CLARION.xlsx', 'List of xls delay file'
),
'XML_CORVET_PATH': ('LOGS/CORVET_XML_TEST', 'xml Corvet path'),
'TAG_XELON_PATH': ('LOGS/CALIBRE', 'tag xelon path'),
'TAG_XELON_LOG_PATH': ('LOGS/LOG_CONFIG_PROD', 'tag xelon log path'),
# CSD Repair Options
'VIN_ERROR_TO_EMAIL_LIST': ('test1@test.com; test2@test.com', 'VIN error TO email list'),
'LATE_PRODUCTS_TO_EMAIL_LIST': ('test1@test.com; test2@test.com', 'Late products TO email list'),
'REMAN_TO_EMAIL_LIST': ('test1@test.com; test2@test.com', 'REMAN TO email list'),
'CHANGE_VIN_TO_EMAIL_LIST': ('test1@test.com; test2@test.com', 'Change Xelon VIN TO email list'),
'CSD_CC_EMAIL_LIST': ('test1@test.com; test2@test.com', 'CSD Atelier CC email list'),
'CORVET_USER': ('', 'CORVET user for RepairLab'),
'CORVET_PWD': ('', 'CORVET password for RepairLab'),
'SQUALAETP_FILE_LIST': ('squalaetp_cal.xls, squalaetp_ecu.xls, squalaetp_prog.xls', 'Squalaetp file list'),
# REMAN Options
'ECU_TO_EMAIL_LIST': ('test1@test.com; test2@test.com', 'REMAN TO email list'),
'ECU_CC_EMAIL_LIST': ('', 'REMAN CC email list'),
'BATCH_EXPORT_FILE': ('EXTS/reman_lots.csv', 'File name for exporting batch'),
'REPAIR_EXPORT_FILE': ('EXTS/reman_repairs.csv', 'File name for exporting repairs'),
'CHECKOUT_EXPORT_FILE': ('EXTS/reman_output.csv', 'File name for exporting data from check out'),
'SCAN_IN_OUT_EXPORT_FILE': ('EXTS/BASE_REF_REMAN.xlsx', 'File name for exporting Base Ref REMAN of SCAN IN/OUT'),
'DICT_YEAR': (
"{2020: 'C', 2021: 'D', 2022: 'G', 2023: 'H', 2024: 'K', 2025: 'L', 2026: 'O', 2027: 'T', 2028: 'U'}",
'REMAN batch date formatting dictionary'
),
# MQTT Options
"MQTT_TOPIC": ('TEMP/TC-01', 'Topic subcribe'),
'MQTT_TEMP_ADJ': (4, 'Temp adjust', int),
'MQTT_CLIENT': ('', 'Client name'),
'MQTT_USER': ('', 'Login'),
'MQTT_PSWD': ('', 'Password'),
'MQTT_BROKER': ('test.mosquitto.org', 'Server address'),
'MQTT_PORT': (1883, 'Server port', int),
'KEEP_ALIVE': (45, 'Keep alive', int),
# tools Options
'SUPTECH_TO_EMAIL_LIST': ('test1@test.com; test2@test.com', 'Suptech TO email list'),
'SUPTECH_MANAGER_TO_EMAIL_LIST': ('test1@test.com; test2@test.com', 'Suptech Manager TO email list'),
'PRINTER_STREAM_URL': ('http://10.115.141.42:8080/?action=stream', '3D printer streaming URL'),
'PROXY_HOST_SCRAPING': ('', 'Proxy HOST for Scraping'),
'PROXY_PORT_SCRAPING': ('', 'Proxy PORT for Scraping'),
}
CONSTANCE_CONFIG_FIELDSETS = {
'1. General Options': ('SITE_NAME', 'SITE_DESCRIPTION', 'WEBSITE_DOMAIN'),
'2. Network Options': (
'BASE_DIR', 'XLS_RASPEEDI_FILE', 'XLS_SQUALAETP_FILE', 'XLS_ATTRIBUTS_FILE', 'CSV_EXTRACTION_FILE',
'XLS_ECU_REF_BASE', 'XLS_DELAY_PATH', 'XLS_DELAY_FILES', 'XML_CORVET_PATH', 'TAG_XELON_PATH',
'TAG_XELON_LOG_PATH'
),
'3. CSD Repair Options': (
'VIN_ERROR_TO_EMAIL_LIST', 'LATE_PRODUCTS_TO_EMAIL_LIST', 'REMAN_TO_EMAIL_LIST', 'CHANGE_VIN_TO_EMAIL_LIST',
'CSD_CC_EMAIL_LIST', 'CORVET_USER', 'CORVET_PWD', 'SQUALAETP_FILE_LIST'
),
'4. REMAN Options': (
'ECU_TO_EMAIL_LIST', 'ECU_CC_EMAIL_LIST', 'BATCH_EXPORT_FILE', 'REPAIR_EXPORT_FILE',
'CHECKOUT_EXPORT_FILE', 'SCAN_IN_OUT_EXPORT_FILE', 'DICT_YEAR'
),
'5. Tools Options': (
'SUPTECH_TO_EMAIL_LIST', 'SUPTECH_MANAGER_TO_EMAIL_LIST', 'PRINTER_STREAM_URL', 'PROXY_HOST_SCRAPING',
'PROXY_PORT_SCRAPING'
),
'6. MQTT Options': (
'MQTT_TOPIC', 'MQTT_TEMP_ADJ', 'MQTT_CLIENT', 'MQTT_USER', 'MQTT_PSWD', 'MQTT_BROKER', 'MQTT_PORT', 'KEEP_ALIVE'
)
}
# CELERY STUFF
CELERY_BROKER_URL = 'redis://localhost:6379/0'
CELERY_RESULT_BACKEND = 'django-db'
CELERY_ACCEPT_CONTENT = ["pickle", "json", "msgpack", "yaml"]
CELERY_TASK_IGNORE_RESULT = False
CELERY_TIMEZONE = "Europe/Paris"
###############################
# DJANGO LOGGER CONFIGURATION
###############################
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '[{asctime}] [{process:d}] [{levelname}] {message}',
'datefmt': "%Y-%m-%d %H:%M:%S",
'style': '{',
},
'simple': {
'format': '[{levelname}] {message}',
'style': '{',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'simple',
},
'console_verbose': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
'mail_admin': {
'level': 'ERROR',
'class': 'django.utils.log.AdminEmailHandler',
'include_html': True,
}
},
'root': {
'handlers': ['console'],
'level': 'WARNING',
},
'loggers': {
'django': {
'handlers': ['console_verbose'],
'level': os.getenv('DJANGO_LOG_LEVEL', 'WARNING'),
'propagate': False,
},
'django.request': {
'handlers': ['mail_admin'],
'level': 'ERROR',
'propagate': False,
},
'command': {
'handlers': ['mail_admin', 'console_verbose'],
'level': 'ERROR',
'propagate': False,
},
'celery': {
'handlers': ['mail_admin', 'console_verbose'],
'level': 'ERROR',
'propagate': False,
}
},
}
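SECRET_KEY, DEBUG, ALLOWED_HOSTS, and DATABASES are deliberately left commented out above; a deployment would normally supply them from an environment-specific settings module. The sketch below is one hypothetical way to do that — the module name and every value shown are assumptions.
# Hypothetical sbadmin/settings/prod.py — module name and all values are assumptions.
import os
from sbadmin.settings import *  # noqa: F401,F403

SECRET_KEY = os.environ['DJANGO_SECRET_KEY']
DEBUG = False
ALLOWED_HOSTS = ['csd-dashboard.example.com']   # assumed hostname
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME': os.environ.get('POSTGRES_DB', 'sbadmin'),
        'USER': os.environ.get('POSTGRES_USER', 'sbadmin'),
        'PASSWORD': os.environ.get('POSTGRES_PASSWORD', ''),
        'HOST': os.environ.get('POSTGRES_HOST', 'localhost'),
        'PORT': os.environ.get('POSTGRES_PORT', '5432'),
    }
}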
| 33.861619
| 120
| 0.633819
|
baa6cea7cd3ac0959f35b5f7066948029f9c4690
| 3,859
|
py
|
Python
|
tests/unit_tests/data_steward/cdr_cleaner/cleaning_rules/rdr_observation_source_concept_id_suppression_test.py
|
dcampbell-vumc/curation
|
327296b12cc0864cf32636be03b2b020447bd012
|
[
"MIT"
] | null | null | null |
tests/unit_tests/data_steward/cdr_cleaner/cleaning_rules/rdr_observation_source_concept_id_suppression_test.py
|
dcampbell-vumc/curation
|
327296b12cc0864cf32636be03b2b020447bd012
|
[
"MIT"
] | null | null | null |
tests/unit_tests/data_steward/cdr_cleaner/cleaning_rules/rdr_observation_source_concept_id_suppression_test.py
|
dcampbell-vumc/curation
|
327296b12cc0864cf32636be03b2b020447bd012
|
[
"MIT"
] | null | null | null |
"""
Unit Test for the rdr_observation_source_concept_id_suppression module.
Remove three irrelevant observation_source_concept_ids from the RDR dataset.
Original Issue: DC-529
The intent is to remove PPI records from the observation table in the RDR
export where observation_source_concept_id in (43530490, 43528818, 43530333).
The records for removal should be archived in the dataset sandbox.
"""
# Python imports
import unittest
# Third party imports
# Project imports
from constants.bq_utils import WRITE_TRUNCATE
from constants.cdr_cleaner import clean_cdr as clean_consts
from cdr_cleaner.cleaning_rules.rdr_observation_source_concept_id_suppression import ObservationSourceConceptIDRowSuppression, SAVE_TABLE_NAME, DROP_SELECTION_QUERY, DROP_QUERY
class ObservationSourceConceptIDRowSuppressionTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
print('**************************************************************')
print(cls.__name__)
print('**************************************************************')
def setUp(self):
self.project_id = 'foo'
self.dataset_id = 'bar'
self.sandbox_id = 'baz'
self.query_class = ObservationSourceConceptIDRowSuppression(
self.project_id, self.dataset_id, self.sandbox_id)
self.assertEqual(self.query_class.get_project_id(), self.project_id)
self.assertEqual(self.query_class.get_dataset_id(), self.dataset_id)
self.assertEqual(self.query_class.get_sandbox_dataset_id(),
self.sandbox_id)
def test_setup_rule(self):
# test
self.query_class.setup_rule()
# no errors are raised, nothing happens
def test_get_query_specs(self):
# pre-conditions
self.assertEqual(self.query_class.get_affected_datasets(),
[clean_consts.RDR])
# test
result_list = self.query_class.get_query_specs()
# post conditions
expected_list = [{
clean_consts.QUERY:
DROP_SELECTION_QUERY.format(project=self.project_id,
dataset=self.dataset_id,
sandbox=self.sandbox_id,
drop_table=SAVE_TABLE_NAME)
}, {
clean_consts.QUERY:
DROP_QUERY.format(project=self.project_id,
dataset=self.dataset_id),
clean_consts.DESTINATION_TABLE:
'observation',
clean_consts.DESTINATION_DATASET:
self.dataset_id,
clean_consts.DISPOSITION:
WRITE_TRUNCATE
}]
self.assertEqual(result_list, expected_list)
def test_log_queries(self):
# pre-conditions
self.assertEqual(self.query_class.get_affected_datasets(),
[clean_consts.RDR])
store_drops = DROP_SELECTION_QUERY.format(project=self.project_id,
dataset=self.dataset_id,
sandbox=self.sandbox_id,
drop_table=SAVE_TABLE_NAME)
select_saves = DROP_QUERY.format(project=self.project_id,
dataset=self.dataset_id)
# test
with self.assertLogs(level='INFO') as cm:
self.query_class.log_queries()
expected = [
'INFO:cdr_cleaner.cleaning_rules.base_cleaning_rule:Generated SQL Query:\n'
+ store_drops,
'INFO:cdr_cleaner.cleaning_rules.base_cleaning_rule:Generated SQL Query:\n'
+ select_saves
]
# post condition
self.assertEqual(cm.output, expected)
| 37.466019
| 176
| 0.59575
|
e2a92e719df8029b372df6274fb8d2f188e11586
| 2,534
|
py
|
Python
|
python/setup.py
|
ishine/lid_kaldi
|
0f9d61dff2ee04df656fb0ace10dc9f614cfa05d
|
[
"Apache-2.0"
] | 24
|
2021-04-03T11:09:18.000Z
|
2022-02-23T15:31:29.000Z
|
python/setup.py
|
gooran/lid_kaldi
|
bd7456c472b9ceb824cc784bcd010d5c6d060934
|
[
"Apache-2.0"
] | 3
|
2021-05-15T12:37:04.000Z
|
2022-03-31T17:45:20.000Z
|
python/setup.py
|
gooran/lid_kaldi
|
bd7456c472b9ceb824cc784bcd010d5c6d060934
|
[
"Apache-2.0"
] | 6
|
2021-04-29T06:27:34.000Z
|
2021-08-30T08:49:37.000Z
|
import os
import sys
import setuptools
import shutil
import glob
import platform
# Figure out environment for cross-compile
lid_source = os.getenv("LID_SOURCE", os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
system = os.environ.get('LID_PLATFORM', platform.system())
architecture = os.environ.get('LID_ARCHITECTURE', platform.architecture()[0])
# Copy precompiled libraries
for lib in glob.glob(os.path.join(lid_source, "native/lib*.*")):
    print("Adding library", lib)
shutil.copy(lib, "lid")
# Create OS-dependent, but Python-independent wheels.
try:
from wheel.bdist_wheel import bdist_wheel
except ImportError:
cmdclass = {}
else:
class bdist_wheel_tag_name(bdist_wheel):
def get_tag(self):
abi = 'none'
if system == 'Darwin':
oses = 'macosx_10_6_x86_64'
elif system == 'Windows' and architecture == '32bit':
oses = 'win32'
elif system == 'Windows' and architecture == '64bit':
oses = 'win_amd64'
elif system == 'Linux' and architecture == '64bit':
oses = 'linux_x86_64'
elif system == 'Linux':
oses = 'linux_' + architecture
else:
raise TypeError("Unknown build environment")
return 'py3', abi, oses
cmdclass = {'bdist_wheel': bdist_wheel_tag_name}
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="lid",
version="1.0.1",
author="Igor Sitdikov",
author_email="ihar.sitdzikau@yandex.ru",
description="Spoken Language Identification",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/igorsitdikov/lid_kaldi",
packages=setuptools.find_packages(),
package_data = {'lid': ['*.so', '*.dll', '*.dyld']},
include_package_data=True,
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: Apache Software License',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries :: Python Modules'
],
cmdclass=cmdclass,
python_requires='>=3',
zip_safe=False, # Since we load so file from the filesystem, we can not run from zip file
setup_requires=['cffi>=1.0'],
install_requires=['cffi>=1.0'],
cffi_modules=['lid_builder.py:ffibuilder'],
)
| 35.194444
| 100
| 0.635754
|
3b5c09eafaec405af26501c11f072b375b00bad2
| 3,193
|
py
|
Python
|
02-multi-armed-bandits/plots.py
|
nikibobi/rl-an-introduction
|
b1aeb8263f38af6a3f5ea4663f8e40b574037e73
|
[
"MIT"
] | null | null | null |
02-multi-armed-bandits/plots.py
|
nikibobi/rl-an-introduction
|
b1aeb8263f38af6a3f5ea4663f8e40b574037e73
|
[
"MIT"
] | null | null | null |
02-multi-armed-bandits/plots.py
|
nikibobi/rl-an-introduction
|
b1aeb8263f38af6a3f5ea4663f8e40b574037e73
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
import seaborn as sns
from scipy.stats import norm
def generate_normal(num=100, e=0.01):
    return norm.ppf(np.linspace(e, 1 - e, num=num, dtype=np.float64))
normal = generate_normal()
def plot_bandits(means):
n = len(means)
data = np.repeat(normal[:, np.newaxis], n, axis=1) + means
sns.set(style='whitegrid')
ax = sns.violinplot(data=data, inner=None)
ax = sns.scatterplot(data=means, ax=ax)
ax.set_xlabel('Actions')
ax.set_ylabel('Q(a)')
ax.set_xticklabels(np.arange(n) + 1)
ax.yaxis.set_major_locator(ticker.MultipleLocator())
plt.show()
def plot_rewards(metric, n, xlim, legend, filename='rewards', title='Rewards'):
fig, ax1 = plt.subplots()
fig.canvas.set_window_title(title)
ax1.set_title(title)
ax1.plot(metric)
ax1.legend(legend)
ax1.set_xlim(0, xlim)
ax1.set_xlabel('Step')
ax1.set_ylabel('Total Reward')
ax2 = ax1.twinx()
ax2.plot(metric / n)
ax2.set_ylabel('Average Reward')
fig.tight_layout()
fig.savefig('results/{}.png'.format(filename))
plt.show()
def plot_optimal(metric, xlim, legend, filename='optimal_actions', title='Optimal Actions'):
fig, ax1 = plt.subplots()
fig.canvas.set_window_title(title)
ax1.set_title(title)
ax1.plot(metric * 100.0)
ax1.legend(legend)
ax1.set_xlim(0, xlim)
ax1.set_xlabel('Step')
ax1.set_ylim(0, 100)
ax1.set_ylabel('Optimal Action %')
ax1.yaxis.set_major_formatter(ticker.PercentFormatter())
ax2 = ax1.twinx()
ax2.plot(metric)
ax2.set_ylim(0, 1)
ax2.set_ylabel('Optimal Action P')
fig.tight_layout()
fig.savefig('results/{}.png'.format(filename))
plt.show()
def multicolor_xlabel(ax, list_of_strings, anchorpad=0, **kw):
# code from: https://stackoverflow.com/a/33162465
from matplotlib.offsetbox import AnchoredOffsetbox, TextArea, HPacker, VPacker
boxes = [TextArea(text, textprops=dict(color='C{}'.format(i), ha='left', va='bottom', **kw))
for i, text in enumerate(list_of_strings)]
xbox = HPacker(children=boxes, align='center', pad=0, sep=5)
anchored_xbox = AnchoredOffsetbox(loc='lower center', child=xbox, pad=anchorpad, frameon=False,
bbox_transform=ax.transAxes, borderpad=0.0)
ax.add_artist(anchored_xbox)
def plot_summary(results, steps):
fig, ax = plt.subplots()
fig.set_size_inches((16, 9))
title = 'Summary'
fig.canvas.set_window_title(title)
ax.set_title(title)
params = []
for x, y, label, param in results:
ax.plot(x, y, label=label)
params.append(param)
powers = range(-7, 3)
ticks = [2 ** i for i in powers]
ax.set_xscale('log', basex=2)
ax.set_xticks(ticks)
ax.set_xticklabels([r'$\frac{1}{%s}$' % (2 ** -i) if i < 0 else str(2 ** i) for i in powers])
ax.set_xlim(min(ticks), max(ticks))
ax.set_xlabel('Parameter Value')
multicolor_xlabel(ax, params, size=22)
ax.set_ylabel('Average reward over first {} steps'.format(steps))
plt.legend()
fig.savefig('results/summary.png', dpi=100)
plt.show()
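A small driver showing how plot_bandits is typically called, sampling the true action values of a 10-armed testbed from a standard normal; the arm count and seed are arbitrary.
# Minimal driver for the plotting helpers above; arm count and seed are arbitrary.
if __name__ == '__main__':
    rng = np.random.default_rng(0)
    true_means = rng.normal(loc=0.0, scale=1.0, size=10)  # q*(a) for a 10-armed bandit
    plot_bandits(true_means)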
| 34.706522
| 99
| 0.66176
|
01753677e3f3ce3d54c9bf70559665cec382b481
| 17,046
|
py
|
Python
|
rmgpy/cantherm/qchem.py
|
keceli/RMG-Py
|
17c7870195a4feb6e8bf8974292f9bcdca1a1d9d
|
[
"MIT"
] | 1
|
2017-12-18T18:43:22.000Z
|
2017-12-18T18:43:22.000Z
|
rmgpy/cantherm/qchem.py
|
keceli/RMG-Py
|
17c7870195a4feb6e8bf8974292f9bcdca1a1d9d
|
[
"MIT"
] | null | null | null |
rmgpy/cantherm/qchem.py
|
keceli/RMG-Py
|
17c7870195a4feb6e8bf8974292f9bcdca1a1d9d
|
[
"MIT"
] | 1
|
2021-08-14T13:47:18.000Z
|
2021-08-14T13:47:18.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2009 Prof. William H. Green (whgreen@mit.edu) and the
# RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import math
import numpy
import os.path
import rmgpy.constants as constants
import logging
from rmgpy.cantherm.common import checkConformerEnergy
from rmgpy.statmech import IdealGasTranslation, NonlinearRotor, LinearRotor, HarmonicOscillator, Conformer
################################################################################
class QchemLog:
"""
Represent an output file from Qchem. The attribute `path` refers to the
location on disk of the Qchem output file of interest. Methods are provided
to extract a variety of information into CanTherm classes and/or NumPy
arrays.
"""
def __init__(self, path):
self.path = path
def getNumberOfAtoms(self):
"""
Return the number of atoms in the molecular configuration used in
the Qchem output file.
"""
Natoms = 0
# Open Qchem log file for parsing
f = open(self.path, 'r')
line = f.readline()
while line != '' and Natoms == 0:
# Automatically determine the number of atoms
if 'Standard Nuclear Orientation' in line and Natoms == 0:
for i in range(3): line = f.readline()
while '----------------------------------------------------' not in line:
Natoms += 1
line = f.readline()
line = f.readline()
# Close file when finished
f.close()
# Return the result
return Natoms
def loadForceConstantMatrix(self):
"""
Return the force constant matrix (in Cartesian coordinates) from the
QChem log file. If multiple such matrices are identified,
only the last is returned. The units of the returned force constants
are J/m^2. If no force constant matrix can be found in the log file,
``None`` is returned.
"""
F = None
Natoms = self.getNumberOfAtoms()
Nrows = Natoms * 3
f = open(self.path, 'r')
line = f.readline()
while line != '':
# Read force constant matrix
if 'Final Hessian.' in line or 'Hessian of the SCF Energy' in line:
F = numpy.zeros((Nrows,Nrows), numpy.float64)
for i in range(int(math.ceil(Nrows / 6.0))):
# Header row
line = f.readline()
# Matrix element rows
for j in range(Nrows): #for j in range(i*6, Nrows):
data = f.readline().split()
for k in range(len(data)-1):
F[j,i*6+k] = float(data[k+1])
#F[i*5+k,j] = F[j,i*5+k]
# Convert from atomic units (Hartree/Bohr_radius^2) to J/m^2
F *= 4.35974417e-18 / 5.291772108e-11**2
line = f.readline()
# Close file when finished
f.close()
return F
def loadGeometry(self):
"""
Return the optimum geometry of the molecular configuration from the
Qchem log file. If multiple such geometries are identified, only the
last is returned.
"""
atom = []; coord = []; number = [];
try:
f = open(self.path, 'r')
except IndexError:
print('File not found')
f = open(self.path, 'r')
line = f.readline()
while line != '':
if 'Final energy is' in line:
                print('Found a successfully completed Qchem geometry optimization job')
line = f.readline()
atom = []; coord = []
break
line = f.readline()
found = 0
while line != '':
if 'Standard Nuclear Orientation' in line:
found += 1
for i in range(3): line = f.readline() # skip lines
while '----------------------------------------------------' not in line:
data = line.split()
atom.append((data[1]))
coord.append([float(data[2]), float(data[3]), float(data[4])])
line = f.readline()
# Read the next line in the file
line = f.readline()
# Read the next line in the file
line = f.readline()
if found ==1: break
line = f.readline()
#print coord
f.close()
        coord = numpy.array(coord, numpy.float64)
# Assign appropriate mass to each atom in molecule
# These values were taken from "Atomic Weights and Isotopic Compositions" v3.0 (July 2010) from NIST
mass = [0]*len(atom)
for i in range(len(atom)):
if atom[i] == 'H':
mass[i] = 1.00782503207
number.append('1')
elif atom[i] == 'C':
mass[i] = 12.0
number.append('6')
elif atom[i] == 'N':
mass[i] = 14.0030740048
                number.append('7')
elif atom[i] == 'O':
mass[i] = 15.99491461956
number.append('8')
elif atom[i] == 'P':
mass[i] = 30.97376163
number.append('15')
elif atom[i] == 'S':
mass[i] = 31.97207100
number.append('16')
elif atom[i] == 'Cl':
mass[i] = 35.4527
number.append('17')
else:
                print('Atom type {0} not yet supported in loadGeometry().'.format(atom[i]))
        number = numpy.array(number, numpy.int64)
return coord, number, mass
def loadConformer(self, symmetry=None, spinMultiplicity=None, opticalIsomers=1):
"""
Load the molecular degree of freedom data from a output file created as
the result of a Qchem "Freq" calculation. As
Qchem's guess of the external symmetry number is not always correct,
you can use the `symmetry` parameter to substitute your own value; if
not provided, the value in the Qchem output file will be adopted.
"""
modes = []; freq = []; mmass = []; rot = []
E0 = 0.0
# symmetry = 1
f = open(self.path, 'r')
line = f.readline()
while line != '':
# Read spin multiplicity if not explicitly given
if '$molecule' in line and spinMultiplicity is None:
line = f.readline()
if len(line.split()) == 2:
spinMultiplicity = int(float(line.split()[1]))
# The rest of the data we want is in the Thermochemistry section of the output
elif 'VIBRATIONAL ANALYSIS' in line:
modes = []
inPartitionFunctions = False
line = f.readline()
while line != '':
# This marks the end of the thermochemistry section
if 'Thank you very much for using Q-Chem.' in line:
break
# Read vibrational modes
elif 'VIBRATIONAL FREQUENCIES (CM**-1)' in line:
frequencies = []
while 'STANDARD THERMODYNAMIC QUANTITIES AT' not in line:
if ' Frequency:' in line:
if len(line.split()) == 4:
frequencies.extend([float(d) for d in line.split()[-3:]])
elif len(line.split()) == 3:
frequencies.extend([float(d) for d in line.split()[-2:]])
elif len(line.split()) == 2:
frequencies.extend([float(d) for d in line.split()[-1:]])
line = f.readline()
line = f.readline()
# If there is an imaginary frequency, remove it
if frequencies[0] < 0.0:
frequencies = frequencies[1:]
vibration = HarmonicOscillator(frequencies=(frequencies,"cm^-1"))
#modes.append(vibration)
freq.append(vibration)
# Read molecular mass for external translational modes
elif 'Molecular Mass:' in line:
mass = float(line.split()[2])
translation = IdealGasTranslation(mass=(mass,"amu"))
#modes.append(translation)
mmass.append(translation)
# Read moments of inertia for external rotational modes, given in atomic units
elif 'Eigenvalues --' in line:
inertia = [float(d) for d in line.split()[-3:]]
# If the first eigenvalue is 0, the rotor is linear
symmetry = 1
if inertia[0] == 0.0:
inertia.remove(0.0)
logging.debug('inertia is {}'.format(str(inertia)))
for i in range(2):
inertia[i] *= (constants.a0/1e-10)**2
inertia = numpy.sqrt(inertia[0]*inertia[1])
rotation = LinearRotor(inertia=(inertia,"amu*angstrom^2"), symmetry=symmetry)
rot.append(rotation)
else:
                            for i in range(3):
                                inertia[i] *= (constants.a0/1e-10)**2
rotation = NonlinearRotor(inertia=(inertia,"amu*angstrom^2"), symmetry=symmetry)
#modes.append(rotation)
rot.append(rotation)
# Read Qchem's estimate of the external rotational symmetry number, which may very well be incorrect
elif 'Rotational Symmetry Number is' in line: # and symmetry is None:
symmetry = int(float(line.split()[4]))
logging.debug('rot sym is {}'.format(str(symmetry)))
# Read the next line in the file
line = f.readline()
# Read the next line in the file
line = f.readline()
# Close file when finished
f.close()
modes = mmass + rot + freq
return Conformer(E0=(E0*0.001,"kJ/mol"), modes=modes, spinMultiplicity=spinMultiplicity, opticalIsomers=opticalIsomers)
def loadEnergy(self,frequencyScaleFactor=1.):
"""
Load the energy in J/mol from a Qchem log file. Only the last energy
in the file is returned. The zero-point energy is *not* included in
the returned value.
"""
modes = []
E0 = None
spinMultiplicity = 1
f = open(self.path, 'r')
line = f.readline()
while line != '':
if 'Final energy is' in line:
E0 = float(line.split()[3]) * constants.E_h * constants.Na
logging.debug('energy is {}'.format(str(E0)))
# elif 'Zero point vibrational energy' in line:
#Qchem's ZPE is in kcal/mol
# ZPE = float(line.split()[4]) * 4184
# scaledZPE = ZPE * frequencyScaleFactor
# print 'ZPE is ' + str(ZPE)
# Read the next line in the file
line = f.readline()
# Close file when finished
f.close()
if E0 is not None:
return E0
else:
raise Exception('Unable to find energy in Qchem output file.')
def loadZeroPointEnergy(self,frequencyScaleFactor=1.):
"""
Load the unscaled zero-point energy in J/mol from a Qchem output file.
"""
modes = []
ZPE = None
spinMultiplicity = 1
f = open(self.path, 'r')
line = f.readline()
while line != '':
# if 'Final energy is' in line:
# E0 = float(line.split()[3]) * constants.E_h * constants.Na
# print 'energy is' + str(E0)
if 'Zero point vibrational energy' in line:
#Qchem's ZPE is in kcal/mol
ZPE = float(line.split()[4]) * 4184
#scaledZPE = ZPE * frequencyScaleFactor
logging.debug('ZPE is {}'.format(str(ZPE)))
# Read the next line in the file
line = f.readline()
# Close file when finished
f.close()
if ZPE is not None:
return ZPE
else:
raise Exception('Unable to find zero-point energy in Qchem output file.')
def loadScanEnergies(self):
"""
Extract the optimized energies in J/mol from a Qchem log file, e.g. the
result of a Qchem "PES Scan" quantum chemistry calculation.
"""
Vlist = []
angle = []
f = open(self.path, 'r')
line = f.readline()
while line != '':
if 'Summary of potential scan:' in line:
line = f.readline()
                print('found a successfully completed Qchem Job')
while '-----------------' not in line:
# print len(line.split())
# Vlist.append(float(line.split()[1]))
values = [float(item) for item in line.split()]
angle.append(values[0])
Vlist.append(values[1])
# Read the next line in the file
line = f.readline()
line = f.readline()
if 'SCF failed to converge' in line:
                print('Qchem Job did not successfully complete: SCF failed to converge')
break
        print('Assuming {0} is the output from a Qchem PES scan...'.format(os.path.basename(self.path)))
        # Close file when finished
        f.close()
Vlist = numpy.array(Vlist, numpy.float64)
# check to see if the scanlog indicates that one of your reacting species may not be the lowest energy conformer
checkConformerEnergy(Vlist, self.path)
# Adjust energies to be relative to minimum energy conformer
# Also convert units from Hartree/particle to J/mol
Vlist -= numpy.min(Vlist)
Vlist *= constants.E_h * constants.Na
angle = numpy.arange(0.0, 2*math.pi+0.00001, 2*math.pi/(len(Vlist)-1), numpy.float64)
return Vlist, angle
def loadNegativeFrequency(self):
"""
Return the imaginary frequency from a transition state frequency
calculation in cm^-1.
"""
        frequency = None
        f = open(self.path, 'r')
        line = f.readline()
        while line != '':
            # Read imaginary frequency
            if ' Frequency:' in line:
                frequency = float(line.split()[1])
                break
            line = f.readline()
        # Close file when finished
        f.close()
        # Make sure the frequency is imaginary:
        if frequency is not None and frequency < 0:
            return frequency
        else:
            raise Exception('Unable to find imaginary frequency in QChem output file.')
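# --- Illustrative usage sketch (not part of the original log-file parser) ---
# A rough idea of how the parsing methods above are typically driven; the
# enclosing class name (here assumed to be QchemLog) and the log-file path are
# assumptions, since the class statement and constructor sit above this excerpt.
#
#     log = QchemLog('ethane_freq.out')            # hypothetical file name
#     n_atoms = log.getNumberOfAtoms()
#     conformer = log.loadConformer(symmetry=1)
#     e0 = log.loadEnergy() + log.loadZeroPointEnergy()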
| 41.474453
| 127
| 0.503461
|
ab60ebb9e77b2e6317b142abedd7e0287623e22d
| 841
|
py
|
Python
|
ch2/package/ch2_pkg_py/ch2_pkg_py/pkg2go.py
|
homalozoa/ros2_for_beginners_code
|
0515621d9cb53cc0c0e2380cf9a52e68ce975e76
|
[
"Apache-2.0"
] | null | null | null |
ch2/package/ch2_pkg_py/ch2_pkg_py/pkg2go.py
|
homalozoa/ros2_for_beginners_code
|
0515621d9cb53cc0c0e2380cf9a52e68ce975e76
|
[
"Apache-2.0"
] | null | null | null |
ch2/package/ch2_pkg_py/ch2_pkg_py/pkg2go.py
|
homalozoa/ros2_for_beginners_code
|
0515621d9cb53cc0c0e2380cf9a52e68ce975e76
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2022 Homalozoa
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Pkg2Go():
def __init__(self, name):
self.name = name
def get_pkg2go_name(self):
return self.name
def main(args=None):
pkg2go = Pkg2Go('Bye ROS 2.')
print(pkg2go.get_pkg2go_name())
if __name__ == '__main__':
main()
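# --- Illustrative note (not part of the original pkg2go.py) ---
# In a typical ROS 2 Python package, main() above is exposed as an executable
# through an entry point in the package's setup.py; the script name below is an
# assumption, not taken from the original repository:
#
#     entry_points={
#         'console_scripts': [
#             'pkg2go = ch2_pkg_py.pkg2go:main',
#         ],
#     },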
| 26.28125
| 74
| 0.712247
|
491e7618a61185fd306819de93d7d793b63a8075
| 53
|
py
|
Python
|
main/courses/exams/forms.py
|
mahkhaled/class2go
|
b32cb441e8d96c257f70cb61274812ebeed2649d
|
[
"Apache-2.0"
] | 2
|
2015-10-31T23:12:52.000Z
|
2021-01-19T11:03:00.000Z
|
main/courses/exams/forms.py
|
sunu/class2go
|
653b1edd01d390ad387dd788e0fc2d89445fbcab
|
[
"Apache-2.0"
] | null | null | null |
main/courses/exams/forms.py
|
sunu/class2go
|
653b1edd01d390ad387dd788e0fc2d89445fbcab
|
[
"Apache-2.0"
] | null | null | null |
from django import forms
from c2g.models import Exam
| 26.5
| 27
| 0.830189
|
8631867f80552746e0f3a68b35253c1d6ec7fd96
| 15,731
|
py
|
Python
|
tests/test_queryparser.py
|
psyhomb/alerta
|
299f0b92ff67149190d1c781c4d203dbcc8a81b6
|
[
"Apache-2.0"
] | null | null | null |
tests/test_queryparser.py
|
psyhomb/alerta
|
299f0b92ff67149190d1c781c4d203dbcc8a81b6
|
[
"Apache-2.0"
] | 60
|
2020-07-27T07:00:45.000Z
|
2022-03-21T18:02:18.000Z
|
tests/test_queryparser.py
|
psyhomb/alerta
|
299f0b92ff67149190d1c781c4d203dbcc8a81b6
|
[
"Apache-2.0"
] | 1
|
2020-11-24T03:16:49.000Z
|
2020-11-24T03:16:49.000Z
|
import unittest
def skip_postgres():
try:
import psycopg2 # noqa
except ImportError:
return True
return False
class PostgresQueryTestCase(unittest.TestCase):
def setUp(self):
if skip_postgres():
self.skipTest('psycopg2 import failed')
from alerta.database.backends.postgres.queryparser import \
QueryParser as PostgresQueryParser
self.parser = PostgresQueryParser()
def test_word_and_phrase_terms(self):
# default field (ie. "text") contains word
string = r'''quick'''
r = self.parser.parse(string)
self.assertEqual(r, '"text" ILIKE \'%%quick%%\'')
# default field (ie. "text") contains phrase
string = r'''"quick brown"'''
r = self.parser.parse(string)
self.assertEqual(r, '"text" ~* \'\\yquick brown\\y\'')
def test_field_names(self):
# field contains word
string = r'''status:active'''
r = self.parser.parse(string)
self.assertEqual(r, '"status" ILIKE \'%%active%%\'')
# field contains either words
string = r'''title:(quick OR brown)'''
r = self.parser.parse(string)
self.assertEqual(r, '("title" ILIKE \'%%quick%%\' OR "title" ILIKE \'%%brown%%\')')
# field contains either words (default operator)
string = r'''title:(quick brown)'''
r = self.parser.parse(string)
self.assertEqual(r, '("title" ILIKE \'%%quick%%\' OR "title" ILIKE \'%%brown%%\')')
# field exact match
string = r'''author:"John Smith"'''
r = self.parser.parse(string)
self.assertEqual(r, '"author" ~* \'\\yJohn Smith\\y\'')
# # any attribute contains word or phrase
# string = r'''attributes.\*:(quick brown)'''
# r = self.parser.parse(string)
# self.assertEqual(r, '??')
# attribute field has non-null value
string = r'''_exists_:title'''
r = self.parser.parse(string)
self.assertEqual(r, '"attributes"::jsonb ? \'title\'')
# attribute contains word
string = r'''foo.vendor:cisco'''
r = self.parser.parse(string)
self.assertEqual(r, '"foo"::jsonb ->>\'vendor\' ILIKE \'%%cisco%%\'')
# attribute contains word ("_" shortcut)
string = r'''_.vendor:cisco'''
r = self.parser.parse(string)
self.assertEqual(r, '"attributes"::jsonb ->>\'vendor\' ILIKE \'%%cisco%%\'')
# attribute contains either words (default operator)
string = r'''attributes.vendor:(cisco juniper)'''
r = self.parser.parse(string)
self.assertEqual(r, '("attributes"::jsonb ->>\'vendor\' ILIKE \'%%cisco%%\' OR "attributes"::jsonb ->>\'vendor\' ILIKE \'%%juniper%%\')')
# attribute contains either words ("_" shortcut, default operator)
string = r'''_.vendor:(cisco juniper)'''
r = self.parser.parse(string)
self.assertEqual(r, '("attributes"::jsonb ->>\'vendor\' ILIKE \'%%cisco%%\' OR "attributes"::jsonb ->>\'vendor\' ILIKE \'%%juniper%%\')')
def test_wildcards(self):
# ? = single character, * = one or more characters
string = r'''text:qu?ck bro*'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yqu.?ck\\y\' OR "text" ~* \'\\ybro.*\\y\')')
def test_regular_expressions(self):
string = r'''name:/joh?n(ath[oa]n)/'''
r = self.parser.parse(string)
self.assertEqual(r, '"name" ~* \'joh?n(ath[oa]n)\'')
def test_fuzziness(self):
pass
def test_proximity_searches(self):
pass
def test_ranges(self):
string = r'''date:[2012-01-01 TO 2012-12-31]'''
r = self.parser.parse(string)
self.assertEqual(r, '("date" >= \'2012-01-01\' AND "date" <= \'2012-12-31\')')
string = r'''count:[1 TO 5]'''
r = self.parser.parse(string)
self.assertEqual(r, '("count" >= \'1\' AND "count" <= \'5\')')
string = r'''tag:{alpha TO omega}'''
r = self.parser.parse(string)
self.assertEqual(r, '("tag" > \'alpha\' AND "tag" < \'omega\')')
string = r'''count:[10 TO *]'''
r = self.parser.parse(string)
self.assertEqual(r, '("count" >= \'10\' AND 1=1)')
string = r'''date:{* TO 2012-01-01}'''
r = self.parser.parse(string)
self.assertEqual(r, '(1=1 AND "date" < \'2012-01-01\')')
string = r'''count:[1 TO 5}'''
r = self.parser.parse(string)
self.assertEqual(r, '("count" >= \'1\' AND "count" < \'5\')')
def test_unbounded_ranges(self):
string = r'''age:>10'''
r = self.parser.parse(string)
self.assertEqual(r, '("age" > \'10\')')
string = r'''age:>=10'''
r = self.parser.parse(string)
self.assertEqual(r, '("age" >= \'10\')')
string = r'''age:<10'''
r = self.parser.parse(string)
self.assertEqual(r, '("age" < \'10\')')
string = r'''age:<=10'''
r = self.parser.parse(string)
self.assertEqual(r, '("age" <= \'10\')')
def test_boosting(self):
pass
def test_boolean_operators(self):
# OR (||)
string = r'''"jakarta apache" jakarta'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yjakarta apache\\y\' OR "text" ILIKE \'%%jakarta%%\')')
string = r'''"jakarta apache" OR jakarta'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yjakarta apache\\y\' OR "text" ILIKE \'%%jakarta%%\')')
string = r'''"jakarta apache" || jakarta'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yjakarta apache\\y\' OR "text" ILIKE \'%%jakarta%%\')')
# AND (&&)
string = r'''"jakarta apache" AND "Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yjakarta apache\\y\' AND "text" ~* \'\\yApache Lucene\\y\')')
string = r'''"jakarta apache" && "Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yjakarta apache\\y\' AND "text" ~* \'\\yApache Lucene\\y\')')
# + (required)
pass
# NOT (!)
string = r'''"jakarta apache" NOT "Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yjakarta apache\\y\' AND NOT ("text" ~* \'\\yApache Lucene\\y\'))')
string = r'''"jakarta apache" !"Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '("text" ~* \'\\yjakarta apache\\y\' AND NOT ("text" ~* \'\\yApache Lucene\\y\'))')
string = r'''NOT "jakarta apache"'''
r = self.parser.parse(string)
self.assertEqual(r, 'NOT ("text" ~* \'\\yjakarta apache\\y\')')
string = r'''group:"jakarta apache" NOT group:"Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '("group" ~* \'\\yjakarta apache\\y\' AND NOT ("group" ~* \'\\yApache Lucene\\y\'))')
# - (prohibit)
pass
def test_grouping(self):
# field exact match
string = r'''(quick OR brown) AND fox'''
r = self.parser.parse(string)
self.assertEqual(r, '(("text" ILIKE \'%%quick%%\' OR "text" ILIKE \'%%brown%%\') AND "text" ILIKE \'%%fox%%\')')
# field exact match
string = r'''status:(active OR pending) title:(full text search)'''
r = self.parser.parse(string)
self.assertEqual(
r, '(("status" ILIKE \'%%active%%\' OR "status" ILIKE \'%%pending%%\') OR ("title" ILIKE \'%%full%%\' OR "title" ILIKE \'%%text%%\'))')
def skip_mongodb():
try:
import pymongo # noqa
except ImportError:
return True
return False
class MongoQueryTestCase(unittest.TestCase):
def setUp(self):
if skip_mongodb():
self.skipTest('pymongo import failed')
from alerta.database.backends.mongodb.queryparser import \
QueryParser as MongoQueryParser
self.parser = MongoQueryParser()
def test_word_and_phrase_terms(self):
# default field (ie. "text") contains word
string = r'''quick'''
r = self.parser.parse(string)
self.assertEqual(r, '{"text": {"$regex": "quick"}}')
# default field (ie. "text") contains phrase
string = r'''"quick brown"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"text": {"$regex": "quick brown"}}')
def test_field_names(self):
# field contains word
string = r'''status:active'''
r = self.parser.parse(string)
self.assertEqual(r, '{"status": {"$regex": "active"}}')
# field contains either words
string = r'''title:(quick OR brown)'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$or": [{"title": {"$regex": "quick"}}, {"title": {"$regex": "brown"}}]}')
# field contains either words (default operator)
string = r'''title:(quick brown)'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$or": [{"title": {"$regex": "quick"}}, {"title": {"$regex": "brown"}}]}')
# field exact match
string = r'''author:"John Smith"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"author": {"$regex": "John Smith"}}')
# # # any attribute contains word or phrase
# # string = r'''attributes.\*:(quick brown)'''
# # r = self.parser.parse(string)
# # self.assertEqual(r, '??')
# attribute field has non-null value
string = r'''_exists_:title'''
r = self.parser.parse(string)
self.assertEqual(r, '{"attributes.title": {"$exists": true}}')
# attribute contains word
string = r'''foo.vendor:cisco'''
r = self.parser.parse(string)
self.assertEqual(r, '{"foo.vendor": {"$regex": "cisco"}}')
# attribute contains word ("_" shortcut)
string = r'''_.vendor:cisco'''
r = self.parser.parse(string)
self.assertEqual(r, '{"attributes.vendor": {"$regex": "cisco"}}')
# attribute contains either words (default operator)
string = r'''attributes.vendor:(cisco juniper)'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$or": [{"attributes.vendor": {"$regex": "cisco"}}, {"attributes.vendor": {"$regex": "juniper"}}]}')
# attribute contains either words ("_" shortcut, default operator)
string = r'''_.vendor:(cisco juniper)'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$or": [{"attributes.vendor": {"$regex": "cisco"}}, {"attributes.vendor": {"$regex": "juniper"}}]}')
def test_wildcards(self):
# ? = single character, * = one or more characters
string = r'''text:qu?ck bro*'''
r = self.parser.parse(string)
self.assertEqual(
r, '{"$or": [{"text": {"$regex": "\\\\bqu.?ck\\\\b"}}, {"text": {"$regex": "\\\\bbro.*\\\\b"}}]}')
def test_regular_expressions(self):
string = r'''name:/joh?n(ath[oa]n)/'''
r = self.parser.parse(string)
self.assertEqual(r, '{"name": {"$regex": "joh?n(ath[oa]n)"}}')
def test_fuzziness(self):
pass
def test_proximity_searches(self):
pass
def test_ranges(self):
string = r'''date:[2012-01-01 TO 2012-12-31]'''
r = self.parser.parse(string)
self.assertEqual(
r, '{"$and": [{"date": {"$gte": "2012-01-01"}}, {"date": {"$lte": "2012-12-31"}}]}')
string = r'''count:[1 TO 5]'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"count": {"$gte": "1"}}, {"count": {"$lte": "5"}}]}')
string = r'''tag:{alpha TO omega}'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"tag": {"$gt": "alpha"}}, {"tag": {"$lt": "omega"}}]}')
string = r'''count:[10 TO *]'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"count": {"$gte": "10"}}, {}]}')
string = r'''date:{* TO 2012-01-01}'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{}, {"date": {"$lt": "2012-01-01"}}]}')
string = r'''count:[1 TO 5}'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"count": {"$gte": "1"}}, {"count": {"$lt": "5"}}]}')
def test_unbounded_ranges(self):
string = r'''age:>10'''
r = self.parser.parse(string)
self.assertEqual(r, '{"age": {"$gt": "10"}}')
string = r'''age:>=10'''
r = self.parser.parse(string)
self.assertEqual(r, '{"age": {"$gte": "10"}}')
string = r'''age:<10'''
r = self.parser.parse(string)
self.assertEqual(r, '{"age": {"$lt": "10"}}')
string = r'''age:<=10'''
r = self.parser.parse(string)
self.assertEqual(r, '{"age": {"$lte": "10"}}')
def test_boosting(self):
pass
def test_boolean_operators(self):
# OR (||)
string = r'''"jakarta apache" jakarta'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$or": [{"text": {"$regex": "jakarta apache"}}, {"text": {"$regex": "jakarta"}}]}')
string = r'''"jakarta apache" OR jakarta'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$or": [{"text": {"$regex": "jakarta apache"}}, {"text": {"$regex": "jakarta"}}]}')
string = r'''"jakarta apache" || jakarta'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$or": [{"text": {"$regex": "jakarta apache"}}, {"text": {"$regex": "jakarta"}}]}')
# AND (&&)
string = r'''"jakarta apache" AND "Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"text": {"$regex": "jakarta apache"}}, {"text": {"$regex": "Apache Lucene"}}]}')
string = r'''"jakarta apache" && "Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"text": {"$regex": "jakarta apache"}}, {"text": {"$regex": "Apache Lucene"}}]}')
# + (required)
pass
# NOT (!)
string = r'''"jakarta apache" NOT "Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"text": {"$regex": "jakarta apache"}}, {"text": {"$not": {"$regex": "Apache Lucene"}}}]}')
string = r'''"jakarta apache" !"Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"text": {"$regex": "jakarta apache"}}, {"text": {"$not": {"$regex": "Apache Lucene"}}}]}')
string = r'''NOT "jakarta apache"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"text": {"$not": {"$regex": "jakarta apache"}}}')
string = r'''group:"jakarta apache" NOT group:"Apache Lucene"'''
r = self.parser.parse(string)
self.assertEqual(r, '{"$and": [{"group": {"$regex": "jakarta apache"}}, {"group": {"$not": {"$regex": "Apache Lucene"}}}]}')
# - (prohibit)
pass
def test_grouping(self):
# field exact match
string = r'''(quick OR brown) AND fox'''
r = self.parser.parse(string)
self.assertEqual(
r, '{"$and": [{"$or": [{"text": {"$regex": "quick"}}, {"text": {"$regex": "brown"}}]}, {"text": {"$regex": "fox"}}]}')
# field exact match
string = r'''status:(active OR pending) title:(full text search)'''
r = self.parser.parse(string)
self.assertEqual(
r, '{"$or": [{"$or": [{"status": {"$regex": "active"}}, {"status": {"$regex": "pending"}}]}, {"$or": [{"title": {"$regex": "full"}}, {"title": {"$regex": "text"}}]}]}')
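# --- Illustrative usage sketch (not part of the original test module) ---
# Drives one of the parsers exercised above outside of unittest to print the
# translated filter for a Lucene-style query; it needs the same optional
# dependencies (the alerta package plus pymongo) that the tests require.
if __name__ == '__main__':
    if not skip_mongodb():
        from alerta.database.backends.mongodb.queryparser import \
            QueryParser as MongoQueryParser
        parser = MongoQueryParser()
        # Prints the MongoDB JSON filter produced for the query string
        print(parser.parse('status:active AND severity:(major OR critical)'))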
| 37.189125
| 180
| 0.531435
|
4a46b749347c3bda65438d24af05f82fe48fd8a8
| 4,763
|
py
|
Python
|
graphene_sqlalchemy/fields.py
|
dubbl/graphene-sqlalchemy
|
e362e3fc4993d7e95873044bb4d00185b1d3dd8b
|
[
"MIT"
] | null | null | null |
graphene_sqlalchemy/fields.py
|
dubbl/graphene-sqlalchemy
|
e362e3fc4993d7e95873044bb4d00185b1d3dd8b
|
[
"MIT"
] | null | null | null |
graphene_sqlalchemy/fields.py
|
dubbl/graphene-sqlalchemy
|
e362e3fc4993d7e95873044bb4d00185b1d3dd8b
|
[
"MIT"
] | null | null | null |
import logging
from functools import partial
from promise import Promise, is_thenable
from sqlalchemy.orm.query import Query
from graphene.relay import Connection, ConnectionField
from graphene.relay.connection import PageInfo
from graphql_relay.connection.arrayconnection import connection_from_list_slice
from .utils import get_query, sort_argument_for_model
log = logging.getLogger()
class UnsortedSQLAlchemyConnectionField(ConnectionField):
@property
def type(self):
from .types import SQLAlchemyObjectType
_type = super(ConnectionField, self).type
if issubclass(_type, Connection):
return _type
assert issubclass(_type, SQLAlchemyObjectType), (
"SQLALchemyConnectionField only accepts SQLAlchemyObjectType types, not {}"
).format(_type.__name__)
assert _type._meta.connection, "The type {} doesn't have a connection".format(
_type.__name__
)
return _type._meta.connection
@property
def model(self):
return self.type._meta.node._meta.model
@classmethod
def get_query(cls, model, info, sort=None, **args):
query = get_query(model, info.context)
if sort is not None:
if isinstance(sort, str):
query = query.order_by(sort.value)
else:
query = query.order_by(*(col.value for col in sort))
return query
@classmethod
def resolve_connection(cls, connection_type, model, info, args, resolved):
if resolved is None:
resolved = cls.get_query(model, info, **args)
if isinstance(resolved, Query):
_len = resolved.count()
else:
_len = len(resolved)
connection = connection_from_list_slice(
resolved,
args,
slice_start=0,
list_length=_len,
list_slice_length=_len,
connection_type=connection_type,
pageinfo_type=PageInfo,
edge_type=connection_type.Edge,
)
connection.iterable = resolved
connection.length = _len
return connection
@classmethod
def connection_resolver(cls, resolver, connection_type, model, root, info, **args):
resolved = resolver(root, info, **args)
on_resolve = partial(cls.resolve_connection, connection_type, model, info, args)
if is_thenable(resolved):
return Promise.resolve(resolved).then(on_resolve)
return on_resolve(resolved)
def get_resolver(self, parent_resolver):
return partial(self.connection_resolver, parent_resolver, self.type, self.model)
class SQLAlchemyConnectionField(UnsortedSQLAlchemyConnectionField):
def __init__(self, type, *args, **kwargs):
if "sort" not in kwargs and issubclass(type, Connection):
# Let super class raise if type is not a Connection
try:
model = type.Edge.node._type._meta.model
kwargs.setdefault("sort", sort_argument_for_model(model))
except Exception:
raise Exception(
                    'Cannot create sort argument for {}. A model is required. Set the "sort" argument'
                    " to None to disable the creation of the sort query argument".format(
type.__name__
)
)
elif "sort" in kwargs and kwargs["sort"] is None:
del kwargs["sort"]
super(SQLAlchemyConnectionField, self).__init__(type, *args, **kwargs)
def default_connection_field_factory(relationship, registry):
model = relationship.mapper.entity
model_type = registry.get_type_for_model(model)
return createConnectionField(model_type)
# TODO Remove in next major version
__connectionFactory = UnsortedSQLAlchemyConnectionField
def createConnectionField(_type):
log.warn(
'createConnectionField is deprecated and will be removed in the next '
'major version. Use SQLAlchemyObjectType.Meta.connection_field_factory instead.'
)
return __connectionFactory(_type)
def registerConnectionFieldFactory(factoryMethod):
log.warn(
'registerConnectionFieldFactory is deprecated and will be removed in the next '
'major version. Use SQLAlchemyObjectType.Meta.connection_field_factory instead.'
)
global __connectionFactory
__connectionFactory = factoryMethod
def unregisterConnectionFieldFactory():
log.warn(
        'unregisterConnectionFieldFactory is deprecated and will be removed in the next '
'major version. Use SQLAlchemyObjectType.Meta.connection_field_factory instead.'
)
global __connectionFactory
__connectionFactory = UnsortedSQLAlchemyConnectionField
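# --- Illustrative usage sketch (not part of the original fields.py) ---
# Rough shape of how these connection fields are attached to a schema;
# ``PersonType`` is assumed to be a SQLAlchemyObjectType (with a connection)
# defined elsewhere for some SQLAlchemy model.
#
#     import graphene
#
#     class Query(graphene.ObjectType):
#         # sorted connection: a ``sort`` argument is derived from the model
#         all_people = SQLAlchemyConnectionField(PersonType._meta.connection)
#         # unsorted variant: no ``sort`` argument is added
#         people = UnsortedSQLAlchemyConnectionField(PersonType._meta.connection)
#
#     schema = graphene.Schema(query=Query)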
| 35.281481
| 102
| 0.675625
|
40694776e21d4a165a405b603a2f7d200527a3da
| 3,721
|
py
|
Python
|
core/systemcomponent.py
|
bahadley/dtest-controller
|
905621d76bd0613f69a233ccba86cce8efb3179c
|
[
"MIT"
] | null | null | null |
core/systemcomponent.py
|
bahadley/dtest-controller
|
905621d76bd0613f69a233ccba86cce8efb3179c
|
[
"MIT"
] | null | null | null |
core/systemcomponent.py
|
bahadley/dtest-controller
|
905621d76bd0613f69a233ccba86cce8efb3179c
|
[
"MIT"
] | null | null | null |
"""
systemcomponent.py: Contains the SystemComponent class.
Each SystemUnderTest has one or more SystemComponent instances. These
instances may correspond to running virtual machine instances, in the
case of OpenStack compute, or to a vSwitch instance or veth pair in the
case of the Linux networking stack. The 'target' attribute will be used
to uniquely identify the component within the fault injector code. A
SystemComponent instance is a simple state machine. There are two possible
states: Operable and Nonoperable. A SystemComponent will transition
between states only when a 'state_transition' event is activated. By
default, events are not configured as 'state_transition'. A SystemComponent
will always remain in the Operable state when all the events assigned to
it are not 'state_transition' events.
"""
from time import time
from event import Event
class SystemComponent(object):
# SystemComponent instances have the following possible states.
OPERABLE = True
NONOPERABLE = False
def __init__(self, component_id, targets, config):
""" Create SystemComponent object.
component_id: id of the component
targets: a list of component identifiers which may be subject
to faults. Frequently, this will be a single entity,
but it could be a list of identifiers such that one
is randomly selected during event activation
config: a SessionConfig instance"""
self._id = component_id
self._targets = targets
self._state = self.OPERABLE
self._events = {self.OPERABLE:[], self.NONOPERABLE:[]}
# Time when the component was initialized. Used for sequencing
# events that have effective start and end times.
self._life_start_time = time()
# Time of the last event activation. Used to determine the
# elapsed time since the previous event. The time will mark the
# moment when the event is initially activated. The execution
# duration of the associated fault function is indeterminant.
self._last_event_time = time()
for e in config.get_events_for_component(self._id):
for _ in range(e[1]):
# Append # of events corresponding to 'instance' parameter
self._events[self.OPERABLE].append(
Event(self._id, self._targets,
e[0], config)
)
for e in config.get_events_for_component(self._id, False):
for _ in range(e[1]):
# Append # of events corresponding to 'instance' parameter
self._events[self.NONOPERABLE].append(
Event(self._id, self._targets,
e[0], config)
)
def checkpoint(self):
""" Determines whether any events associated with the component's
state need to be activated.
returns: list of Event instances which are active"""
# Build list of activated events.
active_events = [
e for e in self._events[self._state]
if e.is_active(self._life_start_time,
self._last_event_time)
]
for e in active_events:
e.set_executed()
self._last_event_time = time()
# Transition the component state if necessary.
if e.is_state_transition_event():
self._state = not self._state
return active_events
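# --- Illustrative usage sketch (not part of the original systemcomponent.py) ---
# Rough shape of a controller loop that polls checkpoint(); ``session_config``
# is assumed to be a SessionConfig instance built elsewhere, and the component
# id, target list and fault-injector hook are hypothetical.
#
#     component = SystemComponent('nova-compute', ['vm-instance-1'], session_config)
#     while session_is_running():
#         for event in component.checkpoint():
#             inject_fault(event)   # hand each activated event to the fault injector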
| 43.776471
| 78
| 0.615426
|
5d5feaa5dffc17cff6330b1f0cf5476c3c24c5e7
| 2,168
|
py
|
Python
|
backoffice/transactions/migrations/0001_initial.py
|
AlejandroUPC/pythonmicroservices
|
9d42bd6dfd9847ad4a8e6029e808de927292c251
|
[
"MIT"
] | null | null | null |
backoffice/transactions/migrations/0001_initial.py
|
AlejandroUPC/pythonmicroservices
|
9d42bd6dfd9847ad4a8e6029e808de927292c251
|
[
"MIT"
] | null | null | null |
backoffice/transactions/migrations/0001_initial.py
|
AlejandroUPC/pythonmicroservices
|
9d42bd6dfd9847ad4a8e6029e808de927292c251
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-23 08:01
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Customer',
fields=[
('customer_id', models.BigIntegerField(default='939349484081954', primary_key=True, serialize=False)),
('register_date', models.DateField(auto_now=True)),
('previous_ctmer', models.BooleanField(default=False)),
('has_sons', models.BooleanField()),
('is_married', models.BooleanField()),
('is_local', models.BooleanField()),
('residence', models.CharField(max_length=50)),
('address', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='SuperMarket',
fields=[
('supermarket_id', models.BigIntegerField(default='612066929378476', primary_key=True, serialize=False)),
('supermarket_name', models.CharField(max_length=100)),
('register_date', models.DateField(auto_now=True)),
],
),
migrations.CreateModel(
name='Transaction',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('transaction_id', models.BigIntegerField(default='869889841611220')),
('item_id', models.BigIntegerField()),
('pay_method', models.IntegerField()),
('price', models.FloatField()),
('item_cnt', models.SmallIntegerField()),
('bill_id', models.BigIntegerField()),
('transaction_dt', models.DateField(auto_now=True)),
('customer_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='transactions.customer')),
('sp_owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='transactions.supermarket')),
],
),
]
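# --- Illustrative usage note (not part of the original migration) ---
# After applying this initial migration (``python manage.py migrate
# transactions``), rows can be created through the corresponding models;
# the import path and field values below are hypothetical:
#
#     from transactions.models import Customer
#     Customer.objects.create(has_sons=False, is_married=True, is_local=True,
#                             residence='Example City', address='Example St. 1')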
| 41.692308
| 124
| 0.579336
|
c147aefe1528699440c016b4c39cdd3077e9bf3d
| 9,166
|
py
|
Python
|
src/sentry/utils/pytest/sentry.py
|
skeptycal/sentry
|
b802f4a5e8ceb98390cf3ec03af4a73a7083c817
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/utils/pytest/sentry.py
|
skeptycal/sentry
|
b802f4a5e8ceb98390cf3ec03af4a73a7083c817
|
[
"BSD-3-Clause"
] | null | null | null |
src/sentry/utils/pytest/sentry.py
|
skeptycal/sentry
|
b802f4a5e8ceb98390cf3ec03af4a73a7083c817
|
[
"BSD-3-Clause"
] | null | null | null |
from __future__ import absolute_import
import pytest
import mock
import os
from django.conf import settings
from sentry_sdk import Hub
TEST_ROOT = os.path.normpath(
os.path.join(
os.path.dirname(__file__),
os.pardir,
os.pardir,
os.pardir,
os.pardir,
'tests'))
def pytest_configure(config):
# HACK: Only needed for testing!
os.environ.setdefault('_SENTRY_SKIP_CONFIGURATION', '1')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sentry.conf.server')
# override docs which are typically synchronized from an upstream server
# to ensure tests are consistent
os.environ.setdefault(
'INTEGRATION_DOC_FOLDER',
os.path.join(
TEST_ROOT,
'fixtures',
'integration-docs'))
from sentry.utils import integrationdocs
integrationdocs.DOC_FOLDER = os.environ['INTEGRATION_DOC_FOLDER']
if not settings.configured:
# only configure the db if its not already done
test_db = os.environ.get('DB', 'postgres')
if test_db == 'mysql':
settings.DATABASES['default'].update(
{
'ENGINE': 'django.db.backends.mysql',
'NAME': 'sentry',
'USER': 'root',
'HOST': '127.0.0.1',
}
)
# mysql requires running full migration all the time
elif test_db == 'postgres':
settings.DATABASES['default'].update(
{
'ENGINE': 'sentry.db.postgres',
'USER': 'postgres',
'NAME': 'sentry',
'HOST': '127.0.0.1',
}
)
# postgres requires running full migration all the time
# since it has to install stored functions which come from
# an actual migration.
elif test_db == 'sqlite':
settings.DATABASES['default'].update(
{
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
)
else:
raise RuntimeError('oops, wrong database: %r' % test_db)
settings.TEMPLATE_DEBUG = True
# Disable static compiling in tests
settings.STATIC_BUNDLES = {}
# override a few things with our test specifics
settings.INSTALLED_APPS = tuple(settings.INSTALLED_APPS) + ('tests', )
# Need a predictable key for tests that involve checking signatures
settings.SENTRY_PUBLIC = False
if not settings.SENTRY_CACHE:
settings.SENTRY_CACHE = 'sentry.cache.django.DjangoCache'
settings.SENTRY_CACHE_OPTIONS = {}
# This speeds up the tests considerably, pbkdf2 is by design, slow.
settings.PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
settings.AUTH_PASSWORD_VALIDATORS = []
# Replace real sudo middleware with our mock sudo middleware
# to assert that the user is always in sudo mode
middleware = list(settings.MIDDLEWARE_CLASSES)
sudo = middleware.index('sentry.middleware.sudo.SudoMiddleware')
middleware[sudo] = 'sentry.testutils.middleware.SudoMiddleware'
settings.MIDDLEWARE_CLASSES = tuple(middleware)
settings.SENTRY_OPTIONS['cloudflare.secret-key'] = 'cloudflare-secret-key'
# enable draft features
settings.SENTRY_OPTIONS['mail.enable-replies'] = True
settings.SENTRY_ALLOW_ORIGIN = '*'
settings.SENTRY_TSDB = 'sentry.tsdb.inmemory.InMemoryTSDB'
settings.SENTRY_TSDB_OPTIONS = {}
if settings.SENTRY_NEWSLETTER == 'sentry.newsletter.base.Newsletter':
settings.SENTRY_NEWSLETTER = 'sentry.newsletter.dummy.DummyNewsletter'
settings.SENTRY_NEWSLETTER_OPTIONS = {}
settings.BROKER_BACKEND = 'memory'
settings.BROKER_URL = None
settings.CELERY_ALWAYS_EAGER = False
settings.CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
settings.DEBUG_VIEWS = True
settings.SENTRY_ENCRYPTION_SCHEMES = ()
settings.DISABLE_RAVEN = True
settings.CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
if not hasattr(settings, 'SENTRY_OPTIONS'):
settings.SENTRY_OPTIONS = {}
settings.SENTRY_OPTIONS.update(
{
'redis.clusters': {
'default': {
'hosts': {
0: {
'db': 9,
},
},
},
},
'mail.backend': 'django.core.mail.backends.locmem.EmailBackend',
'system.url-prefix': 'http://testserver',
'slack.client-id': 'slack-client-id',
'slack.client-secret': 'slack-client-secret',
'slack.verification-token': 'slack-verification-token',
'github-app.name': 'sentry-test-app',
'github-app.client-id': 'github-client-id',
'github-app.client-secret': 'github-client-secret',
'vsts.client-id': 'vsts-client-id',
'vsts.client-secret': 'vsts-client-secret',
}
)
# django mail uses socket.getfqdn which doesn't play nice if our
# networking isn't stable
patcher = mock.patch('socket.getfqdn', return_value='localhost')
patcher.start()
if not settings.SOUTH_TESTS_MIGRATE:
settings.INSTALLED_APPS = tuple(i for i in settings.INSTALLED_APPS if i != 'south')
from sentry.runner.initializer import (
bootstrap_options, configure_structlog, initialize_receivers, fix_south,
bind_cache_to_option_store, setup_services
)
bootstrap_options(settings)
configure_structlog()
fix_south(settings)
import django
if hasattr(django, 'setup'):
django.setup()
bind_cache_to_option_store()
initialize_receivers()
setup_services()
register_extensions()
from sentry.utils.redis import clusters
with clusters.get('default').all() as client:
client.flushdb()
# force celery registration
from sentry.celery import app # NOQA
# disable DISALLOWED_IPS
from sentry import http
http.DISALLOWED_IPS = set()
def register_extensions():
from sentry.plugins import plugins
from sentry.plugins.utils import TestIssuePlugin2
plugins.register(TestIssuePlugin2)
from sentry import integrations
from sentry.integrations.bitbucket import BitbucketIntegrationProvider
from sentry.integrations.example import (
ExampleIntegrationProvider, AliasedIntegrationProvider, ExampleRepositoryProvider
)
from sentry.integrations.github import GitHubIntegrationProvider
from sentry.integrations.github_enterprise import GitHubEnterpriseIntegrationProvider
from sentry.integrations.gitlab import GitlabIntegrationProvider
from sentry.integrations.jira import JiraIntegrationProvider
from sentry.integrations.jira_server import JiraServerIntegrationProvider
from sentry.integrations.slack import SlackIntegrationProvider
from sentry.integrations.vsts import VstsIntegrationProvider
from sentry.integrations.vsts_extension import VstsExtensionIntegrationProvider
integrations.register(BitbucketIntegrationProvider)
integrations.register(ExampleIntegrationProvider)
integrations.register(AliasedIntegrationProvider)
integrations.register(GitHubIntegrationProvider)
integrations.register(GitHubEnterpriseIntegrationProvider)
integrations.register(GitlabIntegrationProvider)
integrations.register(JiraIntegrationProvider)
integrations.register(JiraServerIntegrationProvider)
integrations.register(SlackIntegrationProvider)
integrations.register(VstsIntegrationProvider)
integrations.register(VstsExtensionIntegrationProvider)
from sentry.plugins import bindings
from sentry.plugins.providers.dummy import DummyRepositoryProvider
bindings.add('repository.provider', DummyRepositoryProvider, id='dummy')
bindings.add(
'integration-repository.provider',
ExampleRepositoryProvider,
id='integrations:example')
def pytest_runtest_teardown(item):
from sentry import tsdb
# TODO(dcramer): this only works if this is the correct tsdb backend
tsdb.flush()
# XXX(dcramer): only works with DummyNewsletter
from sentry import newsletter
if hasattr(newsletter.backend, 'clear'):
newsletter.backend.clear()
from sentry.utils.redis import clusters
with clusters.get('default').all() as client:
client.flushdb()
from celery.task.control import discard_all
discard_all()
from sentry.models import OrganizationOption, ProjectOption, UserOption
for model in (OrganizationOption, ProjectOption, UserOption):
model.objects.clear_local_cache()
Hub.main.bind_client(None)
@pytest.fixture(autouse=True)
def _mock_skip_to_python(monkeypatch, request):
from sentry.interfaces.base import RUST_RENORMALIZED_DEFAULT
monkeypatch.setattr(
'sentry.models.event._should_skip_to_python',
lambda _: RUST_RENORMALIZED_DEFAULT)
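# --- Illustrative usage note (not part of the original plugin) ---
# With this pytest plugin active, tests in the tree can simply rely on the
# in-memory backends configured above; a minimal sketch (the test name is
# hypothetical, and sentry.testutils is assumed to be importable):
#
#     from sentry.testutils import TestCase
#
#     class ExampleConfigTest(TestCase):
#         def test_tsdb_backend_is_in_memory(self):
#             from django.conf import settings
#             assert settings.SENTRY_TSDB == 'sentry.tsdb.inmemory.InMemoryTSDB'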
| 33.452555
| 91
| 0.669976
|
e41d19a9b87a6a6490e516e0a901ba98755bc522
| 190
|
py
|
Python
|
sui/ml/__init__.py
|
AdrianLeonLhy/sui
|
06e0636e98e862a0fa95d97cfd56b1f0ef51b299
|
[
"MIT"
] | null | null | null |
sui/ml/__init__.py
|
AdrianLeonLhy/sui
|
06e0636e98e862a0fa95d97cfd56b1f0ef51b299
|
[
"MIT"
] | null | null | null |
sui/ml/__init__.py
|
AdrianLeonLhy/sui
|
06e0636e98e862a0fa95d97cfd56b1f0ef51b299
|
[
"MIT"
] | null | null | null |
"""sui.ml
Machine learning algorithm implementations
"""
from __future__ import absolute_import, division, print_function, unicode_literals
from .mf import FunkSVD, BiasSVD, SVDpp, BPR, ALS
| 31.666667
| 82
| 0.810526
|
3cceb4a5a0c1d6e976e2929ea7a837914bd0778a
| 9,080
|
py
|
Python
|
functionaltests/api/v1/test_assembly.py
|
devdattakulkarni/test-solum
|
4e9ddb82d217116aa2c30a6f2581080cbdfae325
|
[
"Apache-2.0"
] | null | null | null |
functionaltests/api/v1/test_assembly.py
|
devdattakulkarni/test-solum
|
4e9ddb82d217116aa2c30a6f2581080cbdfae325
|
[
"Apache-2.0"
] | null | null | null |
functionaltests/api/v1/test_assembly.py
|
devdattakulkarni/test-solum
|
4e9ddb82d217116aa2c30a6f2581080cbdfae325
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# Copyright 2013 - Noorul Islam K M
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from tempest_lib import exceptions as tempest_exceptions
from functionaltests.api import base
sample_data = {"name": "test_assembly",
"description": "A test to create assembly",
"project_id": "project_id",
"user_id": "user_id",
"status": "QUEUED",
"application_uri": "http://localhost:5000"}
plan_sample_data = {"version": "1",
"name": "test_plan",
"description": "A test to create plan",
"project_id": "project_id",
"user_id": "user_id"}
class TestAssemblyController(base.TestCase):
def setUp(self):
super(TestAssemblyController, self).setUp()
def tearDown(self):
super(TestAssemblyController, self).tearDown()
self.client.delete_created_assemblies()
self.client.delete_created_plans()
def _assert_output_expected(self, body_data, data):
self.assertEqual(body_data['description'], data['description'])
self.assertEqual(body_data['plan_uri'], data['plan_uri'])
self.assertEqual(body_data['name'], data['name'])
self.assertIsNotNone(body_data['uuid'])
self.assertEqual(body_data['status'], data['status'])
self.assertEqual(body_data['application_uri'], data['application_uri'])
def test_assemblies_get_all(self):
# Create assemblies to find
p_resp_1 = self.client.create_plan()
self.assertEqual(p_resp_1.status, 201)
a_resp_1 = self.client.create_assembly(data=sample_data,
plan_uuid=p_resp_1.uuid)
self.assertEqual(a_resp_1.status, 201)
p_resp_2 = self.client.create_plan()
self.assertEqual(p_resp_2.status, 201)
a_resp_2 = self.client.create_assembly(data=sample_data,
plan_uuid=p_resp_2.uuid)
self.assertEqual(a_resp_2.status, 201)
# Get list of all assemblies
resp, body = self.client.get('v1/assemblies')
self.assertEqual(resp.status, 200)
# Search for uuids of created assemblies
assembly_list = json.loads(body)
found_uuid_1 = False
found_uuid_2 = False
for assembly in assembly_list:
uuid = json.dumps(assembly['uuid'])
if a_resp_1.uuid in uuid:
found_uuid_1 = True
elif a_resp_2.uuid in uuid:
found_uuid_2 = True
self.assertTrue(found_uuid_1,
'Cannot find assembly [%s] in list of all assemblies.'
% a_resp_1.uuid)
self.assertTrue(found_uuid_2,
'Cannot find assembly [%s] in list of all assemblies.'
% a_resp_2.uuid)
def test_assemblies_create(self):
plan_resp = self.client.create_plan()
self.assertEqual(plan_resp.status, 201)
assembly_resp = self.client.create_assembly(
plan_uuid=plan_resp.uuid,
data=sample_data)
self.assertEqual(assembly_resp.status, 201)
sample_data['plan_uri'] = "%s/v1/plans/%s" % (self.client.base_url,
plan_resp.uuid)
self._assert_output_expected(assembly_resp.data, sample_data)
def test_assemblies_create_none(self):
self.assertRaises(tempest_exceptions.BadRequest,
self.client.post, 'v1/assemblies', "{}")
def test_assemblies_get(self):
plan_resp = self.client.create_plan(data=plan_sample_data)
self.assertEqual(plan_resp.status, 201)
plan_uuid = plan_resp.uuid
assembly_resp = self.client.create_assembly(
plan_uuid=plan_uuid,
data=sample_data)
self.assertEqual(assembly_resp.status, 201)
uuid = assembly_resp.uuid
sample_data['plan_uri'] = "%s/v1/plans/%s" % (self.client.base_url,
plan_uuid)
resp, body = self.client.get('v1/assemblies/%s' % uuid)
self.assertEqual(resp.status, 200)
json_data = json.loads(body)
self._assert_output_expected(json_data, sample_data)
# Now check that HTTPS is respected. No new assemblies are created.
for k in ['plan_uri', 'trigger_uri']:
if k in sample_data:
sample_data[k] = sample_data[k].replace('http:', 'https:', 1)
use_https = {'X-Forwarded-Proto': 'https'}
resp, body = self.client.get('v1/assemblies/%s' % uuid,
headers=use_https)
self.assertEqual(resp.status, 200)
json_data = json.loads(body)
self._assert_output_expected(json_data, sample_data)
def test_assemblies_get_not_found(self):
self.assertRaises(tempest_exceptions.NotFound,
self.client.get, 'v1/assemblies/not_found')
def test_assemblies_put(self):
plan_resp = self.client.create_plan()
self.assertEqual(plan_resp.status, 201)
plan_uuid = plan_resp.uuid
assembly_resp = self.client.create_assembly(
plan_uuid=plan_uuid,
data=sample_data)
self.assertEqual(assembly_resp.status, 201)
uuid = assembly_resp.uuid
uri = "%s/v1/plans/%s" % (self.client.base_url, plan_uuid)
updated_data = {"name": "test_assembly_updated",
"description": "A test to create assembly updated",
"plan_uri": uri,
"user_id": "user_id updated",
"status": "new_status",
"application_uri": "new_uri"}
updated_json = json.dumps(updated_data)
resp, body = self.client.put('v1/assemblies/%s' % uuid, updated_json)
self.assertEqual(resp.status, 200)
json_data = json.loads(body)
self._assert_output_expected(json_data, updated_data)
def test_assemblies_put_not_found(self):
updated_data = {"name": "test_assembly_updated",
"description": "A test to create assembly updated",
"plan_uri": 'fake_uri',
"project_id": "project_id updated",
"user_id": "user_id updated",
"status": "new_status",
"application_uri": "new_uri"}
updated_json = json.dumps(updated_data)
self.assertRaises(tempest_exceptions.NotFound,
self.client.put, 'v1/assemblies/not_found',
updated_json)
def test_assemblies_put_none(self):
self.assertRaises(tempest_exceptions.BadRequest,
self.client.put, 'v1/assemblies/any', "{}")
def test_assemblies_put_cannot_update(self):
plan_resp = self.client.create_plan()
self.assertEqual(plan_resp.status, 201)
plan_uuid = plan_resp.uuid
assembly_resp = self.client.create_assembly(
plan_uuid=plan_uuid,
data=sample_data)
self.assertEqual(assembly_resp.status, 201)
uuid = assembly_resp.uuid
immutables = [
('id', 'new_assembly_id'),
('uuid', 'new_assembly_uuid'),
('project_id', 'new_project_id'),
]
for key_value in immutables:
updated_data = dict([key_value])
updated_json = json.dumps(updated_data)
self.assertRaises(tempest_exceptions.BadRequest,
self.client.put,
'v1/assemblies/%s' % uuid,
updated_json)
def test_assemblies_delete(self):
plan_resp = self.client.create_plan()
self.assertEqual(plan_resp.status, 201)
assembly_resp = self.client.create_assembly(
plan_uuid=plan_resp.uuid,
data=sample_data)
self.assertEqual(assembly_resp.status, 201)
uuid = assembly_resp.uuid
resp, body = self.client.delete_assembly(uuid)
self.assertEqual(resp.status, 204)
self.assertEqual(body, '')
self.assertTrue(self.client.assembly_delete_done(uuid),
"Assembly couldn't be deleted.")
def test_assemblies_delete_not_found(self):
self.assertRaises(tempest_exceptions.NotFound,
self.client.delete, 'v1/assemblies/not_found')
| 42.037037
| 79
| 0.601322
|
a5d20fc343d4be628da04796e7bca793e5964391
| 272
|
py
|
Python
|
tests/artificial/transf_Quantization/trend_MovingAverage/cycle_5/ar_/test_artificial_1024_Quantization_MovingAverage_5__20.py
|
shaido987/pyaf
|
b9afd089557bed6b90b246d3712c481ae26a1957
|
[
"BSD-3-Clause"
] | 377
|
2016-10-13T20:52:44.000Z
|
2022-03-29T18:04:14.000Z
|
tests/artificial/transf_Quantization/trend_MovingAverage/cycle_5/ar_/test_artificial_1024_Quantization_MovingAverage_5__20.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 160
|
2016-10-13T16:11:53.000Z
|
2022-03-28T04:21:34.000Z
|
tests/artificial/transf_Quantization/trend_MovingAverage/cycle_5/ar_/test_artificial_1024_Quantization_MovingAverage_5__20.py
|
ysdede/pyaf
|
b5541b8249d5a1cfdc01f27fdfd99b6580ed680b
|
[
"BSD-3-Clause"
] | 63
|
2017-03-09T14:51:18.000Z
|
2022-03-27T20:52:57.000Z
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 5, transform = "Quantization", sigma = 0.0, exog_count = 20, ar_order = 0);
| 38.857143
| 172
| 0.738971
|
be9539a7f5bd4efaae9503c59a7aa6046578e273
| 316
|
py
|
Python
|
packages/pyright-internal/src/tests/samples/super3.py
|
sasano8/pyright
|
e804f324ee5dbd25fd37a258791b3fd944addecd
|
[
"MIT"
] | 4,391
|
2019-05-07T01:18:57.000Z
|
2022-03-31T20:45:44.000Z
|
packages/pyright-internal/src/tests/samples/super3.py
|
sasano8/pyright
|
e804f324ee5dbd25fd37a258791b3fd944addecd
|
[
"MIT"
] | 2,740
|
2019-05-07T03:29:30.000Z
|
2022-03-31T12:57:46.000Z
|
packages/pyright-internal/src/tests/samples/super3.py
|
sasano8/pyright
|
e804f324ee5dbd25fd37a258791b3fd944addecd
|
[
"MIT"
] | 455
|
2019-05-07T12:55:14.000Z
|
2022-03-31T17:09:15.000Z
|
# This sample tests the case where super() is used for a class
# whose base classes are of unknown types.
from some_module import ClassUnknown # type: ignore
class Class1(ClassUnknown):
def __init__(self, x: int):
# This should not generate an error.
super(Class1, self).__init__(x, 1, 2, 3)
| 28.727273
| 62
| 0.696203
|
74589c02a6af4bcb2d4d177c2d54ed3bc8812ae0
| 10,048
|
py
|
Python
|
lib/googlecloudsdk/third_party/apis/oslogin/v1/oslogin_v1_messages.py
|
kustodian/google-cloud-sdk
|
b6bae4137d4b58030adb3dcb1271216dfb19f96d
|
[
"Apache-2.0"
] | null | null | null |
lib/googlecloudsdk/third_party/apis/oslogin/v1/oslogin_v1_messages.py
|
kustodian/google-cloud-sdk
|
b6bae4137d4b58030adb3dcb1271216dfb19f96d
|
[
"Apache-2.0"
] | 11
|
2020-02-29T02:51:12.000Z
|
2022-03-30T23:20:08.000Z
|
lib/googlecloudsdk/third_party/apis/oslogin/v1/oslogin_v1_messages.py
|
kustodian/google-cloud-sdk
|
b6bae4137d4b58030adb3dcb1271216dfb19f96d
|
[
"Apache-2.0"
] | 1
|
2020-07-24T18:47:35.000Z
|
2020-07-24T18:47:35.000Z
|
"""Generated message classes for oslogin version v1.
You can use OS Login to manage access to your VM instances using IAM roles.
For more information, read [OS Login](/compute/docs/oslogin/).
"""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.protorpclite import messages as _messages
from apitools.base.py import encoding
package = 'oslogin'
class Empty(_messages.Message):
r"""A generic empty message that you can re-use to avoid defining duplicated
empty messages in your APIs. A typical example is to use it as the request
or the response type of an API method. For instance: service Foo {
rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
JSON representation for `Empty` is empty JSON object `{}`.
"""
class ImportSshPublicKeyResponse(_messages.Message):
r"""A response message for importing an SSH public key.
Fields:
loginProfile: The login profile information for the user.
"""
loginProfile = _messages.MessageField('LoginProfile', 1)
class LoginProfile(_messages.Message):
r"""The user profile information used for logging in to a virtual machine on
Google Compute Engine.
Messages:
SshPublicKeysValue: A map from SSH public key fingerprint to the
associated key object.
Fields:
name: A unique user ID.
posixAccounts: The list of POSIX accounts associated with the user.
sshPublicKeys: A map from SSH public key fingerprint to the associated key
object.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class SshPublicKeysValue(_messages.Message):
r"""A map from SSH public key fingerprint to the associated key object.
Messages:
AdditionalProperty: An additional property for a SshPublicKeysValue
object.
Fields:
additionalProperties: Additional properties of type SshPublicKeysValue
"""
class AdditionalProperty(_messages.Message):
r"""An additional property for a SshPublicKeysValue object.
Fields:
key: Name of the additional property.
value: A SshPublicKey attribute.
"""
key = _messages.StringField(1)
value = _messages.MessageField('SshPublicKey', 2)
additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
name = _messages.StringField(1)
posixAccounts = _messages.MessageField('PosixAccount', 2, repeated=True)
sshPublicKeys = _messages.MessageField('SshPublicKeysValue', 3)
class OsloginUsersGetLoginProfileRequest(_messages.Message):
r"""A OsloginUsersGetLoginProfileRequest object.
Fields:
name: The unique ID for the user in format `users/{user}`.
projectId: The project ID of the Google Cloud Platform project.
systemId: A system ID for filtering the results of the request.
"""
name = _messages.StringField(1, required=True)
projectId = _messages.StringField(2)
systemId = _messages.StringField(3)
class OsloginUsersImportSshPublicKeyRequest(_messages.Message):
r"""A OsloginUsersImportSshPublicKeyRequest object.
Fields:
parent: The unique ID for the user in format `users/{user}`.
projectId: The project ID of the Google Cloud Platform project.
sshPublicKey: A SshPublicKey resource to be passed as the request body.
"""
parent = _messages.StringField(1, required=True)
projectId = _messages.StringField(2)
sshPublicKey = _messages.MessageField('SshPublicKey', 3)
class OsloginUsersProjectsDeleteRequest(_messages.Message):
r"""A OsloginUsersProjectsDeleteRequest object.
Fields:
name: A reference to the POSIX account to update. POSIX accounts are
identified by the project ID they are associated with. A reference to
the POSIX account is in format `users/{user}/projects/{project}`.
"""
name = _messages.StringField(1, required=True)
class OsloginUsersSshPublicKeysDeleteRequest(_messages.Message):
r"""A OsloginUsersSshPublicKeysDeleteRequest object.
Fields:
name: The fingerprint of the public key to update. Public keys are
identified by their SHA-256 fingerprint. The fingerprint of the public
key is in format `users/{user}/sshPublicKeys/{fingerprint}`.
"""
name = _messages.StringField(1, required=True)
class OsloginUsersSshPublicKeysGetRequest(_messages.Message):
r"""A OsloginUsersSshPublicKeysGetRequest object.
Fields:
name: The fingerprint of the public key to retrieve. Public keys are
identified by their SHA-256 fingerprint. The fingerprint of the public
key is in format `users/{user}/sshPublicKeys/{fingerprint}`.
"""
name = _messages.StringField(1, required=True)
class OsloginUsersSshPublicKeysPatchRequest(_messages.Message):
r"""A OsloginUsersSshPublicKeysPatchRequest object.
Fields:
name: The fingerprint of the public key to update. Public keys are
identified by their SHA-256 fingerprint. The fingerprint of the public
key is in format `users/{user}/sshPublicKeys/{fingerprint}`.
sshPublicKey: A SshPublicKey resource to be passed as the request body.
updateMask: Mask to control which fields get updated. Updates all if not
present.
"""
name = _messages.StringField(1, required=True)
sshPublicKey = _messages.MessageField('SshPublicKey', 2)
updateMask = _messages.StringField(3)
class PosixAccount(_messages.Message):
r"""The POSIX account information associated with a Google account.
Enums:
OperatingSystemTypeValueValuesEnum: The operating system type where this
account applies.
Fields:
accountId: Output only. A POSIX account identifier.
gecos: The GECOS (user information) entry for this account.
gid: The default group ID.
homeDirectory: The path to the home directory for this account.
operatingSystemType: The operating system type where this account applies.
primary: Only one POSIX account can be marked as primary.
shell: The path to the logic shell for this account.
systemId: System identifier for which account the username or uid applies
to. By default, the empty value is used.
uid: The user ID.
username: The username of the POSIX account.
"""
class OperatingSystemTypeValueValuesEnum(_messages.Enum):
r"""The operating system type where this account applies.
Values:
OPERATING_SYSTEM_TYPE_UNSPECIFIED: The operating system type associated
with the user account information is unspecified.
LINUX: Linux user account information.
WINDOWS: Windows user account information.
"""
OPERATING_SYSTEM_TYPE_UNSPECIFIED = 0
LINUX = 1
WINDOWS = 2
accountId = _messages.StringField(1)
gecos = _messages.StringField(2)
gid = _messages.IntegerField(3)
homeDirectory = _messages.StringField(4)
operatingSystemType = _messages.EnumField('OperatingSystemTypeValueValuesEnum', 5)
primary = _messages.BooleanField(6)
shell = _messages.StringField(7)
systemId = _messages.StringField(8)
uid = _messages.IntegerField(9)
username = _messages.StringField(10)
class SshPublicKey(_messages.Message):
r"""The SSH public key information associated with a Google account.
Fields:
expirationTimeUsec: An expiration time in microseconds since epoch.
fingerprint: Output only. The SHA-256 fingerprint of the SSH public key.
key: Public key text in SSH format, defined by <a
href="https://www.ietf.org/rfc/rfc4253.txt" target="_blank">RFC4253</a>
section 6.6.
"""
expirationTimeUsec = _messages.IntegerField(1)
fingerprint = _messages.StringField(2)
key = _messages.StringField(3)
class StandardQueryParameters(_messages.Message):
r"""Query parameters accepted by all methods.
Enums:
FXgafvValueValuesEnum: V1 error format.
AltValueValuesEnum: Data format for response.
Fields:
f__xgafv: V1 error format.
access_token: OAuth access token.
alt: Data format for response.
callback: JSONP
fields: Selector specifying which fields to include in a partial response.
key: API key. Your API key identifies your project and provides you with
API access, quota, and reports. Required unless you provide an OAuth 2.0
token.
oauth_token: OAuth 2.0 token for the current user.
prettyPrint: Returns response with indentations and line breaks.
quotaUser: Available to use for quota purposes for server-side
applications. Can be any arbitrary string assigned to a user, but should
not exceed 40 characters.
trace: A tracing token of the form "token:<tokenid>" to include in api
requests.
uploadType: Legacy upload protocol for media (e.g. "media", "multipart").
upload_protocol: Upload protocol for media (e.g. "raw", "multipart").
"""
class AltValueValuesEnum(_messages.Enum):
r"""Data format for response.
Values:
json: Responses with Content-Type of application/json
media: Media download with context-dependent Content-Type
proto: Responses with Content-Type of application/x-protobuf
"""
json = 0
media = 1
proto = 2
class FXgafvValueValuesEnum(_messages.Enum):
r"""V1 error format.
Values:
_1: v1 error format
_2: v2 error format
"""
_1 = 0
_2 = 1
f__xgafv = _messages.EnumField('FXgafvValueValuesEnum', 1)
access_token = _messages.StringField(2)
alt = _messages.EnumField('AltValueValuesEnum', 3, default=u'json')
callback = _messages.StringField(4)
fields = _messages.StringField(5)
key = _messages.StringField(6)
oauth_token = _messages.StringField(7)
prettyPrint = _messages.BooleanField(8, default=True)
quotaUser = _messages.StringField(9)
trace = _messages.StringField(10)
uploadType = _messages.StringField(11)
upload_protocol = _messages.StringField(12)
encoding.AddCustomJsonFieldMapping(
StandardQueryParameters, 'f__xgafv', '$.xgafv')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_1', '1')
encoding.AddCustomJsonEnumMapping(
StandardQueryParameters.FXgafvValueValuesEnum, '_2', '2')
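# --- Hedged usage sketch, not part of the generated client ---
# Illustrates how calling code might build one of the request messages above;
# the user id, fingerprint and key text are hypothetical placeholders.
if __name__ == '__main__':
    _key = SshPublicKey(key='ssh-rsa AAAA... user@example.com')
    _request = OsloginUsersSshPublicKeysPatchRequest(
        name='users/example-user/sshPublicKeys/0123abcd',
        sshPublicKey=_key,
        updateMask='key')
    print(_request)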
| 34.410959
| 89
| 0.741242
|
e7788bca798881af7070f98c8951744a173186c3
| 8,693
|
py
|
Python
|
tests/layerGrad/layerGrad-sd.py
|
simonlegrand/pysit
|
1fb1a80839ceebef12a8d71aa9c295b65b08bac4
|
[
"BSD-3-Clause"
] | 1
|
2022-01-12T13:19:16.000Z
|
2022-01-12T13:19:16.000Z
|
tests/layerGrad/layerGrad-sd.py
|
simonlegrand/pysit
|
1fb1a80839ceebef12a8d71aa9c295b65b08bac4
|
[
"BSD-3-Clause"
] | null | null | null |
tests/layerGrad/layerGrad-sd.py
|
simonlegrand/pysit
|
1fb1a80839ceebef12a8d71aa9c295b65b08bac4
|
[
"BSD-3-Clause"
] | null | null | null |
# Std import block
import time
import pickle
import numpy as np
import copy
import sys
import scipy.io as sio
from pysit import *
from pysit.gallery import layered_gradient_medium
from pysit.util.io import *
from pysit.util.parallel import *
from mpi4py import MPI
if __name__ == '__main__':
ExpDir = '.'
comm = MPI.COMM_WORLD
size = comm.Get_size()
rank = comm.Get_rank()
pwrap = ParallelWrapShot()
if rank == 0:
ttt = time.time()
sys.stdout.write('Layer-Gradient model \n')
# Set up domain, mesh and velocity model
m_param = { 'x_length' : 15.0,
'z_depth' : 4.0,
'velocity' : (1.5, 2.0, 4.0),
'layer_thickness' : (1.0, 1.0, 2.0),
'initial_model_style' : 'constant',
'initial_config' : {'velocity':1.5},
}
C, C0, m, d = layered_gradient_medium(model_param=m_param,
dx = 0.05, dz = 0.05,
initial_model_style=m_param['initial_model_style'],
initial_config=m_param['initial_config'])
if rank == 0:
sys.stdout.write('initial_model_style = %s \n' %m_param['initial_model_style'])
sys.stdout.write('initial_config = %s \n' %m_param['initial_config'])
# Set up shots
zmin = d.z.lbound
zmax = d.z.rbound
zpos = 0.05 * 1.0
Nshots = size
Nreceivers = 'max'
Ric_freq = 10.0
sys.stdout.write("{0}: {1}\n".format(rank, Nshots / size))
shots = equispaced_acquisition(m,
RickerWavelet(Ric_freq),
sources=Nshots,
source_depth=zpos,
source_kwargs={},
receivers=Nreceivers,
receiver_depth=zpos,
receiver_kwargs={},
parallel_shot_wrap=pwrap,
)
# shots_freq = copy.deepcopy(shots)
# Define and configure the wave solver
t_range = (0.0,6.0)
solver = ConstantDensityAcousticWave(m,
spatial_accuracy_order=6,
trange=t_range,
kernel_implementation='cpp',
)
# Generate synthetic Seismic data
if rank == 0:
sys.stdout.write('Model parameters setting: \n')
sys.stdout.write('Nshots = %d \n' %Nshots)
if Nreceivers == 'max':
sys.stdout.write('Nreceivers = %d \n' %m.x.n)
else:
sys.stdout.write('Nreceivers = %d \n' %Nreceivers)
sys.stdout.write('Ricker wavelet frequency = %.1f Hz \n' %Ric_freq)
sys.stdout.write('Recording time = %.1f s\n' %t_range[1])
sys.stdout.write('Generating data... \n')
initial_model = solver.ModelParameters(m,{'C': C0})
generate_seismic_data(shots, solver, initial_model)
wavefield_initial = comm.gather(shots[0].receivers.data, root=0)
base_model = solver.ModelParameters(m,{'C': C})
tt = time.time()
generate_seismic_data(shots, solver, base_model)
wavefield_true = comm.gather(shots[0].receivers.data, root=0)
sys.stdout.write('{1}:Data generation: {0}s\n'.format(time.time()-tt,rank))
sys.stdout.flush()
comm.Barrier()
if rank == 0:
tttt = time.time()-ttt
sys.stdout.write('Total wall time: {0}\n'.format(tttt))
sys.stdout.write('Total wall time/shot: {0}\n'.format(tttt/Nshots))
############# Set up objective function ##############
ot_param = { 'sinkhorn_iterations' : 10000,
'sinkhorn_tolerance' : 1.0e-9,
'epsilon_maxsmooth' : 1.0e-5, # for the smoothing of the max(., 0)
'successive_over_relaxation' : 1.4,
'trans_func_type' : 'smooth_max', ## smooth_max ## exp ## square ## id ##
'epsilon_kl' : 1e-2,
'lamb_kl' : 0.1,
't_scale' : 20.0,
'x_scale' : 1.0,
'nt_resampling' : 128,
'sinkhorn_initialization' : True,
# 'Noise' : False,
'N_receivers' : Nreceivers,
'filter_op' : False,
'freq_band' : [1, 30.0],
}
#### Least-squares objective function
# if rank == 0:
# print('Least-squares...')
# objective = TemporalLeastSquares(solver, parallel_wrap_shot=pwrap)
#### Sinkhorn-Divergence objective function
if rank == 0:
print('Sinkhorn Divergence...')
print('Sinkhorn Divergence parameters setting:')
print('trans_func_type = %s' %ot_param['trans_func_type'])
print('sinkhorn_initialization = %s' %ot_param['sinkhorn_initialization'])
print('sinkhorn_epsilon_kl = %.1f' %ot_param['epsilon_kl'])
print('sinkhorn_lamb_kl = %.1f' %ot_param['lamb_kl'])
print('sinkhorn_t_scale = %.1f' %ot_param['t_scale'])
print('sinkhorn_x_scale = %.1f' %ot_param['x_scale'])
print('sinkhorn_nt_resampling = %d' %ot_param['nt_resampling'])
objective = SinkhornDivergence(solver, ot_param=ot_param, parallel_wrap_shot=pwrap)
# Define the inversion algorithm
line_search = 'backtrack'
status_configuration = {'value_frequency' : 1,
'residual_frequency' : 1,
'residual_length_frequency' : 1,
'objective_frequency' : 1,
'step_frequency' : 1,
'step_length_frequency' : 1,
'gradient_frequency' : 1,
'gradient_length_frequency' : 1,
'run_time_frequency' : 1,
'alpha_frequency' : 1,
}
# print('Running GradientDescent...')
# invalg = GradientDescent(objective)
# print('Running PQN...')
# bound = [1.5, 6.5]
# Proj_Op1 = BoxConstraintPrj(bound)
# invalg_1 = PQN(objective, proj_op=Proj_Op1, memory_length=10)
if rank == 0:
print('Running LBFGS...')
invalg = LBFGS(objective, memory_length=10)
initial_value = solver.ModelParameters(m, {'C': C0})
# Execute inversion algorithm
tt = time.time()
nsteps = 100
result = invalg(shots, initial_value, nsteps,
line_search=line_search,
status_configuration=status_configuration, verbose=True, write=True)
initial_value.data = result.C
C_cut = initial_value.without_padding().data
C_inverted = C_cut.reshape(m.shape(as_grid=True)).transpose()
####################################################################################################
# Save wavefield
inverted_model = solver.ModelParameters(m,{'C': C_cut})
generate_seismic_data(shots, solver, inverted_model)
wavefield_inverted = comm.gather(shots[0].receivers.data, root=0)
# SaveData
if rank == 0:
############ Saving results ###########################
with open('mesh.p', 'wb') as f:
pickle.dump(m, f)
conv_vals = np.array([v for k,v in list(invalg.objective_history.items())])
initial_value.data = invalg.gradient_history[0].data
gradient = initial_value.without_padding().data.reshape(m.shape(as_grid=True)).transpose()
ns = int(np.shape(wavefield_true)[0]/2)
output = {'conv': conv_vals,
'inverted': C_inverted,
'true': C.reshape(m.shape(as_grid=True)).transpose(),
'initial': C0.reshape(m.shape(as_grid=True)).transpose(),
'wavefield_true': wavefield_true[ns],
'wavefield_initial': wavefield_initial[ns],
'wavefield_inverted': wavefield_inverted[ns],
'gradient': gradient,
'x_range': [d.x.lbound, d.x.rbound],
'z_range': [d.z.lbound, d.z.rbound],
't_range': t_range,
'obj_name': objective.name(),
}
sio.savemat('./output.mat', output)
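        # --- Hedged post-processing sketch, not part of the original script ---
        # The saved output.mat can be reloaded later for plotting; the keys
        # mirror the `output` dict built above.
        #   res = sio.loadmat('./output.mat')
        #   C_inv = res['inverted']; conv = res['conv'].ravel()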
| 39.334842
| 104
| 0.510986
|
784a2cf880de0139031f5e39ce81a4b5c05eb9cd
| 4,195
|
py
|
Python
|
nipyapi/nifi/models/dimensions_dto.py
|
Paul-Verardi/nipyapi
|
7a709611d9cf30e4ce8943db4d4dd617f2f7c81c
|
[
"Apache-2.0"
] | null | null | null |
nipyapi/nifi/models/dimensions_dto.py
|
Paul-Verardi/nipyapi
|
7a709611d9cf30e4ce8943db4d4dd617f2f7c81c
|
[
"Apache-2.0"
] | 1
|
2018-11-13T21:01:33.000Z
|
2018-11-13T21:01:33.000Z
|
nipyapi/nifi/models/dimensions_dto.py
|
Paul-Verardi/nipyapi
|
7a709611d9cf30e4ce8943db4d4dd617f2f7c81c
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
NiFi Rest Api
The Rest Api provides programmatic access to command and control a NiFi instance in real time. Start and stop processors, monitor queues, query provenance data, and more. Each endpoint below includes a description, definitions of the expected input and output, potential response codes, and the authorizations required to invoke each service.
OpenAPI spec version: 1.7.1
Contact: dev@nifi.apache.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class DimensionsDTO(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'width': 'float',
'height': 'float'
}
attribute_map = {
'width': 'width',
'height': 'height'
}
def __init__(self, width=None, height=None):
"""
DimensionsDTO - a model defined in Swagger
"""
self._width = None
self._height = None
if width is not None:
self.width = width
if height is not None:
self.height = height
@property
def width(self):
"""
Gets the width of this DimensionsDTO.
The width of the label in pixels when at a 1:1 scale.
:return: The width of this DimensionsDTO.
:rtype: float
"""
return self._width
@width.setter
def width(self, width):
"""
Sets the width of this DimensionsDTO.
The width of the label in pixels when at a 1:1 scale.
:param width: The width of this DimensionsDTO.
:type: float
"""
self._width = width
@property
def height(self):
"""
Gets the height of this DimensionsDTO.
The height of the label in pixels when at a 1:1 scale.
:return: The height of this DimensionsDTO.
:rtype: float
"""
return self._height
@height.setter
def height(self, height):
"""
Sets the height of this DimensionsDTO.
The height of the label in pixels when at a 1:1 scale.
:param height: The height of this DimensionsDTO.
:type: float
"""
self._height = height
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, DimensionsDTO):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
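# --- Hedged usage sketch, not part of the generated client ---
# Round-trips the model through to_dict(); the width/height values are
# illustrative only.
if __name__ == '__main__':
    dto = DimensionsDTO(width=150.0, height=75.0)
    print(dto.to_dict())                                    # {'width': 150.0, 'height': 75.0}
    print(dto == DimensionsDTO(width=150.0, height=75.0))   # True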
| 27.24026
| 479
| 0.538975
|
ac9f038ba79ad80c448f788278ad0a2a076bc08c
| 170
|
py
|
Python
|
setup.py
|
er-knight/photo-magic
|
4b122c76785c718df0aceb5833e2ade475595fc6
|
[
"MIT"
] | 1
|
2021-11-14T18:34:26.000Z
|
2021-11-14T18:34:26.000Z
|
setup.py
|
er-knight/photo-magic
|
4b122c76785c718df0aceb5833e2ade475595fc6
|
[
"MIT"
] | null | null | null |
setup.py
|
er-knight/photo-magic
|
4b122c76785c718df0aceb5833e2ade475595fc6
|
[
"MIT"
] | null | null | null |
from setuptools import setup
from Cython.Build import cythonize
setup(
ext_modules=cythonize("lfsr.py",
compiler_directives={"language_level" : "3"}
),
)
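# Hedged note, not part of the original script: with Cython and a C compiler
# available, the module is typically compiled in place with
#   python setup.py build_ext --inplace
# after which `import lfsr` loads the compiled extension.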
| 21.25
| 52
| 0.705882
|
1348168df3b64ab99c73bfbc440167f99e45f1cc
| 7,974
|
py
|
Python
|
unsupervised_learning/clustering/agglomerative_clustering.py
|
cesar0205/machine_learning_algorithms
|
7cd7c7295e2fb7357123cd81cb3e43c485930d74
|
[
"Apache-2.0"
] | null | null | null |
unsupervised_learning/clustering/agglomerative_clustering.py
|
cesar0205/machine_learning_algorithms
|
7cd7c7295e2fb7357123cd81cb3e43c485930d74
|
[
"Apache-2.0"
] | null | null | null |
unsupervised_learning/clustering/agglomerative_clustering.py
|
cesar0205/machine_learning_algorithms
|
7cd7c7295e2fb7357123cd81cb3e43c485930d74
|
[
"Apache-2.0"
] | null | null | null |
import numpy as np
import matplotlib.pyplot as plt
from scipy.cluster.hierarchy import dendrogram as scipy_dendrogram
from itertools import combinations
from scipy.spatial.distance import euclidean
import copy
'''
Abstract class that different linkage implementations will use to link two clusters.
'''
class AbstractLinkage():
def alpha_fn(self, ni, nj, nk):
raise NotImplementedError();
def beta_fn(self, ni, nj, nk):
raise NotImplementedError();
def gamma_fn(self, ni, nj, nk):
raise NotImplementedError();
    #Links two clusters given indexes i, j. Updates the distance matrix by eliminating cluster j and updating the
    #distances between the newly merged cluster and the remaining clusters.
def link(self, dist_matrix, dendrogram, i, j):
m = len(dist_matrix);
for k in range(m):
if k != i and k != j:
distance = self.compute_distance(dist_matrix, dendrogram, i, j, k);
dist_matrix[i, k] = distance;
dist_matrix[k, i] = distance;
indexes = np.delete(range(m), j);
return dist_matrix.take(indexes, axis=0).take(indexes, axis=1);
def compute_distance(self, dist_matrix, dendrogram, i, j, k):
ni = len(dendrogram[i]);
nj = len(dendrogram[j]);
nk = len(dendrogram[k]);
alpha_i = self.alpha_fn(ni, nj, nk);
alpha_j = self.alpha_fn(nj, ni, nk);
beta = self.beta_fn(ni, nj, nk);
gamma = self.gamma_fn(ni, nj, nk);
d_ik = dist_matrix[i, k];
d_jk = dist_matrix[j, k];
d_ij = dist_matrix[i, j];
return alpha_i * d_ik + alpha_j * d_jk + beta * d_ij + gamma * np.abs(d_ik - d_jk);
#Single linkage parameter definition
class SingleLinkage(AbstractLinkage):
def alpha_fn(self, ni, nj, nk):
return 0.5
def beta_fn(self, ni, nj, nk):
return 0;
def gamma_fn(self, ni, nj, nk):
return 0.5
#Complete linkage parameter definition
class CompleteLinkage(AbstractLinkage):
def alpha_fn(self, ni, nj, nk):
return 0.5
def beta_fn(self, ni, nj, nk):
return 0;
def gamma_fn(self, ni, nj, nk):
return -0.5
#Ward linkage parameter definition
class WardLinkage(AbstractLinkage):
def alpha_fn(self, ni, nj, nk):
return (ni + nk) / (ni + nj + nk)
def beta_fn(self, ni, nj, nk):
return (-nk) / (ni + nj + nk)
def gamma_fn(self, ni, nj, nk):
return 0
LINKAGES = {'single': SingleLinkage(),
'complete': CompleteLinkage(),
'ward': WardLinkage()}
'''
Dendrogram node implementation. A node is a merge of its children. It also stores the distance between its children.
'''
class DendrogramNode():
def __init__(self, id, children=None):
self.id = id;
self.children = children;
self.distance = 0;
#Returns the list of the original datapoints contained in this node.
def leaves(self):
if (self.children):
leaves = [];
for child in self.children:
leaves.extend(child.leaves())
return leaves;
else:
return [self];
#Recursive function that returns the children ids of this node along with their distance and number of datapoints
#below it.
def adjacency_list(self):
if (self.children):
al = [(self.id, self.children[0].id, self.children[1].id, self.distance, len(self))]
for child in self.children:
al.extend(child.adjacency_list());
return al;
else:
return [];
#Number of datapoints below this node.
def __len__(self):
return len(self.leaves())
'''
Implements the dendrogram for hierarchical clustering.
'''
class Dendrogram(list):
def __init__(self, items):
super().__init__(map(DendrogramNode, items));
self.n_clusters = len(items);
def merge(self, *indexes):
node = DendrogramNode(self.n_clusters, [self[i] for i in indexes]);
self.n_clusters += 1;
self[indexes[0]] = node;
for i in indexes[1:]:
del self[i];
def to_adjacency_matrix(self):
Z = self[0].adjacency_list();
Z.sort()
return np.array(Z)[:, 1:];
def draw(self, title=None):
fig = plt.figure();
ax = plt.gca();
A = self.to_adjacency_matrix();
scipy_dendrogram(A, color_threshold=0.7 * max(A[:, 2]));
ax.spines["bottom"].set_visible(False)
ax.spines["top"].set_visible(False)
ax.spines["left"].set_visible(False)
ax.spines["right"].set_visible(False)
fig.suptitle(title, fontsize=12);
plt.show();
'''
Stores the distance matrix with distances between clusters.
'''
class DistanceMatrix():
def __init__(self, X, linkage=SingleLinkage(), distance_metric=euclidean):
self.linkage = linkage;
self.distance_metric = distance_metric;
self.dist_matrix = self.build_distance_matrix(X)
self.dendrogram = Dendrogram([i for i in range(len(self.dist_matrix))]);
#Looks for the smallest distance between clusters and returns the corresponding cluster indexes
def get_smallest_distance(self):
m = len(self.dist_matrix)
i, j = np.unravel_index(np.argmin(self.dist_matrix), (m, m));
return self.dist_matrix[i, j], i, j
    #Merges two clusters, updating the distance matrix and the dendrogram.
def merge_clusters(self, i, j, distance):
#print(self.linkage.__class__.__name__)
self.dist_matrix = self.linkage.link(self.dist_matrix, self.dendrogram, i, j);
self.dendrogram.merge(i, j)
self.dendrogram[i].distance = distance;
#Computes the distance matrix using the datapoints in X.
def build_distance_matrix(self, X):
m = len(X);
dist_matrix = np.zeros((m, m));
for i, j in combinations(range(m), 2):
dist_matrix[i, j] = self.distance_metric(X[i], X[j]);
dist_matrix[j, i] = dist_matrix[i, j]
        #Fill the diagonal with np.inf so the zero self-distances are never selected as minima.
np.fill_diagonal(dist_matrix, np.inf);
return dist_matrix;
def __len__(self):
return len(self.dist_matrix);
'''
Agglomerative clustering implementation.
'''
class AgglomerativeClustering():
def __init__(self, linkage='single', distance_metric=euclidean, distance_thres=1.2):
'''
:param linkage: Clustering linkage: single, complete, ward
:param distance_metric: distance metric to measure distance between clusters
:param distance_thres: Threshold to stop clustering and return the clusters found.
'''
self.linkage = LINKAGES[linkage];
self.distance_metric = distance_metric;
self.distance_thres = distance_thres;
def fit(self, X):
m = len(X)
self.dist_matrix = DistanceMatrix(X, linkage=self.linkage);
dendrogram_thres = None;
        #While more than one cluster remains.
while (len(self.dist_matrix) > 1):
distance, i, j = self.dist_matrix.get_smallest_distance();
if (distance > self.distance_thres and dendrogram_thres is None):
                #When the threshold is first exceeded, save a copy of the dendrogram for later cluster extraction.
dendrogram_thres = copy.copy(self.dist_matrix.dendrogram);
self.dist_matrix.merge_clusters(i, j, distance);
        #If the threshold was never exceeded, fall back to the fully merged dendrogram.
        if dendrogram_thres is None: dendrogram_thres = copy.copy(self.dist_matrix.dendrogram)
        clusters = [];
#Transform the dendrogram into a list of clusters.
for node in dendrogram_thres:
cluster = [node.id for node in node.leaves()]
clusters.append(cluster);
self.labels_ = np.full(m, -1);
for cluster_id, cluster in enumerate(clusters):
for x_ind in cluster:
self.labels_[x_ind] = cluster_id;
def dendrogram(self):
return self.dist_matrix.dendrogram;
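# --- Hedged usage sketch, not part of the original module ---
# A tiny synthetic run; the data, linkage and threshold below are illustrative.
if __name__ == '__main__':
    X = np.array([[0.0, 0.0], [0.1, 0.0], [0.0, 0.1],
                  [5.0, 5.0], [5.1, 5.0], [5.0, 5.1]])
    model = AgglomerativeClustering(linkage='single', distance_thres=1.0)
    model.fit(X)
    print(model.labels_)      # expected: two clusters, e.g. [0 0 0 1 1 1]
    model.dendrogram().draw(title='Single linkage dendrogram')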
| 32.283401
| 121
| 0.625031
|
cfacd883d333bfa3b28161034faa34ff506f3ad1
| 506
|
py
|
Python
|
spotify_controller.py
|
koshini/spotify-gesture-controller
|
e4d0481848604754b9ef639d1cbb22a498e9b862
|
[
"MIT"
] | null | null | null |
spotify_controller.py
|
koshini/spotify-gesture-controller
|
e4d0481848604754b9ef639d1cbb22a498e9b862
|
[
"MIT"
] | null | null | null |
spotify_controller.py
|
koshini/spotify-gesture-controller
|
e4d0481848604754b9ef639d1cbb22a498e9b862
|
[
"MIT"
] | null | null | null |
import spotipy
class SpotifyController():
def __init__(self, TOKEN, DEVICEID):
self.sp = spotipy.Spotify(auth=TOKEN)
self.device_id = DEVICEID
def pause(self):
self.sp.pause_playback(device_id=self.device_id)
def play(self):
self.sp.start_playback(device_id=self.device_id)
def next_track(self):
self.sp.next_track(device_id=self.device_id)
def previous_track(self):
self.sp.previous_track(device_id=self.device_id)
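# --- Hedged usage sketch, not part of the original module ---
# TOKEN and DEVICE_ID are hypothetical placeholders; a real OAuth token with
# playback scopes and a valid Spotify Connect device id are required.
if __name__ == '__main__':
    TOKEN = '<your-oauth-token>'      # hypothetical
    DEVICE_ID = '<your-device-id>'    # hypothetical
    controller = SpotifyController(TOKEN, DEVICE_ID)
    controller.play()
    controller.next_track()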
| 26.631579
| 58
| 0.667984
|
46a6404d7efef9e75de5556d300c1c4c8cedfdf2
| 6,556
|
py
|
Python
|
Vault7/Lost-in-Translation/windows/Resources/Ops/PyScripts/lib/xops/ip.py
|
dendisuhubdy/grokmachine
|
120a21a25c2730ed356739231ec8b99fc0575c8b
|
[
"BSD-3-Clause"
] | 46
|
2017-05-15T11:15:08.000Z
|
2018-07-02T03:32:52.000Z
|
Vault7/Lost-in-Translation/windows/Resources/Ops/PyScripts/lib/xops/ip.py
|
dendisuhubdy/grokmachine
|
120a21a25c2730ed356739231ec8b99fc0575c8b
|
[
"BSD-3-Clause"
] | null | null | null |
Vault7/Lost-in-Translation/windows/Resources/Ops/PyScripts/lib/xops/ip.py
|
dendisuhubdy/grokmachine
|
120a21a25c2730ed356739231ec8b99fc0575c8b
|
[
"BSD-3-Clause"
] | 24
|
2017-05-17T03:26:17.000Z
|
2018-07-09T07:00:50.000Z
|
from __future__ import division
from math import floor
def get_cidr_from_subnet(subnet):
if (not validate_ipv4(subnet)):
raise ValueError, 'Subnet must be valid.'
subnetsplit = subnet.split('.')
cidr = 0
for oct in subnetsplit:
cidr = (cidr + list(bin(int(oct))).count('1'))
return cidr
def get_subnet_from_cidr(cidr):
if (not ((type(cidr) is int) or (type(cidr) is long))):
raise TypeError, 'Value must be an integer or a long.'
num = 0
for i in range(0, cidr):
num = (num | (2 ** (31 - i)))
return get_ip_from_int(num)
def get_ip_from_int(num):
if (not ((type(num) is int) or (type(num) is long))):
raise TypeError, 'Value must be an integer or a long.'
one = floor((num // (2 ** 24)))
two = floor(((num - (one * (2 ** 24))) // (2 ** 16)))
three = floor((((num - (one * (2 ** 24))) - (two * (2 ** 16))) // (2 ** 8)))
four = (((num - (one * (2 ** 24))) - (two * (2 ** 16))) - (three * (2 ** 8)))
if validate_ipv4(('%d.%d.%d.%d' % (one, two, three, four))):
return ('%d.%d.%d.%d' % (one, two, three, four))
else:
return False
def get_ip_from_hex_str(data):
if (not (isinstance(data, str) or isinstance(data, unicode))):
raise TypeError, 'Must supply a hex string.'
if (len(data) != 8):
raise ValueError, 'Hex string must be in 8 characters in length'
one = ((int(data[0], 16) * 16) + int(data[1], 16))
two = ((int(data[2], 16) * 16) + int(data[3], 16))
three = ((int(data[4], 16) * 16) + int(data[5], 16))
four = ((int(data[6], 16) * 16) + int(data[7], 16))
if validate_ipv4(('%s.%s.%s.%s' % (one, two, three, four))):
return ('%s.%s.%s.%s' % (one, two, three, four))
else:
return False
def get_int_from_ip(ip):
if (not validate_ipv4(ip)):
raise ValueError, 'IP must be valid.'
splitwork = ip.split('.')
if (len(splitwork) != 4):
return ip
return ((((int(splitwork[0]) * (2 ** 24)) + (int(splitwork[1]) * (2 ** 16))) + (int(splitwork[2]) * (2 ** 8))) + int(splitwork[3]))
def expand_ipv6(address):
if (not validate_ipv6(address)):
raise ValueError, 'Address must be a IPv6 notation.'
half = address.split('::')
if (len(half) == 2):
half[0] = half[0].split(':')
half[1] = half[1].split(':')
nodes = ((half[0] + (['0'] * (8 - (len(half[0]) + len(half[1]))))) + half[1])
else:
nodes = half[0].split(':')
return ':'.join((('%04x' % int((i or '0'), 16)) for i in nodes))
def get_broadcast_from_subnet(ip, subnet):
if (not ((type(subnet) is str) or (type(subnet) is unicode))):
raise TypeError, 'Subnet must be a string representation.'
if (not validate_ipv4_subnet(subnet)):
raise TypeError, 'Subnet must be a valid subnet mask.'
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
if (not validate_ipv4(ip)):
raise TypeError, 'IP must be a valid IP address.'
network = get_network_from_subnet(ip, subnet)
net_split = network.split('.')
sub_split = subnet.split('.')
broadcast = []
for i in range(0, 4):
broadcast.append(str((int(net_split[i]) | (int(sub_split[i]) ^ 255))))
return '.'.join(broadcast)
def get_network_from_subnet(ip, subnet):
if (not ((type(subnet) is str) or (type(subnet) is unicode))):
raise TypeError, 'Subnet must be a string representation.'
if (not validate_ipv4_subnet(subnet)):
raise TypeError, 'Subnet must be a valid subnet mask.'
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
if (not validate_ipv4(ip)):
raise TypeError, 'IP must be a valid IP address.'
ip_split = ip.split('.')
sub_split = subnet.split('.')
network = []
for i in range(0, 4):
network.append(str((int(ip_split[i]) & int(sub_split[i]))))
return '.'.join(network)
def validate_ipv4_subnet(subnet):
if (not ((type(subnet) is str) or (type(subnet) is unicode))):
raise TypeError, 'Subnet must be a string representation.'
if (not validate_ipv4(subnet)):
return False
found_zero = False
for item in subnet.split('.'):
if ((not found_zero) and (item == '255')):
continue
if (found_zero and (not (item == '0'))):
return False
digit = int(item)
for i in range(0, 8):
if ((digit & (2 ** (7 - i))) == 0):
found_zero = True
elif found_zero:
return False
return True
def validate_ipv4(ip):
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
octets = ip.split('.')
if (len(octets) != 4):
return False
for octet in octets:
try:
i = int(octet)
except ValueError:
return False
if ((i < 0) or (i > 255)):
return False
else:
return True
def validate_ipv6(ip):
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
nodes = ip.split('%')
if (len(nodes) not in [1, 2]):
return False
addr = nodes[0]
if (len(nodes) == 2):
try:
int(nodes[1])
except ValueError:
return False
if (addr.count('::') > 1):
return False
groups = addr.split(':')
if ((len(groups) > 8) or (len(groups) < 3)):
return False
for group in groups:
if (group == ''):
continue
try:
i = int(group, 16)
except ValueError:
return False
if ((i < 0) or (i > 65535)):
return False
else:
return True
def validate(ip):
if (not ((type(ip) is str) or (type(ip) is unicode))):
raise TypeError, 'IP must be a string representation.'
if (':' in ip):
return validate_ipv6(ip)
elif ('.' in ip):
return validate_ipv4(ip)
else:
return False
def validate_port(port):
if (not ((type(port) is int) or (type(port) is long))):
raise TypeError, 'Port must be an int or long representation.'
if ((port >= 0) and (port <= 65535)):
return True
return False
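# --- Hedged usage sketch, not part of the original module ---
# Exercises the helpers above on illustrative addresses (Python 2 syntax,
# matching the module).
if __name__ == '__main__':
    print(get_cidr_from_subnet('255.255.255.0'))                        # 24
    print(get_subnet_from_cidr(24))                                     # 255.255.255.0
    print(get_network_from_subnet('192.168.1.42', '255.255.255.0'))     # 192.168.1.0
    print(get_broadcast_from_subnet('192.168.1.42', '255.255.255.0'))   # 192.168.1.255
    print(validate('2001:db8::1'))                                      # True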
| 36.422222
| 136
| 0.540879
|
38aaa296fe8220d96167c8ffa5fb484d86235a2f
| 506
|
py
|
Python
|
zsolozsma/migrations/0029_auto_20210313_1334.py
|
molnarm/liturgia.tv
|
3c78de6dcec65262ae9e272188dbda92f71a7c15
|
[
"MIT"
] | 4
|
2020-05-04T10:52:35.000Z
|
2020-12-03T22:01:09.000Z
|
zsolozsma/migrations/0029_auto_20210313_1334.py
|
molnarm/liturgia.tv
|
3c78de6dcec65262ae9e272188dbda92f71a7c15
|
[
"MIT"
] | 27
|
2021-02-18T19:41:24.000Z
|
2021-11-23T05:01:00.000Z
|
zsolozsma/migrations/0029_auto_20210313_1334.py
|
molnarm/zsolozsma
|
a260af574b38687cc5955bbbf76caabc779f09c4
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1.7 on 2021-03-13 12:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('zsolozsma', '0028_liturgy_duration'),
]
operations = [
migrations.AlterField(
model_name='liturgy',
name='duration',
field=models.IntegerField(default=60, help_text='Szokásos időtartam, egyes helyszíneken lehet eltérő.', verbose_name='Időtartam (perc)'),
),
]
| 26.631579
| 150
| 0.618577
|
c25a4db48e24da067398f7f9b54ed006f3009d7e
| 2,660
|
py
|
Python
|
python/opscore/RO/ParseMsg/GetKeyword.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | null | null | null |
python/opscore/RO/ParseMsg/GetKeyword.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | 1
|
2021-08-17T21:08:14.000Z
|
2021-08-17T21:08:14.000Z
|
python/opscore/RO/ParseMsg/GetKeyword.py
|
sdss/opscore
|
dd4f2b2ad525fe3dfe3565463de2c079a7e1232e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
"""
History:
2004-05-18 ROwen Modified test code to use astr instead of str
and adict instead of dict.
2015-09-24 ROwen Replace "== None" with "is None" to modernize the code.
2015-11-03 ROwen Replace "!= None" with "is not None" to modernize the code.
"""
from __future__ import print_function
__all__ = ["getKeyword"]
import re
ptn = re.compile(r'\s*(?P<key>[a-zA-Z_][a-zA-Z0-9_]*)(?:\s*$|(?:\s*(?P<next>[=;])))')
def getKeyword(astr, begInd=0):
"""
Returns the next keyword from an APO format message. Keywords must start
with a letter or underscore and may contain those characters or digits thereafter.
Inputs:
astr: the string to parse
begInd: the starting index; must point to the beginning of the keyword
to be extracted, though leading white space is ignored.
Returns a duple containing:
the next keyword
        the index to the next token (should be "=" or ";"), or None if at end-of-string
Exceptions:
if the next non-whitespace thing is not a keyword, throws a SyntaxError
"""
mo = ptn.match(astr, begInd)
if mo is None:
raise SyntaxError("not a keyword starting at %d in :%s:" % \
(begInd,astr))
keyword = mo.group('key')
(nextInd, junk) = mo.span('next')
if nextInd < 0:
nextInd = None
return (keyword, nextInd)
if __name__ == '__main__':
# perform test
print("testing getKeyword\n")
testList = [
("text = 'test'", 0),
("text2 = 'test'", 0),
("skipme, text = 'test'", 8),
("text='test'", 0),
("text ;", 0),
("text;", 0),
("text=;", 0),
("text = ;", 0),
("text=", 0),
("text = ", 0),
("text", 0),
("_leadingUnderscore = 'test'", 0),
(" _leadingWhitespace = 'test'", 0),
("text x 'bad character after keyword'", 0),
("text , 'bad character after keyword'", 0),
("text, 'bad character immediately after keyword'", 0),
("0badKeyStart = 'test'", 0),
(", badFirstChar = 'test'", 0),
("; badFirstChar = 'test'", 0),
("'badKeyStart' = 'starts with single quote'", 0),
]
for (astr, nextInd) in testList:
try:
(adict, nextInd) = getKeyword(astr, nextInd)
print("getKeyword('%s') = \"%s\";" % (astr, adict), end=' ')
if nextInd is not None:
print("astr[%d] = \"%s\"" % (nextInd, astr[nextInd]))
else:
print("end of text")
except Exception as e:
print("failed with error: %s" % (e))
| 32.048193
| 86
| 0.546617
|
685462452ed9294af2ee01a2766262371a62de07
| 935
|
py
|
Python
|
saleor/discount/migrations/0008_auto_20180223_1018.py
|
3d510/saleor-easyship
|
e094141bd217f62168c77965a1844426c592a6ef
|
[
"BSD-3-Clause"
] | null | null | null |
saleor/discount/migrations/0008_auto_20180223_1018.py
|
3d510/saleor-easyship
|
e094141bd217f62168c77965a1844426c592a6ef
|
[
"BSD-3-Clause"
] | null | null | null |
saleor/discount/migrations/0008_auto_20180223_1018.py
|
3d510/saleor-easyship
|
e094141bd217f62168c77965a1844426c592a6ef
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 2.0.2 on 2018-02-23 16:18
from django.db import migrations, models
import django_prices.models
class Migration(migrations.Migration):
dependencies = [
('discount', '0007_auto_20180108_0814'),
]
operations = [
migrations.AlterField(
model_name='sale',
name='type',
field=models.CharField(choices=[('fixed', 'SGD'), ('percentage', '%')], default='fixed', max_length=10),
),
migrations.AlterField(
model_name='voucher',
name='discount_value_type',
field=models.CharField(choices=[('fixed', 'SGD'), ('percentage', '%')], default='fixed', max_length=10),
),
migrations.AlterField(
model_name='voucher',
name='limit',
field=django_prices.models.PriceField(blank=True, currency='SGD', decimal_places=2, max_digits=12, null=True),
),
]
| 31.166667
| 122
| 0.592513
|
d8fb310a7fc557644874762a8d8e31c72e4a1137
| 8,317
|
py
|
Python
|
libcloud/test/compute/test_cloudsigma_v1_0.py
|
atsaki/libcloud
|
ae85479e835494e196e2f6e79aae9a475603d8ac
|
[
"Apache-2.0"
] | 3
|
2015-09-11T15:42:16.000Z
|
2021-05-12T01:10:05.000Z
|
libcloud/test/compute/test_cloudsigma_v1_0.py
|
atsaki/libcloud
|
ae85479e835494e196e2f6e79aae9a475603d8ac
|
[
"Apache-2.0"
] | 1
|
2021-09-11T14:30:32.000Z
|
2021-09-11T14:30:32.000Z
|
libcloud/test/compute/test_cloudsigma_v1_0.py
|
atsaki/libcloud
|
ae85479e835494e196e2f6e79aae9a475603d8ac
|
[
"Apache-2.0"
] | 3
|
2016-02-08T23:38:18.000Z
|
2019-11-05T00:31:34.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.compute.base import Node
from libcloud.compute.drivers.cloudsigma import CloudSigmaNodeDriver
from libcloud.compute.drivers.cloudsigma import CloudSigmaZrhNodeDriver
from libcloud.utils.misc import str2dicts, str2list, dict2str
from libcloud.test import MockHttp
from libcloud.test.file_fixtures import ComputeFileFixtures
class CloudSigmaAPI10BaseTestCase(object):
should_list_locations = False
driver_klass = CloudSigmaZrhNodeDriver
driver_kwargs = {}
def setUp(self):
self.driver = self.driver_klass(*self.driver_args,
**self.driver_kwargs)
self.driver.connectionCls.conn_classes = (None,
CloudSigmaHttp)
def test_list_nodes(self):
nodes = self.driver.list_nodes()
self.assertTrue(isinstance(nodes, list))
self.assertEqual(len(nodes), 1)
node = nodes[0]
self.assertEqual(node.public_ips[0], "1.2.3.4")
self.assertEqual(node.extra['smp'], 1)
self.assertEqual(node.extra['cpu'], 1100)
self.assertEqual(node.extra['mem'], 640)
def test_list_sizes(self):
        sizes = self.driver.list_sizes()
        self.assertEqual(len(sizes), 9)
    def test_list_images(self):
        images = self.driver.list_images()
        self.assertEqual(len(images), 10)
def test_start_node(self):
nodes = self.driver.list_nodes()
node = nodes[0]
self.assertTrue(self.driver.ex_start_node(node))
def test_shutdown_node(self):
nodes = self.driver.list_nodes()
node = nodes[0]
self.assertTrue(self.driver.ex_stop_node(node))
self.assertTrue(self.driver.ex_shutdown_node(node))
def test_reboot_node(self):
node = self.driver.list_nodes()[0]
self.assertTrue(self.driver.reboot_node(node))
def test_destroy_node(self):
node = self.driver.list_nodes()[0]
self.assertTrue(self.driver.destroy_node(node))
self.driver.list_nodes()
def test_create_node(self):
size = self.driver.list_sizes()[0]
image = self.driver.list_images()[0]
node = self.driver.create_node(
name="cloudsigma node", image=image, size=size)
self.assertTrue(isinstance(node, Node))
def test_ex_static_ip_list(self):
ips = self.driver.ex_static_ip_list()
self.assertEqual(len(ips), 3)
def test_ex_static_ip_create(self):
result = self.driver.ex_static_ip_create()
self.assertEqual(len(result), 2)
self.assertEqual(len(list(result[0].keys())), 6)
self.assertEqual(len(list(result[1].keys())), 6)
def test_ex_static_ip_destroy(self):
result = self.driver.ex_static_ip_destroy('1.2.3.4')
self.assertTrue(result)
def test_ex_drives_list(self):
result = self.driver.ex_drives_list()
self.assertEqual(len(result), 2)
def test_ex_drive_destroy(self):
result = self.driver.ex_drive_destroy(
# @@TR: this should be soft-coded:
'd18119ce_7afa_474a_9242_e0384b160220')
self.assertTrue(result)
def test_ex_set_node_configuration(self):
node = self.driver.list_nodes()[0]
result = self.driver.ex_set_node_configuration(node, **{'smp': 2})
self.assertTrue(result)
def test_str2dicts(self):
string = 'mem 1024\ncpu 2200\n\nmem2048\cpu 1100'
result = str2dicts(string)
self.assertEqual(len(result), 2)
def test_str2list(self):
string = 'ip 1.2.3.4\nip 1.2.3.5\nip 1.2.3.6'
result = str2list(string)
self.assertEqual(len(result), 3)
self.assertEqual(result[0], '1.2.3.4')
self.assertEqual(result[1], '1.2.3.5')
self.assertEqual(result[2], '1.2.3.6')
def test_dict2str(self):
d = {'smp': 5, 'cpu': 2200, 'mem': 1024}
result = dict2str(d)
self.assertTrue(len(result) > 0)
self.assertTrue(result.find('smp 5') >= 0)
self.assertTrue(result.find('cpu 2200') >= 0)
self.assertTrue(result.find('mem 1024') >= 0)
class CloudSigmaAPI10DirectTestCase(CloudSigmaAPI10BaseTestCase,
unittest.TestCase):
driver_klass = CloudSigmaZrhNodeDriver
driver_args = ('foo', 'bar')
driver_kwargs = {}
class CloudSigmaAPI10IndirectTestCase(CloudSigmaAPI10BaseTestCase,
unittest.TestCase):
driver_klass = CloudSigmaNodeDriver
driver_args = ('foo', 'bar')
driver_kwargs = {'api_version': '1.0'}
class CloudSigmaHttp(MockHttp):
fixtures = ComputeFileFixtures('cloudsigma')
def _drives_standard_info(self, method, url, body, headers):
body = self.fixtures.load('drives_standard_info.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _servers_62fe7cde_4fb9_4c63_bd8c_e757930066a0_start(
self, method, url, body, headers):
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _servers_62fe7cde_4fb9_4c63_bd8c_e757930066a0_stop(
self, method, url, body, headers):
return (httplib.NO_CONTENT, body, {}, httplib.responses[httplib.OK])
def _servers_62fe7cde_4fb9_4c63_bd8c_e757930066a0_destroy(
self, method, url, body, headers):
return (httplib.NO_CONTENT,
body, {}, httplib.responses[httplib.NO_CONTENT])
def _drives_d18119ce_7afa_474a_9242_e0384b160220_clone(
self, method, url, body, headers):
body = self.fixtures.load('drives_clone.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _drives_a814def5_1789_49a0_bf88_7abe7bb1682a_info(
self, method, url, body, headers):
body = self.fixtures.load('drives_single_info.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _drives_info(self, method, url, body, headers):
body = self.fixtures.load('drives_info.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _servers_create(self, method, url, body, headers):
body = self.fixtures.load('servers_create.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _servers_info(self, method, url, body, headers):
body = self.fixtures.load('servers_info.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _resources_ip_list(self, method, url, body, headers):
body = self.fixtures.load('resources_ip_list.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _resources_ip_create(self, method, url, body, headers):
body = self.fixtures.load('resources_ip_create.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _resources_ip_1_2_3_4_destroy(self, method, url, body, headers):
return (httplib.NO_CONTENT, body, {}, httplib.responses[httplib.OK])
def _drives_d18119ce_7afa_474a_9242_e0384b160220_destroy(
self, method, url, body, headers):
return (httplib.NO_CONTENT, body, {}, httplib.responses[httplib.OK])
def _servers_62fe7cde_4fb9_4c63_bd8c_e757930066a0_set(
self, method, url, body, headers):
body = self.fixtures.load('servers_set.txt')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
| 37.129464
| 76
| 0.668991
|
eeb3976bfcd57bcee65f02876ecd8ab7215ba2cc
| 14,541
|
py
|
Python
|
reagent/evaluation/weighted_sequential_doubly_robust_estimator.py
|
mcx/ReAgent
|
57b58a8b3a6b74bb87a197b73a6cd108ddad895e
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/evaluation/weighted_sequential_doubly_robust_estimator.py
|
mcx/ReAgent
|
57b58a8b3a6b74bb87a197b73a6cd108ddad895e
|
[
"BSD-3-Clause"
] | null | null | null |
reagent/evaluation/weighted_sequential_doubly_robust_estimator.py
|
mcx/ReAgent
|
57b58a8b3a6b74bb87a197b73a6cd108ddad895e
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import itertools
import logging
import numpy as np
import scipy as sp
import torch
from reagent.evaluation.cpe import CpeEstimate
from reagent.evaluation.evaluation_data_page import EvaluationDataPage
logger = logging.getLogger(__name__)
class WeightedSequentialDoublyRobustEstimator:
NUM_SUBSETS_FOR_CB_ESTIMATES = 25
CONFIDENCE_INTERVAL = 0.9
NUM_BOOTSTRAP_SAMPLES = 50
BOOTSTRAP_SAMPLE_PCT = 0.5
def __init__(self, gamma):
self.gamma = gamma
def estimate(
self,
edp: EvaluationDataPage,
num_j_steps,
whether_self_normalize_importance_weights,
) -> CpeEstimate:
# For details, visit https://arxiv.org/pdf/1604.00923.pdf Section 5, 7, 8
assert edp.model_values is not None
(
actions,
rewards,
logged_propensities,
target_propensities,
estimated_q_values,
) = WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories(
edp.mdp_id,
edp.action_mask.cpu().numpy(),
edp.logged_rewards.cpu().numpy().flatten(),
edp.logged_propensities.cpu().numpy().flatten(),
edp.model_propensities.cpu().numpy(),
# pyre-ignore [16]: Optional type has no attribute `cpu`
edp.model_values.cpu().numpy(),
)
num_trajectories = actions.shape[0]
trajectory_length = actions.shape[1]
j_steps = [float("inf")]
if num_j_steps > 1:
j_steps.append(-1)
if num_j_steps > 2:
interval = trajectory_length // (num_j_steps - 1)
j_steps.extend([i * interval for i in range(1, num_j_steps - 1)])
target_propensity_for_logged_action = np.sum(
np.multiply(target_propensities, actions), axis=2
)
estimated_q_values_for_logged_action = np.sum(
np.multiply(estimated_q_values, actions), axis=2
)
estimated_state_values = np.sum(
np.multiply(target_propensities, estimated_q_values), axis=2
)
importance_weights = target_propensity_for_logged_action / logged_propensities
importance_weights = np.cumprod(importance_weights, axis=1)
importance_weights = (
WeightedSequentialDoublyRobustEstimator.normalize_importance_weights(
importance_weights, whether_self_normalize_importance_weights
)
)
importance_weights_one_earlier = (
np.ones([num_trajectories, 1]) * 1.0 / num_trajectories
)
importance_weights_one_earlier = np.hstack(
[importance_weights_one_earlier, importance_weights[:, :-1]]
)
discounts = np.logspace(
start=0, stop=trajectory_length - 1, num=trajectory_length, base=self.gamma
)
j_step_return_trajectories = []
for j_step in j_steps:
j_step_return_trajectories.append(
WeightedSequentialDoublyRobustEstimator.calculate_step_return(
rewards,
discounts,
importance_weights,
importance_weights_one_earlier,
estimated_state_values,
estimated_q_values_for_logged_action,
j_step,
)
)
j_step_return_trajectories = np.array(j_step_return_trajectories)
j_step_returns = np.sum(j_step_return_trajectories, axis=1)
if len(j_step_returns) == 1:
weighted_doubly_robust = j_step_returns[0]
weighted_doubly_robust_std_error = 0.0
else:
# break trajectories into several subsets to estimate confidence bounds
infinite_step_returns = []
num_subsets = int(
min(
num_trajectories / 2,
WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES,
)
)
interval = num_trajectories / num_subsets
for i in range(num_subsets):
trajectory_subset = np.arange(
int(i * interval), int((i + 1) * interval)
)
importance_weights = (
target_propensity_for_logged_action[trajectory_subset]
/ logged_propensities[trajectory_subset]
)
importance_weights = np.cumprod(importance_weights, axis=1)
importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights(
importance_weights, whether_self_normalize_importance_weights
)
importance_weights_one_earlier = (
np.ones([len(trajectory_subset), 1]) * 1.0 / len(trajectory_subset)
)
importance_weights_one_earlier = np.hstack(
[importance_weights_one_earlier, importance_weights[:, :-1]]
)
infinite_step_return = np.sum(
WeightedSequentialDoublyRobustEstimator.calculate_step_return(
rewards[trajectory_subset],
discounts,
importance_weights,
importance_weights_one_earlier,
estimated_state_values[trajectory_subset],
estimated_q_values_for_logged_action[trajectory_subset],
float("inf"),
)
)
infinite_step_returns.append(infinite_step_return)
# Compute weighted_doubly_robust mean point estimate using all data
weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate(
j_steps,
num_j_steps,
j_step_returns,
infinite_step_returns,
j_step_return_trajectories,
)
# Use bootstrapping to compute weighted_doubly_robust standard error
bootstrapped_means = []
sample_size = int(
WeightedSequentialDoublyRobustEstimator.BOOTSTRAP_SAMPLE_PCT
* num_subsets
)
for _ in range(
WeightedSequentialDoublyRobustEstimator.NUM_BOOTSTRAP_SAMPLES
):
random_idxs = np.random.choice(num_j_steps, sample_size, replace=False)
random_idxs.sort()
wdr_estimate = self.compute_weighted_doubly_robust_point_estimate(
j_steps=[j_steps[i] for i in random_idxs],
num_j_steps=sample_size,
j_step_returns=j_step_returns[random_idxs],
infinite_step_returns=infinite_step_returns,
j_step_return_trajectories=j_step_return_trajectories[random_idxs],
)
bootstrapped_means.append(wdr_estimate)
weighted_doubly_robust_std_error = np.std(bootstrapped_means)
episode_values = np.sum(np.multiply(rewards, discounts), axis=1)
logged_policy_score = np.nanmean(episode_values)
if logged_policy_score < 1e-6:
logger.warning(
"Can't normalize WSDR-CPE because of small or negative logged_policy_score"
)
return CpeEstimate(
raw=weighted_doubly_robust,
normalized=0.0,
raw_std_error=weighted_doubly_robust_std_error,
normalized_std_error=0.0,
)
return CpeEstimate(
raw=weighted_doubly_robust,
normalized=weighted_doubly_robust / logged_policy_score,
raw_std_error=weighted_doubly_robust_std_error,
normalized_std_error=weighted_doubly_robust_std_error / logged_policy_score,
)
def compute_weighted_doubly_robust_point_estimate(
self,
j_steps,
num_j_steps,
j_step_returns,
infinite_step_returns,
j_step_return_trajectories,
):
(
low_bound,
high_bound,
) = WeightedSequentialDoublyRobustEstimator.confidence_bounds(
infinite_step_returns,
WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL,
)
# decompose error into bias + variance
j_step_bias = np.zeros([num_j_steps])
where_lower = np.where(j_step_returns < low_bound)[0]
j_step_bias[where_lower] = low_bound - j_step_returns[where_lower]
where_higher = np.where(j_step_returns > high_bound)[0]
j_step_bias[where_higher] = j_step_returns[where_higher] - high_bound
covariance = np.cov(j_step_return_trajectories)
error = covariance + j_step_bias.T * j_step_bias
# minimize mse error
constraint = {"type": "eq", "fun": lambda x: np.sum(x) - 1.0}
x = np.zeros([len(j_steps)])
res = sp.optimize.minimize(
mse_loss,
x,
args=error,
constraints=constraint,
bounds=[(0, 1) for _ in range(x.shape[0])],
)
x = np.array(res.x)
return float(np.dot(x, j_step_returns))
@staticmethod
def transform_to_equal_length_trajectories(
mdp_ids,
actions,
rewards,
logged_propensities,
target_propensities,
estimated_q_values,
):
"""
Take in samples (action, rewards, propensities, etc.) and output lists
of equal-length trajectories (episodes) according to terminals.
As the raw trajectories are of various lengths, the shorter ones are
        padded with zeros (ones for the propensities) at the end.
"""
num_actions = len(target_propensities[0])
terminals = np.zeros(mdp_ids.shape[0])
for x in range(0, mdp_ids.shape[0]):
if x + 1 == mdp_ids.shape[0] or mdp_ids[x, 0] != mdp_ids[x + 1, 0]:
terminals[x] = 1
trajectories = []
episode_start = 0
episode_ends = np.nonzero(terminals)[0]
if len(terminals) - 1 not in episode_ends:
episode_ends = np.append(episode_ends, len(terminals) - 1)
for episode_end in episode_ends:
trajectories.append(np.arange(episode_start, episode_end + 1))
episode_start = episode_end + 1
action_trajectories = []
reward_trajectories = []
logged_propensity_trajectories = []
target_propensity_trajectories = []
Q_value_trajectories = []
for trajectory in trajectories:
action_trajectories.append(actions[trajectory])
reward_trajectories.append(rewards[trajectory])
logged_propensity_trajectories.append(logged_propensities[trajectory])
target_propensity_trajectories.append(target_propensities[trajectory])
Q_value_trajectories.append(estimated_q_values[trajectory])
def to_equal_length(x, fill_value):
x_equal_length = np.array(
list(itertools.zip_longest(*x, fillvalue=fill_value))
).swapaxes(0, 1)
return x_equal_length
action_trajectories = to_equal_length(
action_trajectories, np.zeros([num_actions])
)
reward_trajectories = to_equal_length(reward_trajectories, 0)
logged_propensity_trajectories = to_equal_length(
logged_propensity_trajectories, 1
)
target_propensity_trajectories = to_equal_length(
target_propensity_trajectories, np.zeros([num_actions])
)
Q_value_trajectories = to_equal_length(
Q_value_trajectories, np.zeros([num_actions])
)
return (
action_trajectories,
reward_trajectories,
logged_propensity_trajectories,
target_propensity_trajectories,
Q_value_trajectories,
)
@staticmethod
def normalize_importance_weights(
importance_weights, whether_self_normalize_importance_weights
):
if whether_self_normalize_importance_weights:
sum_importance_weights = np.sum(importance_weights, axis=0)
where_zeros = np.where(sum_importance_weights == 0.0)[0]
sum_importance_weights[where_zeros] = len(importance_weights)
importance_weights[:, where_zeros] = 1.0
importance_weights /= sum_importance_weights
return importance_weights
else:
importance_weights /= importance_weights.shape[0]
return importance_weights
@staticmethod
def calculate_step_return(
rewards,
discounts,
importance_weights,
importance_weights_one_earlier,
estimated_state_values,
estimated_q_values,
j_step,
):
trajectory_length = len(rewards[0])
num_trajectories = len(rewards)
j_step = int(min(j_step, trajectory_length - 1))
weighted_discounts = np.multiply(discounts, importance_weights)
weighted_discounts_one_earlier = np.multiply(
discounts, importance_weights_one_earlier
)
importance_sampled_cumulative_reward = np.sum(
np.multiply(weighted_discounts[:, : j_step + 1], rewards[:, : j_step + 1]),
axis=1,
)
if j_step < trajectory_length - 1:
direct_method_value = (
weighted_discounts_one_earlier[:, j_step + 1]
* estimated_state_values[:, j_step + 1]
)
else:
direct_method_value = np.zeros([num_trajectories])
control_variate = np.sum(
np.multiply(
weighted_discounts[:, : j_step + 1], estimated_q_values[:, : j_step + 1]
)
- np.multiply(
weighted_discounts_one_earlier[:, : j_step + 1],
estimated_state_values[:, : j_step + 1],
),
axis=1,
)
j_step_return = (
importance_sampled_cumulative_reward + direct_method_value - control_variate
)
return j_step_return
@staticmethod
def confidence_bounds(x, confidence):
n = len(x)
m, se = np.mean(x), sp.stats.sem(x)
h = se * sp.stats.t._ppf((1 + confidence) / 2.0, n - 1)
return m - h, m + h
def mse_loss(x, error):
return np.dot(np.dot(x, error), x.T)
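# --- Hedged usage sketch, not part of the original module ---
# Exercises two small numeric pieces defined above on toy inputs; the numbers
# are illustrative only.
if __name__ == "__main__":
    # mse_loss(x, error) evaluates x @ error @ x^T for a candidate weight vector.
    x = np.array([0.5, 0.5])
    error = np.eye(2)
    print(mse_loss(x, error))  # 0.5
    # Per-step discount vector as built in estimate(), for gamma=0.9 and 4 steps.
    gamma, trajectory_length = 0.9, 4
    print(np.logspace(start=0, stop=trajectory_length - 1,
                      num=trajectory_length, base=gamma))  # [1. 0.9 0.81 0.729]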
| 37.573643
| 106
| 0.610412
|
551006ca2bcf147dcfcfcfa4177442de9f5f357b
| 2,874
|
py
|
Python
|
salt/modules/elasticsearch.py
|
jkur/salt
|
3e62675550f9869d550d7787800270e632955d2f
|
[
"Apache-2.0"
] | 3
|
2016-09-03T06:26:42.000Z
|
2019-06-30T13:04:53.000Z
|
salt/modules/elasticsearch.py
|
jkur/salt
|
3e62675550f9869d550d7787800270e632955d2f
|
[
"Apache-2.0"
] | null | null | null |
salt/modules/elasticsearch.py
|
jkur/salt
|
3e62675550f9869d550d7787800270e632955d2f
|
[
"Apache-2.0"
] | 1
|
2020-10-19T11:49:50.000Z
|
2020-10-19T11:49:50.000Z
|
# -*- coding: utf-8 -*-
'''
Connection module for Elasticsearch
:depends: elasticsearch
'''
from __future__ import absolute_import
# Import Python libs
import logging
log = logging.getLogger(__name__)
# Import third party libs
try:
import elasticsearch
logging.getLogger('elasticsearch').setLevel(logging.CRITICAL)
HAS_ELASTICSEARCH = True
except ImportError:
HAS_ELASTICSEARCH = False
from salt.ext.six import string_types
def __virtual__():
'''
Only load if elasticsearch libraries exist.
'''
if not HAS_ELASTICSEARCH:
return False
return True
def exists(index, id, doc_type='_all', hosts=None, profile='elasticsearch'):
'''
Check for the existence of an elasticsearch document specified by id in the
index.
CLI example::
salt myminion elasticsearch.exists grafana-dash mydash profile='grafana'
'''
es = _get_instance(hosts, profile)
try:
return es.exists(index=index, id=id, doc_type=doc_type)
except elasticsearch.exceptions.NotFoundError:
return False
def index(index, doc_type, body, id=None, hosts=None, profile='elasticsearch'):
'''
Create or update an index with the specified body for the specified id.
CLI example::
salt myminion elasticsearch.index grafana-dash dashboard '{"user":"guest","group":"guest","body":"",...}' mydash profile='grafana'
'''
es = _get_instance(hosts, profile)
return es.index(index=index, doc_type=doc_type, body=body, id=id)
def get(index, id, doc_type='_all', hosts=None, profile='elasticsearch'):
'''
    Get the contents of the specified id from the index.
CLI example::
salt myminion elasticsearch.get grafana-dash mydash profile='grafana'
'''
es = _get_instance(hosts, profile)
return es.get(index=index, id=id, doc_type=doc_type)
def delete(index, doc_type, id, hosts=None, profile='elasticsearch'):
'''
Delete the document specified by the id in the index.
CLI example::
salt myminion elasticsearch.delete grafana-dash dashboard mydash profile='grafana'
'''
es = _get_instance(hosts, profile)
try:
es.delete(index=index, doc_type=doc_type, id=id)
return True
except elasticsearch.exceptions.NotFoundError:
return True
except Exception:
return False
def _get_instance(hosts, profile):
'''
Return the elasticsearch instance
'''
if profile:
if isinstance(profile, string_types):
_profile = __salt__['config.option'](profile)
elif isinstance(profile, dict):
_profile = profile
if _profile:
hosts = _profile.get('host')
if not hosts:
hosts = _profile.get('hosts')
if isinstance(hosts, string_types):
hosts = [hosts]
return elasticsearch.Elasticsearch(hosts)
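# --- Hedged configuration sketch, not part of the original module ---
# Example of the minion/master config block that _get_instance() resolves when
# a profile name is passed; the profile name and host are illustrative.
#
#   grafana:
#     host: 'es.example.com:9200'
#
# With that in place:
#   salt myminion elasticsearch.get grafana-dash mydash profile='grafana'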
| 26.611111
| 138
| 0.671886
|
9c6456e10c623a53995ea512cc51c8fec87d7c6f
| 2,690
|
py
|
Python
|
a4/utils.py
|
KIONLEE/cs224n
|
63054e187fb40d65af058673fe7aa2f22433da6e
|
[
"MIT"
] | null | null | null |
a4/utils.py
|
KIONLEE/cs224n
|
63054e187fb40d65af058673fe7aa2f22433da6e
|
[
"MIT"
] | null | null | null |
a4/utils.py
|
KIONLEE/cs224n
|
63054e187fb40d65af058673fe7aa2f22433da6e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
CS224N 2019-20: Homework 4
nmt.py: NMT Model
Pencheng Yin <pcyin@cs.cmu.edu>
Sahil Chopra <schopra8@stanford.edu>
Vera Lin <veralin@stanford.edu>
"""
import math
from typing import List
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import nltk
nltk.download('punkt')
def pad_sents(sents, pad_token):
""" Pad list of sentences according to the longest sentence in the batch.
The paddings should be at the end of each sentence.
@param sents (list[list[str]]): list of sentences, where each sentence
is represented as a list of words
@param pad_token (str): padding token
@returns sents_padded (list[list[str]]): list of sentences where sentences shorter
than the max length sentence are padded out with the pad_token, such that
        each sentence in the batch now has equal length.
"""
sents_padded = []
### YOUR CODE HERE (~6 Lines)
max_len = 0
for sent in sents:
if len(sent) > max_len:
max_len = len(sent)
for sent in sents:
sents_padded.append(sent + [pad_token for i in range(max_len - len(sent))])
### END YOUR CODE
return sents_padded
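# Worked example (added for clarity; not part of the original assignment code):
#   pad_sents([['hello', 'world'], ['hi']], '<pad>')
#   -> [['hello', 'world'], ['hi', '<pad>']]
# The longest sentence sets max_len = 2, so the shorter sentence is padded at the
# end with one '<pad>' token.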
def read_corpus(file_path, source):
""" Read file, where each sentence is dilineated by a `\n`.
@param file_path (str): path to file containing corpus
@param source (str): "tgt" or "src" indicating whether text
is of the source language or target language
"""
data = []
for line in open(file_path):
sent = nltk.word_tokenize(line)
# only append <s> and </s> to the target sentence
if source == 'tgt':
sent = ['<s>'] + sent + ['</s>']
data.append(sent)
return data
def batch_iter(data, batch_size, shuffle=False):
""" Yield batches of source and target sentences reverse sorted by length (largest to smallest).
@param data (list of (src_sent, tgt_sent)): list of tuples containing source and target sentence
@param batch_size (int): batch size
@param shuffle (boolean): whether to randomly shuffle the dataset
"""
batch_num = math.ceil(len(data) / batch_size)
index_array = list(range(len(data)))
if shuffle:
np.random.shuffle(index_array)
for i in range(batch_num):
indices = index_array[i * batch_size: (i + 1) * batch_size]
examples = [data[idx] for idx in indices]
examples = sorted(examples, key=lambda e: len(e[0]), reverse=True)
src_sents = [e[0] for e in examples]
tgt_sents = [e[1] for e in examples]
yield src_sents, tgt_sents
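# Minimal usage sketch (illustrative only; the toy sentences are made up):
#   data = [(['a', 'b', 'c'], ['x']), (['d'], ['y', 'z'])]
#   for src_sents, tgt_sents in batch_iter(data, batch_size=2):
#       # src_sents is sorted longest-first: [['a', 'b', 'c'], ['d']]
#       # tgt_sents keeps the matching order: [['x'], ['y', 'z']]
#       pass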
| 30.568182
| 100
| 0.649442
|
8e6e00acfce057ced702c737508c93c7efae612d
| 1,773
|
py
|
Python
|
3-semanticui-upload-image/app.py
|
rhcproc/tornado
|
de044044bfc33be535f9cba1c372f6b648d9472d
|
[
"MIT"
] | null | null | null |
3-semanticui-upload-image/app.py
|
rhcproc/tornado
|
de044044bfc33be535f9cba1c372f6b648d9472d
|
[
"MIT"
] | null | null | null |
3-semanticui-upload-image/app.py
|
rhcproc/tornado
|
de044044bfc33be535f9cba1c372f6b648d9472d
|
[
"MIT"
] | null | null | null |
#-*- coding:utf-8 -*-
import tornado.web
import tornado.httpserver
import tornado.ioloop
import os.path
from tornado.options import define, options
define("port", default=3000, help="run on the given port", type=int)
def get_picture_list():
import os
path = os.path.dirname( os.path.abspath( __file__ ))
picture_list = list()
for picture_name in os.listdir(path+"/static/pictures/"):
picture_list.append(dict(picture_name=picture_name))
return picture_list
class Application(tornado.web.Application):
def __init__(self):
base_dir = os.path.dirname(__file__)
settings = {
"template_path":os.path.join(base_dir, "templates"),
"static_path":os.path.join(base_dir, "static"),
"debug":True,
}
tornado.web.Application.__init__(self, [
tornado.web.url(r"/(favicon.ico)", tornado.web.StaticFileHandler, {"path":""}),
tornado.web.url(r"/", MainHandler, name="main"),
], **settings)
class MainHandler(tornado.web.RequestHandler):
def get(self):
#picture_list = utils.picture_manager.get_picture_list()
picture_list = get_picture_list()
self.render("index.html", picture_list = picture_list)
def post(self):
if self.request.files:
picture = self.request.files['picture'][0]
fp = open("static/pictures/"+picture["filename"], "wb")
fp.write(picture["body"])
fp.close()
else : pass
self.redirect("/")
def main():
tornado.options.parse_command_line()
Application().listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
print ("Open http://127.0.0.1:{}".format(options.port))
main()
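# Usage sketch (assumes the defaults defined above): start the server with
#   python app.py --port=3000
# then open http://127.0.0.1:3000 to view and upload pictures. Uploaded files are
# written to static/pictures/ relative to the working directory.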
| 30.568966
| 91
| 0.634518
|
9c1bbeb9a84e935024bdf99bd990a80b6730609c
| 5,809
|
py
|
Python
|
mistralclient/commands/v2/environments.py
|
lostmap/python-mistralclient
|
f77c1d23fb68b24d7406647b489f3f960026c46b
|
[
"Apache-2.0"
] | null | null | null |
mistralclient/commands/v2/environments.py
|
lostmap/python-mistralclient
|
f77c1d23fb68b24d7406647b489f3f960026c46b
|
[
"Apache-2.0"
] | null | null | null |
mistralclient/commands/v2/environments.py
|
lostmap/python-mistralclient
|
f77c1d23fb68b24d7406647b489f3f960026c46b
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
from oslo_serialization import jsonutils
from osc_lib.command import command
from mistralclient.commands.v2 import base
from mistralclient import utils
class EnvironmentFormatter(base.MistralFormatter):
COLUMNS = [
('name', 'Name'),
('description', 'Description'),
('variables', 'Variables'),
('scope', 'Scope'),
('created_at', 'Created at'),
('updated_at', 'Updated at'),
]
LIST_COLUMN_FIELD_NAMES = [c[0] for c in COLUMNS if c[0] != 'variables']
LIST_COLUMN_HEADING_NAMES = [c[1] for c in COLUMNS if c[0] != 'variables']
@staticmethod
def format(environment=None, lister=False):
if lister:
columns = EnvironmentFormatter.LIST_COLUMN_HEADING_NAMES
else:
columns = EnvironmentFormatter.headings()
if environment:
data = (
environment.name,)
if hasattr(environment, 'description'):
data += (environment.description or '<none>',)
else:
data += (None,)
if not lister:
data += (jsonutils.dumps(environment.variables, indent=4),)
data += (
environment.scope,
environment.created_at,)
if hasattr(environment, 'updated_at'):
data += (environment.updated_at or '<none>',)
else:
data += (None,)
else:
data = (tuple('' for _ in range(len(columns))),)
return columns, data
class List(base.MistralLister):
"""List all environments."""
def _get_format_function(self):
return EnvironmentFormatter.format_list
def _get_resources(self, parsed_args):
mistral_client = self.app.client_manager.workflow_engine
return mistral_client.environments.list(
marker=parsed_args.marker,
limit=parsed_args.limit,
sort_keys=parsed_args.sort_keys,
sort_dirs=parsed_args.sort_dirs,
fields=EnvironmentFormatter.fields(),
**base.get_filters(parsed_args)
)
class Get(command.ShowOne):
"""Show specific environment."""
def get_parser(self, prog_name):
parser = super(Get, self).get_parser(prog_name)
parser.add_argument(
'environment',
help='Environment name'
)
parser.add_argument(
'--export',
default=False,
action='store_true',
help='Export the environment suitable for import'
)
return parser
def take_action(self, parsed_args):
mistral_client = self.app.client_manager.workflow_engine
environment = mistral_client.environments.get(parsed_args.environment)
if parsed_args.export:
columns = ('name',
'description',
'scope',
'variables')
data = (environment.name,
environment.description,
environment.scope,
jsonutils.dumps(environment.variables))
return columns, data
return EnvironmentFormatter.format(environment)
class Create(command.ShowOne):
"""Create new environment."""
def get_parser(self, prog_name):
parser = super(Create, self).get_parser(prog_name)
parser.add_argument(
'file',
type=argparse.FileType('r'),
help='Environment configuration file in JSON or YAML'
)
return parser
def take_action(self, parsed_args):
data = utils.load_content(parsed_args.file.read())
mistral_client = self.app.client_manager.workflow_engine
environment = mistral_client.environments.create(**data)
return EnvironmentFormatter.format(environment)
class Delete(command.Command):
"""Delete environment."""
def get_parser(self, prog_name):
parser = super(Delete, self).get_parser(prog_name)
parser.add_argument(
'environment',
nargs='+',
help='Name of environment(s).'
)
return parser
def take_action(self, parsed_args):
mistral_client = self.app.client_manager.workflow_engine
utils.do_action_on_many(
lambda s: mistral_client.environments.delete(s),
parsed_args.environment,
"Request to delete environment %s has been accepted.",
"Unable to delete the specified environment(s)."
)
class Update(command.ShowOne):
"""Update environment."""
def get_parser(self, prog_name):
parser = super(Update, self).get_parser(prog_name)
parser.add_argument(
'file',
type=argparse.FileType('r'),
help='Environment configuration file in JSON or YAML'
)
return parser
def take_action(self, parsed_args):
data = utils.load_content(parsed_args.file.read())
mistral_client = self.app.client_manager.workflow_engine
environment = mistral_client.environments.update(**data)
return EnvironmentFormatter.format(environment)
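# Illustrative environment definition (hypothetical values) of the kind the Create
# and Update commands above read via utils.load_content; the file may be JSON or
# YAML, for example:
#
#   {
#       "name": "dev_env",
#       "description": "Example environment",
#       "variables": {
#           "nova_endpoint": "http://nova.example.com:8774/v2",
#           "timeout": 60
#       }
#   }
#
# mistral_client.environments.create(**data) then receives these keys as keyword
# arguments.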
| 30.098446
| 78
| 0.613531
|
48de5fa4626aa3a425fb0281fa232ffa828ea6ce
| 88
|
py
|
Python
|
basepy/exceptions.py
|
pyflow/insightpy
|
d80f11277a7903f0ca679ceae1def0037ca2d920
|
[
"MIT"
] | 3
|
2020-02-01T04:27:50.000Z
|
2020-04-28T13:50:10.000Z
|
basepy/exceptions.py
|
pyflow/insightpy
|
d80f11277a7903f0ca679ceae1def0037ca2d920
|
[
"MIT"
] | null | null | null |
basepy/exceptions.py
|
pyflow/insightpy
|
d80f11277a7903f0ca679ceae1def0037ca2d920
|
[
"MIT"
] | 1
|
2020-03-02T03:55:05.000Z
|
2020-03-02T03:55:05.000Z
|
class ConnectionError(Exception):
"""Failed to connect to the broker."""
pass
| 14.666667
| 42
| 0.670455
|
b4740d9c37c98a3bc3c018c209c14282cc80e50a
| 3,215
|
py
|
Python
|
src/api/cgi/yahoofinances/search/index.py
|
etinaude/9spokes
|
ed548aec08e659445a8b2dea1be910e73dc2e326
|
[
"MIT"
] | 2
|
2020-09-08T04:45:25.000Z
|
2020-09-28T04:23:15.000Z
|
src/api/cgi/yahoofinances/search/index.py
|
natisha99/9spokes
|
5252aea920761339e5af3154acb5d84664803cf8
|
[
"MIT"
] | 5
|
2020-07-14T13:03:47.000Z
|
2022-02-27T10:00:11.000Z
|
src/api/cgi/yahoofinances/search/index.py
|
etinaude/9spokes
|
ed548aec08e659445a8b2dea1be910e73dc2e326
|
[
"MIT"
] | 1
|
2020-09-08T04:46:54.000Z
|
2020-09-08T04:46:54.000Z
|
#!/usr/bin/pypy3
#!/usr/bin/python3
import cgi
import mysql.connector
from datetime import datetime, timedelta
from threading import Thread
from urllib.request import Request, urlopen
import json
def commit(company_name, results, cursor, cnx):
sql1 = "DELETE FROM yahoofinancessearch WHERE company_name='{}';".format(company_name)
sql2 = "INSERT INTO yahoofinancessearch VALUES('{}', '{}', '{}');".format(
company_name,
results,
str(datetime.now()))
cursor.execute(sql1)
cnx.commit()
cursor.execute(sql2)
cnx.commit()
cursor.close()
cnx.close()
def expected(dump):
return True
def site(company_name):
currencies = ['nzd', 'usd', 'eur', 'aud', 'sgd']
if company_name in currencies:
currencies.remove(company_name)
return json.dumps({'results':[company_name+'/'+c for c in currencies]})
elif len(company_name) == 7 and company_name[:3] in currencies and company_name[4:] in currencies:
return json.dumps({'results':[company_name[:3]+'/'+company_name[4:]]})
else:
url = 'https://nz.finance.yahoo.com/lookup?s={}'.format(company_name).replace(' ', '%20')
req = Request(url, headers={'User-Agent': 'Mozilla/5.0'})
webpage = urlopen(req).read()
html = webpage.decode('utf-8').replace('\r', '').replace('\n', '')
results = []
html = html[html.find('Symbols similar to'):]
index = html.find('data-symbol="')
while index != -1:
html = html[index+13:]
end = html.find('"')
results.append(html[:end])
index = html.find('data-symbol="')
output = []
for result in results:
if result not in output:
output.append(result)
return json.dumps({'results':output})
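# Illustrative behaviour of site() for the currency shortcuts above (derived from
# the code, shown as a reading aid):
#   site('nzd')     -> {"results": ["nzd/usd", "nzd/eur", "nzd/aud", "nzd/sgd"]}
#   site('nzd/usd') -> {"results": ["nzd/usd"]}
# Any other input falls through to the Yahoo Finance symbol-lookup scrape.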
def main():
form = cgi.FieldStorage()
company_name = str(form['company_name'].value).lower().strip()
cnx = mysql.connector.connect(user='api', database='projectapi')
cursor = cnx.cursor(buffered=True)
sql = "SELECT * FROM yahoofinancessearch WHERE company_name='{}';".format(company_name)
cursor.execute(sql)
cache_results = ''
cache_expired = False
fetch_results = ''
results = ''
try:
data = list(cursor.fetchall()[0])
if (datetime.now()-timedelta(days=60)) > data[3]:
raise IndexError('item in database expired')
cache_results = data[2]
cursor.close()
cnx.close()
except:
cache_expired = True
fetch_results = site(company_name)
finally:
if not cache_expired:
results = cache_results
elif expected(fetch_results):
t1 = Thread(target=commit, args=(company_name, fetch_results, cursor, cnx,))
t1.start()
results = fetch_results
elif cache_expired:
results = cache_results
else:
results = json.dumps({'error':'api access problem'})
return results
if __name__ == '__main__':
print('Content-type:application/json', end='\r\n\r\n')
print(main().encode(encoding='UTF-8',errors='ignore').decode(), end='')
| 33.489583
| 102
| 0.596579
|
31975e8aeb381a043250eae7c443773c89c9c5b6
| 3,643
|
py
|
Python
|
scripts/get_dates.py
|
senseconcordia/NFBugsExtended
|
60058ccbd64107018a92ede73056d08ecbdaaed2
|
[
"MIT"
] | 2
|
2021-05-23T07:21:28.000Z
|
2021-05-26T03:44:17.000Z
|
scripts/get_dates.py
|
senseconcordia/NFBugsExtended
|
60058ccbd64107018a92ede73056d08ecbdaaed2
|
[
"MIT"
] | null | null | null |
scripts/get_dates.py
|
senseconcordia/NFBugsExtended
|
60058ccbd64107018a92ede73056d08ecbdaaed2
|
[
"MIT"
] | null | null | null |
# import csv
# bug_fixing_hash = ''
# bug_inducing_hash = ''
# with open('cassandra.csv') as csvfile:
# commits = csv.DictReader(csvfile, delimiter=',')
# for commit in commits:
# if any(issue in commit['commit_message'] for issue in perf_tags):
# perf_fix_commits.append(commit['commit_hash'])
# number_fix_perf_commits = number_fix_perf_commits + 1
# csvfile.close()
# print(number_fix_perf_commits, 'performance fix commits')
# # print(str(perf_fix_commits))
# # ----------------------------------------------------------------------------------------
# perf_glm_probability_list = []
# perf_rf_probability_list = []
# perf_induce_commits = []
# with open('cassandra.csv') as csvfile:
# commits = csv.DictReader(csvfile, delimiter=',')
# for commit in commits:
# if any(perf_fix in commit['fixes'] for perf_fix in perf_fix_commits):
# perf_induce_commits.append(commit['commit_hash'])
# csvfile.close()
# print(str(len(perf_induce_commits)),'performance induce commits')
# # ----------------------------------------------------------------------------------------
# perf_commits = perf_fix_commits + perf_induce_commits
# with open('cassandra.csv') as csvfile, open('perf_cassandra.csv','w+') as f_out:
# commits = csv.DictReader(csvfile, delimiter=',')
# writer = csv.DictWriter(f_out, fieldnames=commits.fieldnames)
# writer.writeheader() # For writing header
# for commit in commits:
# for perf_c in perf_commits:
# if perf_c in commit['commit_hash']:
# writer.writerow(commit)
# csvfile.close()
# # import csv
# # with open('D:/test.csv', 'r') as f, open('D:/out.csv', 'w') as f_out:
# # reader = csv.DictReader(f)
# # writer = csv.DictWriter(f_out, fieldnames=reader.fieldnames)
# # writer.writeheader() # For writing header
# # for row in reader:
# # if row['ICLEVEL'] == '1':
# # writer.writerow(row)
# # ----------------------------------------------------------------------------------------
import datetime
# timestamp1 = datetime.datetime(2017, 12, 1).strftime('%s')
# timestamp2 = datetime.datetime(2017, 11, 14).strftime('%s')
# print(timestamp1)
# print(timestamp2)
timestamp1 = 1575291678
timestamp2 = 1542727422
# 1512079200
# 1510610400
def calculate_date(timestamp1, timestamp2):
print((float(timestamp1)-float(timestamp2))/(60*60*24))
if (float(timestamp1)-float(timestamp2))/(60*60*24)>0:
print('positive')
else:
print('negative')
calculate_date(1406905606,1281245374)
calculate_date(1218042255,1162820275)
calculate_date(1192609411,1192525249)
calculate_date(1171049299,1162761361)
calculate_date(1169823832,1169659807)
calculate_date(1169823832,1169274676)
calculate_date(1169823832,1162761361)
calculate_date(1169823832,1169274676)
calculate_date(1169823832,1169275454)
calculate_date(1169823832,1169274676)
calculate_date(1164121622,1163687642)
from datetime import datetime
ts = int("1284101485")
# if you encounter a "year is out of range" error the timestamp
# may be in milliseconds, try `ts /= 1000` in that case
print(datetime.utcfromtimestamp(int(1406905606)).strftime('%Y-%m-%d %H:%M:%S'))
print(datetime.utcfromtimestamp(int(1218042255)).strftime('%Y-%m-%d %H:%M:%S'))
print(datetime.utcfromtimestamp(int(1192609411)).strftime('%Y-%m-%d %H:%M:%S'))
print(datetime.utcfromtimestamp(int(1171049299)).strftime('%Y-%m-%d %H:%M:%S'))
print(datetime.utcfromtimestamp(int(1169823832)).strftime('%Y-%m-%d %H:%M:%S'))
print(datetime.utcfromtimestamp(int(1164121622)).strftime('%Y-%m-%d %H:%M:%S'))
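# Hedged alternative sketch (not in the original script): the same gaps expressed
# in whole days via datetime arithmetic rather than manual division.
def days_between(ts_late, ts_early):
    """Return the number of whole days between two Unix timestamps."""
    return (datetime.utcfromtimestamp(ts_late) - datetime.utcfromtimestamp(ts_early)).days
# e.g. days_between(1406905606, 1281245374) corresponds to the first calculate_date call above.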
| 32.526786
| 92
| 0.645896
|
3daf27cd148bceeb965db4cbf0f5a77ae7f36736
| 1,403
|
py
|
Python
|
xm06_LinkList_InPlaceReversalOfALinkedList/IPRAL_02_Reverse_a_linklist.py
|
RagnorLixiaomeng/grokkingTheCoding
|
a46aea3e5895f6dbe23aa008e3f635d2d758a714
|
[
"Apache-2.0"
] | null | null | null |
xm06_LinkList_InPlaceReversalOfALinkedList/IPRAL_02_Reverse_a_linklist.py
|
RagnorLixiaomeng/grokkingTheCoding
|
a46aea3e5895f6dbe23aa008e3f635d2d758a714
|
[
"Apache-2.0"
] | null | null | null |
xm06_LinkList_InPlaceReversalOfALinkedList/IPRAL_02_Reverse_a_linklist.py
|
RagnorLixiaomeng/grokkingTheCoding
|
a46aea3e5895f6dbe23aa008e3f635d2d758a714
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# @Time : 2021/5/27 10:28 PM
# @Author: lixiaomeng_someday
# @Email : 131xxxx119@163.com
# @File : IPRAL_02_Reverse_a_linklist.py
"""
story:
Given the head of a Singly LinkedList, reverse the LinkedList.
Write a function to return the new head of the reversed LinkedList.
analysis:
        1. Use the pattern of one previous pointer and one current pointer; this is essentially the same as the classic two-pointer technique.
Example:
input: Nodes of original LinkedList are: 2 4 6 8 10
output: Nodes of reversed LinkedList are: 10 8 6 4 2
"""
from grokkingTheCoding.DataStructure.link_list import Link_list
def reverse_link_list(linklist: Link_list)-> Link_list:
previous, current, _next = None, linklist, None # init variable
while current is not None:
        # step 1: save current's forward next pointer before it is overwritten
_next = current.next
        # step 2: since we are reversing, point current.next back at previous
current.next = previous
        # step 3: advance previous to the current node
previous = current
        # step 4: finally move on to the next element of the list; this is why step 1 saved it
current = _next
    # return linklist  # this would be wrong: after the reversal, given this data structure, previous is the node that heads the whole reversed list
return previous
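# Worked trace (added for clarity) for the list 2 -> 4 -> 6 -> 8:
#   start:       previous=None, current=2
#   iteration 1: 2.next=None,   previous=2, current=4
#   iteration 2: 4.next=2,      previous=4, current=6
#   iteration 3: 6.next=4,      previous=6, current=8
#   iteration 4: 8.next=6,      previous=8, current=None
# The loop ends with previous pointing at 8, the head of the reversed list
# 8 -> 6 -> 4 -> 2, which is why previous (not the original head) is returned.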
def main():
test = Link_list(2)
test.next = Link_list(4)
test.next.next = Link_list(6)
test.next.next.next = Link_list(8)
test.next.next.next.next = None
test.print_link_list()
reverse_link_list(test).print_link_list()
if __name__ == "__main__":
main()
| 26.980769
| 75
| 0.693514
|